
"""the data model"""

import re
import logging
import hashlib
from google.appengine.ext import db
import pypione
from pypione import net
from pypione.exc import PkgInfoNotFound, PkgDownloadError
from pypione.package import *

log = logging.getLogger()

_invalid_propname = re.compile(r'[^a-z0-9_]+')
def add_properties(expando, props):
    """Attach each key/value pair in *props* to *expando* as a dynamic property.

    Keys are lowercased and every run of characters outside [a-z0-9_] is
    collapsed to a single underscore so they are valid datastore property
    names.  Long string values are wrapped in db.Text so they escape the
    indexed-string length limit; non-string values are stored unchanged.
    """
    # len(props), not len(props.keys()) -- avoids building a throwaway list.
    log.info("adding %s properties to %s", len(props), expando)
    for key, value in props.iteritems():
        key = _invalid_propname.sub("_", key.lower())
        # Only strings can (and need to) be promoted to db.Text; metadata
        # values may also be lists etc., which db.Text cannot wrap.
        if isinstance(value, basestring) and len(value) > 300:
            value = db.Text(value)
        setattr(expando, key, value)

class Package(db.Expando):
    """One (name, version) release of a distribution."""
    last_modified = db.DateTimeProperty(auto_now_add=True)
    name = db.StringProperty()
    name_lower = db.StringProperty(default=None)
    version = db.StringProperty()

    @classmethod
    def new(cls, name, version, **kw):
        """Build a Package keyed on "<name>-<version>" (not yet saved)."""
        return cls(key_name="%s-%s" % (name, version),
                   name=name,
                   name_lower=name.lower(),
                   version=version,
                   **kw)
    
class PackageFile(db.Expando):
    """A downloaded distribution file (sdist, egg, ...) for one release.

    Instances are built via new(), which fetches the file, checks its md5
    when one was supplied, reads PKG-INFO metadata when the format allows,
    and creates and saves the owning Package entity.
    """
    last_modified = db.DateTimeProperty(auto_now_add=True)
    package = db.ReferenceProperty(Package)  # the release this file belongs to
    data = db.BlobProperty()  # raw file contents
    filename = db.StringProperty()
    packagetype = db.StringProperty() # i.e. sdist, bdist_egg
    bytes = db.IntegerProperty()  # reported or measured size of data
    md5_digest = db.StringProperty()  # hex md5 of data (computed locally)
    python_version = db.StringProperty() # i.e. source
    
    @classmethod
    def new(cls, name, release, filename, download_url, 
            md5_digest=None, packagetype=None, size=None, python_version=None, **kw):
        """Fetch download_url and return an (unsaved) PackageFile.

        name/release identify the owning Package, which IS created and
        saved here; size and md5_digest, when given, come from the
        upstream index and drive the chunked-fetch fallback and the
        integrity check respectively.

        Raises PkgDownloadError when the file is too large to fetch whole
        and either md5_digest or size is missing, or when the md5 of the
        fetched bytes does not match md5_digest.

        NOTE(review): the ranged-fetch fallback currently raises
        ValueError unconditionally on the first chunk (debugging aid, see
        the FIXME below), so oversized downloads never actually complete.
        """
        e = cls(key_name=filename, **kw)
        e.filename = filename
        e.packagetype = packagetype
        e.python_version = python_version
        
        log.info("attempting to fetch URL:%s, %s bytes ...", download_url, size or "[unknown]")
        fetched_with_range = False
        try:
            # First try to pull the whole file in a single request.
            f = net.urlfetch.fetch(download_url)
        except net.ResponseTooLargeError:
            # urlfetch refused the single-shot download; fall back to HTTP
            # Range requests -- but only when we can both size the chunks
            # (size) and verify the reassembled bytes (md5_digest).
            if not md5_digest:
                raise PkgDownloadError(
                    "Got ResponseTooLargeError for %s and too sheepish to attempt an "
                    "HTTP range download because there is no md5 to compare against" % (
                                                                            download_url))
            if not size:
                raise PkgDownloadError(
                    "Got ResponseTooLargeError for %s but cannot fetch in chunks "
                    "because size was not defined" % (download_url))
            
            response = []
            chunksize = 500
            # NOTE(review): round() can undercount -- e.g. size=1001 gives
            # 2 chunks and the final byte is never requested; verify before
            # re-enabling this path.
            chunks = int(round(float(size)/float(chunksize)))
            start, end = 0, chunksize-1
            for chunk in range(chunks):
                byte_range = "%s-%s" % (start, end)
                # try sending Range headers to fetch it in chunks ...
                f = net.urlfetch.fetch(download_url, headers={'Range': 'bytes=%s' % byte_range},
                                        allow_truncated=True)
                # Note 206 is Partial Content
                # NOTE(review): assert is stripped under -O; this check
                # would silently vanish in optimized mode.
                assert f.status_code==206, "[bytes=%s] Expected 206, Got Status %s" % (
                                                                byte_range, f.status_code) 
                ##### FIXME.  This is acting funny. Sometimes the above gets 206, 
                ##### other times 200.  It should always be 206
                # Deliberate debugging raise: dumps the chunk diagnostics and
                # aborts, so the append/accumulate lines below are currently
                # unreachable.
                raise ValueError("bytes=%s length=%s content_was_truncated=%s header_said_length=%s" % (
                                 byte_range, len(f.content), f.content_was_truncated, f.headers['Content-Length']))
                response.append(f.content)
                start += chunksize
                end += chunksize
            response = "".join(response)
            fetched_with_range = True
        else:
            assert f.status_code==200, "Expected 200, Got Status %s" % (f.status_code)
            response = f.content
        e.data = response
        # Always compute our own digest; compare only when the index gave
        # one to compare against.
        pkg_md5_digest = hashlib.md5(e.data).hexdigest()
        if md5_digest:
            if md5_digest != pkg_md5_digest:
                raise PkgDownloadError(
                    "Cannot trust %s (md5 %s != expected %s, fetched_with_range=%s)" % (
                                            download_url, pkg_md5_digest, md5_digest, fetched_with_range))
        
        # with external download URLs we don't get an md5
        e.md5_digest = pkg_md5_digest
        e.bytes = size or len(e.data)
        
        # Try to extract PKG-INFO metadata based on the file extension.
        pkg_info = None
        try:
            if has_extension(e.filename, ('.egg', '.zip')):
                log.info("%s is a zipped egg file", e.filename)
                pkg_info = read_pkg_info_from_egg(e.filename, e.data)
            elif has_extension(e.filename,
                    ('.tar', 'tar.bz2', 'tar.gz', 'tar.Z', '.tgz')):
                log.info("%s is a tar file", e.filename)
                pkg_info = read_pkg_info_from_tar(e.filename, e.data)
            else:
                raise UnsupportedFormatError("not sure how to read %s" % e.filename)
        except PkgInfoNotFound:
            # was not built with setuptools
            extra_meta = None
        else:
            extra_meta = unpack_pkg_info(pkg_info)
        
        # Create and persist the owning release; attach any extracted
        # metadata as dynamic Expando properties.
        package = Package.new(name, release)
        if extra_meta:
            add_properties(package, extra_meta)
        package.save()
        
        e.package = package
        return e
        
class Queue(db.Expando):
    """One queued unit of work on a named channel."""
    channel = db.StringProperty()
    established = db.DateTimeProperty(auto_now_add=True)
    last_attempt = db.DateTimeProperty()
    completed = db.BooleanProperty(default=False)

    @classmethod
    def pending(cls, channel):
        """Query for incomplete entries on *channel*, oldest first."""
        query = cls.all()
        query.filter("channel = ", channel)
        query.filter("completed = ", False)
        return query.order('established')
        
class QueueAllow(db.Model):
    """Whitelist of IP addresses allowed to post to the queue."""
    ipaddr = db.StringProperty()
    note = db.StringProperty()

    @classmethod
    def ip_is_allowed(cls, ipaddr):
        """Return True when *ipaddr* matches one of the first 20 allow rows.

        A None ipaddr returns False without touching the datastore.
        """
        if ipaddr is None:
            return False
        # any() short-circuits on the first match instead of materializing
        # the whole allow-list into a throwaway list first.
        return any(allow.ipaddr == ipaddr for allow in cls.all().fetch(20))