#!/usr/bin/env python

from google.appengine.ext import db 
from google.appengine.api.urlfetch import fetch
from google.appengine.api import memcache
import re
import logging
from datetime import datetime

# These classes define the data objects
# that you will be able to store in
# AppEngine's data store.

# Sentinel meaning "never": default for the tested/was_valid timestamps.
# normalize() clamps those fields to this floor and toDict() omits them
# when they still equal the sentinel.
MIN_DATETIME = datetime(1901, 1, 1)

class Library(db.Model):
    """A library catalogue that can be probed online for ISBN availability.

    ``link`` is a search-URL template containing the literal token
    ``#{ISBN}``.  validate() fetches that URL for an ISBN the library is
    known to hold (``have``) and one it is known not to hold (``dont``)
    and records any discrepancy in the problem fields.
    """

    # Required properties
    name = db.StringProperty(required=True)
    title = db.StringProperty(required=True)
    link = db.LinkProperty(required=True)  # must contain the '#{ISBN}' token
    missing = db.StringProperty(indexed=False)  # pattern marking a "not found" page
    match_isbn = db.BooleanProperty(indexed=False)  # if set, look for the ISBN itself
    version = db.IntegerProperty(default=1)
    is_community = db.BooleanProperty(required=True, default=False)

    # Prefix search terms derived from `title`; rebuilt by normalize() on put().
    text_search = db.StringListProperty(indexed=True)

    # Optional properties: the two probe ISBNs used by validate().
    have = db.StringProperty()
    dont = db.StringProperty()

    remote_ip = db.StringProperty()

    # Problem bookkeeping maintained by validate()/normalize().
    problems = db.StringProperty(multiline=True)
    problem_count = db.IntegerProperty(default=0)  # consecutive failed checks
    has_problems = db.BooleanProperty(default=False)  # problem_count >= 1
    has_problems_3 = db.BooleanProperty(default=False)  # problem_count >= 3

    # Timestamps.  MIN_DATETIME acts as a "never" sentinel.
    created = db.DateTimeProperty(auto_now_add=True)
    updated = db.DateTimeProperty(auto_now=True)
    tested = db.DateTimeProperty(default=MIN_DATETIME)
    was_valid = db.DateTimeProperty(default=MIN_DATETIME)

    def delete(self):
        """Delete this entity and drop all cached query results."""
        super(Library, self).delete()
        Library.invalidate_cache()

    def put(self):
        """Normalize derived fields, save, and drop all cached query results."""
        self.normalize()
        super(Library, self).put()
        Library.invalidate_cache()

    def normalize(self):
        """Recompute the derived fields prior to saving."""
        self.text_search = Library.buildSearchTerm(self.title)

        self.has_problems = self.problem_count >= 1
        # NOTE(review): has_problems_2 is not a declared property on this
        # model, so the value below is never persisted or queryable --
        # either declare it like has_problems_3 or remove this line.
        self.has_problems_2 = self.problem_count >= 2
        self.has_problems_3 = self.problem_count >= 3

        # Clamp timestamps so they never fall below the "never" sentinel.
        if self.tested < MIN_DATETIME:
            self.tested = MIN_DATETIME
        if self.was_valid < MIN_DATETIME:
            self.was_valid = MIN_DATETIME

    def toDict(self):
        """Return a JSON-serializable dict of the public fields.

        Timestamps still equal to the MIN_DATETIME sentinel are omitted
        rather than serialized.
        """
        data = {
            "name": self.name,
            "title": self.title,
            "link": self.link,
            "have": self.have,
            "dont": self.dont,
            "missing": self.missing,
            "match_isbn": self.match_isbn,
            "problems": self.problems,
            "has_problems": self.has_problems,
            "problem_count": self.problem_count,
            "is_community": self.is_community,
        }
        if self.tested and self.tested != MIN_DATETIME:
            data["tested"] = self.tested.strftime('%Y-%m-%dT%H:%M:%SZ')

        if self.was_valid and self.was_valid != MIN_DATETIME:
            data["was_valid"] = self.was_valid.strftime('%Y-%m-%dT%H:%M:%SZ')
        else:
            # problem_count is already set above; this redundant assignment
            # is kept for byte-compatibility with the original output.
            data["problem_count"] = self.problem_count

        return data

    def siteUrl(self, isbn):
        """Return the catalogue search URL for `isbn`."""
        return self.link.replace('#{ISBN}', isbn)

    def process(self, isbn):
        """Fetch the search page for `isbn`; return True if the item was found.

        "Found" means either the ISBN itself appears in the page
        (match_isbn mode) or the `missing` pattern does not (default
        mode).  Propagates fetch exceptions to the caller.
        """
        reply = fetch(self.siteUrl(isbn), validate_certificate=False, deadline=10)
        content = reply.content

        if self.match_isbn:
            logging.debug("Matching ISBN: %s", isbn.lower())
            return content.decode('utf-8').lower().find(isbn.lower()) >= 0
        else:
            logging.debug("Matching: %s", self.missing.lower())
            return re.search(self.missing.lower(), content.lower()) is None

    def validateLink(self):
        """Raise if `link` is missing the mandatory #{ISBN} placeholder."""
        if '#{ISBN}' not in self.link:
            raise Exception("Link does not contain required #{ISBN} token")

    def validate(self):
        """Probe the catalogue with both test ISBNs and record the outcome.

        Updates problems / has_problems / problem_count and the tested /
        was_valid timestamps (the caller is responsible for put()).
        Returns the problem text, or None when the library validated
        cleanly.
        """
        problems = ''
        try:
            self.validateLink()
            have = self.process(self.have)
            dont = self.process(self.dont)
            if not have:
                problems += "Existing item not found!\n"
            if dont:
                problems += "Missing item found!\n"
        except Exception as ex:  # "except Exception, ex" is Py2-only syntax
            problems = "%s" % (ex,)

        if problems == '':
            problems = None

        self.problems = problems
        self.has_problems = problems is not None
        self.tested = datetime.utcnow()

        if self.has_problems:
            self.problem_count += 1  # consecutive-failure counter
        else:
            self.problem_count = 0
            self.was_valid = self.tested

        return problems

    @staticmethod
    def buildSearchTerm(text):
        """Return sorted, de-duplicated prefixes of every word in `text`.

        e.g. "foo" contributes ["f", "fo", "foo"]; this enables prefix
        matching against the text_search list property.
        """
        search_terms = []

        # Split into individual words (raw string so \W stays a regex class).
        words = re.split(r'\W+', text.lower())

        # For each word "foo", add all prefixes ["f", "fo", "foo"]
        for word in words:
            for i in range(1, len(word) + 1):
                search_terms.append(word[:i])

        # Remove duplicate terms
        return sorted(set(search_terms))

    @staticmethod
    def create(lib):
        """Build a Library entity (keyed by name) from a plain dict."""
        this = Library(
            key_name=lib["name"],
            title=lib["title"],
            name=lib["name"],
            link=lib["link"],
            have=lib["have"],
            dont=lib["dont"],
            is_community=lib["is_community"],
            )
        # The mere PRESENCE of the key enables ISBN matching (its value is
        # ignored); otherwise a 'missing' pattern is required.
        # (dict.has_key was removed in Python 3; `in` works everywhere.)
        if "match_isbn" in lib:
            this.match_isbn = True
        else:
            this.missing = lib["missing"]

        return this

    @staticmethod
    def resolve(name):
        """Return the Library with this (key) `name`, or None."""
        q = Library.all()

        q.filter("name", name)

        libs = q.fetch(1)
        return libs[0] if len(libs) > 0 else None

    @staticmethod
    def find(title):
        """Return the first Library with this exact `title`, or None."""
        q = Library.all()

        q.filter("title", title)

        libs = q.fetch(1)
        return libs[0] if len(libs) > 0 else None

    @staticmethod
    def search(query, limit=100, offset=0):
        """Return libraries whose title matches every word of `query`."""
        q = Library.all()

        search = re.split(r'\W+', query.lower())
        search = sorted(set(search))
        for term in search:
            q.filter("text_search", term)

        q.order("title")

        return q.fetch(limit, offset)

    @staticmethod
    def count_search(query, kind, sort, limit=100, offset=0, problems=False):
        """Filtered, sorted, counted search used by the listing pages.

        Returns a dict with offset/limit/count/query/sort/kind/problems
        plus the matching "items".  `problems` may be False, '-1' (no
        problems), '1' (any problems) or anything else (3+ problems).
        """
        query = query.strip()
        sort = sort or "title"
        key = False

        # Only the default listing (no query, title sort, first page, no
        # problem filter) is hot enough to be worth memcaching.
        if query == '' and sort == 'title' \
                            and limit <= 100 and offset == 0 and not problems:
            key = "Library:count_search/%s/%d" % (kind or "", limit)

        logging.debug("Library:default_search: %s" % (key or "FALSE"))

        if key:
            result = memcache.get(key)
            logging.debug("[%s]: %s" % (key, "HIT" if result else "MISS"))
            if result is not None:
                return result

        q = Library.all()
        if problems:
            if problems == '-1':
                q.filter("has_problems", False)
            elif problems == '1':
                q.filter("has_problems", True)
            else:
                q.filter("has_problems_3", True)

        if kind and kind != "":
            q.filter('is_community', kind == 'community')

        if query != '':
            search = re.split(r'\W+', query.lower())
            search = sorted(set(search))

            for term in search:
                q.filter("text_search", term)

        q.order(sort)

        result = {
                "offset": offset,
                "limit": limit,
                "count": q.count(10000),
                "query": query,
                "sort": sort,
                "kind": kind,
                "problems": problems,
        }

        logging.debug("count_search: %s" % (result,))

        result["items"] = q.fetch(limit, offset)

        if key:
            memcache.set(key, result)

        return result

    @staticmethod
    def count_all():
        """Return (and memcache) the total number of Library entities."""
        key = "Library:count_all"
        value = memcache.get(key)
        if value is not None:
            return value

        q = Library.all()
        value = q.count(10000)

        memcache.set(key, value)
        return value

    @staticmethod
    def invalidate_cache():
        """Drop cached query results after any write.

        NOTE(review): this flushes the ENTIRE memcache, not just the
        Library:* keys -- coarse but simple.
        """
        memcache.flush_all()
