#!/usr/bin/python2.5
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
from google.appengine.api import urlfetch 
from google.appengine.ext import db
from google.appengine.api import taskqueue
from google.appengine.api import memcache

import os
import logging
import simplejson
import timezones  
import adminemail
import settings

from models import *
from datetime import *
from pytz.gae import pytz 
from updatetasks import *

class UrlFetcher(webapp.RequestHandler):
    
    def fetchUrl(self,url, method, payload=None):
        fullUrl = settings.SERVICE_URL + url 
        try:
            return self.fetchUrlInternal(fullUrl, method, payload)
        except Exception, ex:            
            adminemail.ReportError("URL Fetch", ex)
            logging.error("Fetch failed")
            raise
    
    def fetchUrlInternal(self,url, method, payload=None):        
        logging.debug("Fetching %s using %s" % (url, method))
        
        result = urlfetch.fetch(url = url,
                                method = method,
                                payload = payload,
                                deadline = 60) # 1 minute timeout. This is run in a task, so it could be more then 10 seconds (10 minutes is the offline limit)
                    
        if result.status_code == 200:
            #parse json result
            jsonresult = simplejson.loads(result.content)                
            message = "Response result: %s" % str(jsonresult["Result"])                                                                                               
            logging.debug(message)
            if jsonresult["Result"] == "OK":
                return jsonresult        
            
        # if we got here, there was a problem
        message = "URL fetch failed with code %s, error %s" % (result.status_code, result.content)
        logging.error(message)
        raise Exception(message) 
    
class CheckUpdates(UrlFetcher):
    """
    This task handler retrieves file list from mail relay server
    """
    def get(self):
        # NOTE(review): the early return below deliberately disables this
        # handler (everything after it is unreachable); delete it to
        # re-enable update checking.
        return
        logging.debug("Checking updates task entered")
        result = self.fetchUrl(url = "Check.aspx",
                               method = "GET")

        periodicalsToUpdate = []
        periodicals = Periodical.allPeriodicals()

        for fileUpdate in result["FileUpdates"]:
            self.updatePeriodical(fileUpdate, periodicals, periodicalsToUpdate)

        if periodicalsToUpdate:
            # Batch-save every changed/new periodical, then drop the cache.
            db.put(periodicalsToUpdate)
            Periodical.clearCache()

    def updatePeriodical(self, fileUpdate, periodicals, periodicalsToUpdate):
        """Sync one file entry from the relay service into the datastore.

        Appends any changed or newly created Periodical to
        periodicalsToUpdate; the caller batch-saves the whole list.
        Returns True when a change was recorded, False otherwise.
        """
        filename = fileUpdate["FileName"]
        periodical_name = os.path.splitext(filename)[0]
        # BUG FIX: the original called string.replace() but the `string`
        # module is never imported here; use the str method instead.
        keyName = filename.lower().replace('.', '_')

        # Timestamps from the service are naive local times in Prague.
        fileUpdated = datetime.strptime(fileUpdate["LastUpdated"], '%Y-%m-%d %H:%M:%S')
        fileUpdatedUTC = timezones.toUTC(fileUpdated, "Europe/Prague")

        periodical = None
        if periodicals:
            matches = [p for p in periodicals if p.key().name() == keyName]
            if matches:
                periodical = matches[0]
                # Stored last_update is naive; localize to UTC for comparison.
                perUpdatedUTC = pytz.utc.localize(periodical.last_update)

        if periodical is not None:
            if perUpdatedUTC == fileUpdatedUTC:
                return False  # nothing changed
            logging.debug("Updating %s", keyName)
            periodical.last_update = fileUpdatedUTC
            periodical.filesizeKB = fileUpdate["SizeInKb"]
            # Saved once via the caller's batched db.put(); the extra
            # periodical.put() the original did here was a redundant write.
            periodicalsToUpdate.append(periodical)
            return True

        # not found in current periodicals, new record
        logging.debug("Creating %s", keyName)
        periodical = Periodical.get_or_insert(key_name = keyName,
                                              name = periodical_name,
                                              filename = filename,
                                              last_update = fileUpdatedUTC,
                                              filesizeKB = fileUpdate["SizeInKb"])
        periodicalsToUpdate.append(periodical)
        return True
        
class Send(UrlFetcher):
    """
    This task handler contacts mail relay server to send an email subscription batch
    """

    def getQueue(self, periodical):
        """Fetch the oldest undelivered MailQueue items for a periodical."""
        # Amazon SES currently limits recipients to 15 per message;
        # stay one below to be on the safe side.
        FETCHLIMIT = 14

        query = MailQueue.all()
        query.ancestor(periodical)
        query.filter("delivered = ", None)  # not yet sent subscriptions
        query.order("inserted")             # oldest first, FIFO queue
        return query.fetch(FETCHLIMIT)

    def processQueue(self, queue, periodical):
        """Send the batch to the relay service and mark items delivered.

        Every queue item gets a delivered timestamp (duplicates included)
        but each distinct address is sent only once. The timestamps are
        persisted only after the relay call succeeds, so a failed fetch
        leaves the queue untouched for the task retry.
        """
        emails = []
        now = datetime.now()
        for queueItem in queue:
            queueItem.delivered = now
            if not queueItem.email in emails:  # skip duplicates
                emails.append(queueItem.email)

        if not emails:
            logging.debug("No emails to send")
            return

        # create JSON request for the relay service
        logging.debug("Sending subscription info to relay service. Email count %i" % len(emails))
        json_obj = {"Filename": periodical.key().name(),
                    "Recipients": emails}
        json_str = simplejson.dumps(json_obj)

        self.fetchUrl(url = "Send.aspx",
                      method = "POST",
                      payload = json_str)

        db.put(queue)  # save delivered timestamps

    def get(self):
        logging.debug("Sending mails task entered")

        # get associated periodical
        keyName = self.request.get('keyName')
        periodical = Periodical.get_by_key_name(keyName)
        if periodical is None:
            # BUG FIX: the message was never interpolated — the original
            # passed keyName as a second Exception argument (comma, not %).
            raise Exception("Periodical %s not found!" % keyName)

        queue = self.getQueue(periodical)
        self.processQueue(queue, periodical)
    
        
class ProcessQueue(webapp.RequestHandler):
    """
    Go through mail queue and issue send tasks if needed
    """
    def get(self):
        logging.debug("Entering process queue job")
        for periodical in Periodical.allActive():
            pending = MailQueue.all()
            pending.ancestor(periodical)
            pending.filter("delivered = ", None)  # not yet sent subscriptions

            # Enqueue a send task only when at least one mail is pending.
            if pending.get() is None:
                continue

            logging.debug("Enqueuing send task for %s" % periodical.name)
            taskqueue.add(queue_name = "mailqueue",
                          url = '/queue/send',
                          method = 'GET',
                          params = {"keyName": periodical.key().name()})
       
class ProcessSubscriptions(webapp.RequestHandler):
    """
    Go through subscriptions and add to mail queue
    """
    def get(self):
        FETCHLIMIT = 100
        logging.debug("Entering process subscriptions job")

        for periodical in Periodical.allActive():
            # subscriptions of this periodical that are due now
            query = Subscription.all()
            query.ancestor(periodical)
            query.filter("send_next <= ", datetime.now())  # due subscriptions

            subscriptions = query.fetch(FETCHLIMIT)
            queue = []

            for subscription in subscriptions:
                # skip if this issue was already sent to the subscriber
                already_sent = (subscription.send_last is not None
                                and subscription.send_last > periodical.last_update)
                if already_sent:
                    continue

                # add to queue
                queue.append(MailQueue(parent = subscription,
                                       email = subscription.user_settings.email,
                                       user_settings = subscription.user_settings,
                                       periodical = subscription.periodical))
                subscription.send_last = datetime.now()
                subscription.setNextSend()

            if queue:
                logging.debug("Entered %i mails into %s mail queue" % (len(queue), periodical.name))
                db.run_in_transaction(self.insertIntoQueue, subscriptions, queue)

    def insertIntoQueue(self, subscriptions, queue):
        # Persist updated subscriptions and new queue items atomically.
        db.put(subscriptions)
        db.put(queue)
        
class UpdateStats(webapp.RequestHandler):
    """
    Update periodicals statistics
    """
    def get(self):
        logging.debug("Entering update statistics job")

        # Recompute stats for every periodical, then invalidate the
        # cached summary so the next read rebuilds it.
        for periodical in Periodical.allPeriodicals():
            PeriodicalStats.updatePeriodicalsStats(periodical)

        memcache.delete(settings.MEM_STATS)
        
class PurgeMailQueue(webapp.RequestHandler):
    """
    Deletes old mail queue records
    """
    def get(self):
        """Delete up to 100 mail queue items older than 72 hours."""
        logging.debug("Entering mail queue purge job")

        from_date = datetime.now() - timedelta(hours=72)

        query = MailQueue.all()
        query.filter("inserted < ", from_date)
        query.order("inserted")  # purge the oldest items first
        res = query.fetch(100)

        if res:
            logging.debug("Purging %i mail queue items" % len(res))
            # BUG FIX: the original fired db.delete_async() and dropped the
            # RPC handle, so the delete could be abandoned when the handler
            # returned. Wait for the RPC to complete.
            db.delete_async(res).get_result()
            
class ProcessSubscriptions2(webapp.RequestHandler):
    """
    Processes all subscriptions
    """
    def get(self):
        logging.debug("Entering process subscriptions job")
        updater = UpdatePeriodical()
        # Defer per-periodical subscription processing to background tasks.
        for periodical in Periodical.allActive():
            updater.sendDeferred(periodical.key().name())
        
class ListTasks(webapp.RequestHandler):
    """
    Quickly show common task handlers links
    """
    def get(self):
        """Render a plain HTML page of shortcut links to the admin/cron handlers."""
        self.response.clear()
        self.response.set_status(200)
        out = self.response.out
        # BUG FIX: the original used doubled quotes ("") inside the literals,
        # which Python concatenates as adjacent empty strings — the emitted
        # href attributes were therefore unquoted. Quote them properly.
        out.write('<a href="https://appengine.google.com/dashboard?app_id=kindlerize">Dashboard</a><br>')
        out.write('<a href="/cron/checkupdates">Check Updates</a><br>')
        out.write('<a href="/cron/subscriptions">Process Subscriptions</a><br>')
        out.write('<a href="/cron/queue">Process Queue</a><br>')
        out.write('<a href="/cron/stats">Update Stats</a><br>')
        out.write('<a href="%sCheck.aspx">Service check</a><br>' % settings.SERVICE_URL)
        
def main():
    """Wire up the cron/task URL routes and start the WSGI application."""
    routes = [('/cron/checkupdates', CheckUpdates),
              ('/cron/subscriptions', ProcessSubscriptions),
              ('/cron/queue', ProcessQueue),
              ('/cron/stats', UpdateStats),
              ('/queue/send', Send),
              ('/cron/purgequeue', PurgeMailQueue),
              ('/cron/subscriptions2', ProcessSubscriptions2),
              ('/listtasks', ListTasks)]

    run_wsgi_app(webapp.WSGIApplication(routes, debug=True))

if __name__ == '__main__':
    main()