'''
Created on 2009-11-3

@author: wangyongtao
'''
from google.appengine import runtime
from google.appengine.ext import webapp
from google.appengine.api import images
from google.appengine.ext import db
from google.appengine.api.labs import taskqueue
from google.appengine.ext.webapp.util import run_wsgi_app
import urlparse
import spider
import mokomodel
import logging

# Enable verbose error pages from the webapp WSGIApplication below.
_DEBUG = True
# Base batch size for each cron pass; individual tasks scale it
# (e.g. /2 for model crawling, *10 for URL fix-ups, *25 for log cleanup).
_PER_CRAWLER_MODEL_COUNT = 10
class CronPage(webapp.RequestHandler):
    def get(self, action):
        action = action.lower()
        logging.debug('action: %s'%action)
        if action == 'model':
            self._cronModel()
        elif action == 'gallery':
            self._cronGallery()
        elif action == 'album':
            self._cronAlbum()
        elif action == 'imgsize':
            self._cronImageSize()
        elif action == 'log':
            self._cronLog()
        elif action == 'filter':
            self._cronFilter()
        elif action == 'photourl':
            self._cronPhotoUrl()
        elif action == 'avatarurl':
            self._cronAvatarUrl()
        elif action == 'albumurl':
            self._cronAlbumUrl()
        else:
            logging.info('unknown action')
            l = mokomodel.cronlog(action='unkown action', content = action, successed = False)
            l.put()
        
    def _cronModel(self):
        models = mokomodel.cronlog.get_non_crawled_models(_PER_CRAWLER_MODEL_COUNT/2)
        if models:
            try:
                for m in models:
                    logging.info('crawler model:%s'%m.username)
                    spider.fetch_model(m.username)
                    taskqueue.add(url='/task/gallery', params={'model': m.username}, method='GET')
                    mokomodel.cronlog.add_crawler_model(m.username, True)
            except runtime.DeadlineExceededError, e:
                logging.error('cron task: crawler model has met deadline exceeded error:%s'%e.message)
        else:
            logging.info('all models have been crawled')
   
    def _cronFilter(self):
        models = mokomodel.cronlog.get_non_filtered_models(2)
        if models:
            try:
                for m in models:
                    logging.info('filter model:%s'%m.username)
                    taskqueue.add(url='/task/filtermodel', params={'model': m.username}, method='GET')
                    mokomodel.cronlog.add_filter_model(m.username, True)
            except runtime.DeadlineExceededError, e:
                logging.error('cron task: filter model has met deadline exceeded error:%s'%e.message)
        else:
            logging.info('all models have been crawled')
            
    def _cronLog(self):
        uselessLogs = mokomodel.cronlog.get_useless_logs(_PER_CRAWLER_MODEL_COUNT*25)
        if uselessLogs:
            try:
                db.delete(uselessLogs)
                logging.info('Successful delete useless logs items: %d'%len(uselessLogs))
            except runtime.DeadlineExceededError, e:
                logging.error('cron task: delete useless logs has met deadline exceeded error:%s'%e.message)
        else:
            logging.info('There is no useless logs in data store')
            
    def _cronGallery(self):
        models = mokomodel.cronlog.get_non_crawled_gallery(_PER_CRAWLER_MODEL_COUNT)
        if models:
            logging.info('models count: %d'%len(models))
            try:
                for m in models:
                    logging.debug('Crawler gallery:%s'%m.username)
                    result = spider.fetch_gallery(m.username)
                    mokomodel.cronlog.add_crawler_gallery(m.username, result)
            except runtime.DeadlineExceededError, e:
                logging.error('cron task: fetch gallery has met deadline exceeded error:%s'%e.message)
        
    def _cronPhotoUrl(self):
        photos = mokomodel.cronlog.get_non_updated_photos(_PER_CRAWLER_MODEL_COUNT*10)
        if photos:
            logging.info('start update photo url with %d photos'%len(photos))
            try:
                for p in photos:
                    urlc = urlparse.urlparse(p.url)
                    if urlc.netloc.find(':')> -1 and urlc.port is not None:
                        #found non-80 port in url, we need to transform the url
                        logging.info('found invalid url:%s'%p.url)
                        netloc = urlc.netloc.split(':')[0]
                        url = urlparse.urlunsplit((urlc.scheme, netloc, urlc.path, urlc.query, urlc.fragment))
                        p.url = url
                        p.put()
                        logging.info('updated url:%s'%url)
                    #add log for updated photo
                    mokomodel.cronlog.add_updated_photo(p)
            except runtime.DeadlineExceededError, e: 
                logging.error('cron task: update photo url has met deadline exceeded error:%s'%e.message)  
    def _cronAlbumUrl(self):
        albums = mokomodel.cronlog.get_non_verified_albums(_PER_CRAWLER_MODEL_COUNT*10)
        if albums:
            logging.info('start update photo url with %d albums'%len(albums))
            try:
                for p in albums:
                    urlc = urlparse.urlparse(p.cover)
                    if urlc.netloc.find(':')> -1 and urlc.port is not None:
                        #found non-80 port in url, we need to transform the url
                        logging.info('found invalid url:%s'%p.url)
                        netloc = urlc.netloc.split(':')[0]
                        url = urlparse.urlunsplit((urlc.scheme, netloc, urlc.path, urlc.query, urlc.fragment))
                        p.cover = url
                        p.put()
                        logging.info('updated url:%s'%url)
                    #add log for updated photo
                    mokomodel.cronlog.add_updated_album(p)
            except runtime.DeadlineExceededError, e: 
                logging.error('cron task: update album cover url has met deadline exceeded error:%s'%e.message)     
    
    def _cronAvatarUrl(self):
        photos = mokomodel.cronlog.get_non_updated_avatars(_PER_CRAWLER_MODEL_COUNT)
        if photos:
            logging.info('start update photo url with %d photos'%len(photos))
            try:
                for p in photos:
                    if p.avatar is not None:
                        urlc = urlparse.urlparse(p.avatar)
                        if urlc.netloc.find(':')> -1 and urlc.port is not None:
                            #found non-80 port in url, we need to transform the url
                            logging.info('found invalid url:%s'%p.avatar)
                            netloc = urlc.netloc.split(':')[0]
                            url = urlparse.urlunsplit((urlc.scheme, netloc, urlc.path, urlc.query, urlc.fragment))
                            p.avatar = url
                            p.put()
                            logging.info('updated url:%s'%url)
                        #add log for updated photo
                    mokomodel.cronlog.add_updated_avatar(p)
            except runtime.DeadlineExceededError, e: 
                logging.error('cron task: update avatar url has met deadline exceeded error:%s'%e.message)
                    
    
    def _cronAlbum(self):
        albums = mokomodel.cronlog.get_non_crawled_albums(_PER_CRAWLER_MODEL_COUNT)
        if albums:
            logging.debug('albums count: %d'%len(albums))
            try:
                for a in albums:
                    logging.debug('Crawler album:%s'%a.title)
                    result = spider.fetch_album(a.url)
                    mokomodel.cronlog.add_crawler_album(a.url, result)
            except runtime.DeadlineExceededError, e:
                logging.error('cron task: fetch album has met deadline exceeded error:%s'%e.message)
            
    def _cronImageSize(self):
        photos = mokomodel.cronlog.get_non_sized_photos(_PER_CRAWLER_MODEL_COUNT*2)
        if photos:
            logging.debug('photo count: %d'%len(photos))
            if len(photos) == 0:
                logging.debug('photo sizing task finished')
                return
            try:
                for p in photos:
                    try:
                        logging.debug('Sizing photo:%s'%p.url)
                        successed = spider.fetch_photo(p)
                        mokomodel.cronlog.add_sized_photo(p, successed)
                    except images.Error, e:
                        logging.error('cron task: get photo size has met image error:%s' % e.message)
            except runtime.DeadlineExceededError, e:
                logging.error('cron task: get photo size has met deadline exceeded error:%s'%e.message) 
        else:
            logging.info('no photo needs to be sized')           
                 
# Route every /cron/<action> URL to CronPage; the captured group is
# passed to CronPage.get() as the action name.
application = webapp.WSGIApplication(
    [(r'/cron/(.*)', CronPage)],
    debug=_DEBUG)

def main():
    '''CGI entry point: serve the cron WSGI application.'''
    run_wsgi_app(application)


if __name__ == "__main__":
    main()