# coding: utf8

import docclass
from datetime import datetime
import twitter


def index():
    """Seed the naive-bayes classifier with docclass's built-in sample set.

    Uses the web2py-injected ``db`` as the classifier's backing store.
    """
    classifier = docclass.naivebayes(docclass.getwords, db)
    docclass.sampletrain(classifier)
    return dict(message="train.py completed.")
    

def train_favorite():
    """Train the naive-bayes classifier on fresh Twitter data.

    Step 1 trains on newly favorited tweets; step 2 trains on the user's own
    timeline (both under the 'favorite' category). Each trained item is also
    persisted to the database, and the RAM cache is cleared at the end so
    stale classifications get recomputed.

    Relies on web2py globals (``db``, ``logger``, ``cache``) and app-level
    credential constants (``consumer_key`` etc.) defined outside this file.
    """
    tapi = twitter.Api(consumer_key=consumer_key, consumer_secret=consumer_secret,
                       access_token_key=access_token, access_token_secret=access_token_secret,
                       cache=None)
    classifier = docclass.naivebayes(docclass.getwords, db)

    # Step 1: favorites. Resume from the most recently stored favorite, if any.
    latest = db().select(db.twitter_favorite.ALL,
                         orderby=~db.twitter_favorite.created_on).first()
    if latest:
        fav = tapi.GetFavorites(since_id=latest.twitter_id)
        logger.info('Training %d favorite(s)' % len(fav))
    else:
        logger.info('Training all favorites')
        fav = tapi.GetFavorites(count=100)  # max allowed 199

    for tweet in fav:
        sample = tweet.user.screen_name + ' ' + tweet.text
        __save_favorite__(tweet)
        classifier.train(sample, 'favorite')
        db.commit()

    # Step 2: classify my stream line
    latest = db().select(db.twitter_usertimeline.ALL,
                         orderby=~db.twitter_usertimeline.created_on).first()
    if latest:
        tl = tapi.GetUserTimeline(twitter_user, since_id=latest.twitter_id, include_rts=True)
        logger.info('Training %d user timeline(s)' % len(tl))
    else:
        logger.info('Training all user timeline')
        tl = tapi.GetUserTimeline(twitter_user, include_rts=True)

    for tweet in tl:
        __save_timeline__(tweet)
        # own timeline posts are trained under 'favorite' as well
        classifier.train(tweet.text, 'favorite')
        db.commit()

    # clear the cache so cached classification results are recomputed
    cache.ram.clear()

    return dict(message="twitter train completed.")


def __save_favorite__(item):
    """Persist one favorited tweet (a python-twitter status) to twitter_favorite."""
    author = item.user
    created = datetime.strptime(item.created_at, '%a %b %d %H:%M:%S +0000 %Y')
    db.twitter_favorite.insert(
        twitter_id=item.id,
        twitter_created_at=created,
        text=item.text,
        user_name=author.name,
        user_screen_name=author.screen_name,
        user_profile_image_url=author.profile_image_url,
    )
        
def __save_timeline__(item):
    """Persist one of the user's own tweets to the twitter_usertimeline table."""
    author = item.user
    created = datetime.strptime(item.created_at, '%a %b %d %H:%M:%S +0000 %Y')
    db.twitter_usertimeline.insert(
        twitter_id=item.id,
        twitter_created_at=created,
        text=item.text,
        user_name=author.name,
        user_screen_name=author.screen_name,
        user_profile_image_url=author.profile_image_url,
    )
        


def cacheclassifier():
    """Pre-classify the latest home-timeline posts, memoizing results in RAM.

    Each post is classified with the fisher classifier and the result stored
    under the post's id via web2py's ``cache.ram`` so later requests can reuse
    it without re-running the classifier.
    """
    logger.info('Classify latest post')

    classifier = docclass.fisherclassifier(docclass.getwords, db)
    classifier.setminimum('favorite', 0.89)

    tapi = twitter.Api(consumer_key=consumer_key, consumer_secret=consumer_secret,
                       access_token_key=access_token, access_token_secret=access_token_secret,
                       cache=None)

    # NOTE(review): GetHomeStatus looks unusual for python-twitter (commonly
    # GetHomeTimeline) — confirm against the installed library version.
    for post in tapi.GetHomeStatus():
        sample = ' '.join((post.user.screen_name, post.text))

        # Classify and memoize; the lambda only runs on a cache miss.
        # cache_time presumably corresponds to one hour — confirm where defined.
        item_cat, rate = cache.ram(post.id,
                                   lambda: classifier.classify(sample),
                                   time_expire=cache_time)

    logger.info('Classify completed')

    return dict(message="classification completed.")
