'''
Created on Jan 17, 2010

@author: roman

$Id: feedreader.py 27 2010-01-24 20:30:38Z sigizmund@gmail.com $
'''

import re
import time
import pprint
import logging
import urllib2 as u2
from datetime import datetime
from django.utils import simplejson   
from google.appengine.runtime import DeadlineExceededError
from google.appengine.ext import db   
from google.appengine.api.labs import taskqueue

import twitter
import urlextractor
from utils import isImageHosting
from model.twitterstatus import TwitterStatus 
from model.lasttwitterid import LastTwitterId

    
# Finds http(s) URLs in tweet text.  NOTE(review): "$-_" inside the character
# class is an ASCII range ($ through _), so the pattern is very permissive --
# kept as-is since already-stored statuses may depend on its exact matches.
findUrlsRe = re.compile('http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&#+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+')

class RedirectHandler(u2.HTTPRedirectHandler):
    '''urllib2 redirect handler that records the redirect target instead of
    following it.  Used by _unshortUrl to find out where a shortened URL
    points without fetching the final page.
    '''
    def __init__(self, reporter):
        # reporter: an object with a setUrl(url) method (see Reporter below)
        # that receives the first redirect target.
        self.reporter = reporter 

    def redirect_request(self, req, fp, code, msg, hdrs, newurl):
        logging.debug('redirect_request %s -- %s' % (msg, newurl))
        # Remember the redirect target, then raise so urllib2 does not
        # follow the redirect; the caller catches the HTTPError and reads
        # the recorded URL back from the reporter.
        self.reporter.setUrl(newurl)
        raise u2.HTTPError(newurl, 0, msg, hdrs, fp)

class Reporter:
    '''Tiny value holder that RedirectHandler writes the discovered
    redirect target into; read it back via the .url attribute.
    '''
    def setUrl(self, url):
        # Record the redirect target for later retrieval.
        self.url = url

def _unshortUrl(shortUrl):
    logging.debug("Unshortening URL %s" % shortUrl)
    r = Reporter()
    opener = u2.build_opener(RedirectHandler(r))
    
    fp = None
    
    try:
        opener.open(shortUrl)
    except u2.HTTPError, e:
        print(e)
        return r.url
    
    return shortUrl

def _parse_datetime(str):
	# We must parse datetime this way to work in python 2.4
	return datetime(*(time.strptime(str, '%a %b %d %H:%M:%S +0000 %Y')[0:6]))

def checkUsers():
    '''Ensure every tracked user has a LastTwitterId entity, creating a
    fresh one (statusId '1') for anyone who does not have one yet.
    '''
    pending = ['sigizmund', 'hd42', 'vicchi', 'siddhuw'] # tough luck guys

    # Index existing LastTwitterId records by user; anyone found in the
    # datastore is removed from the pending list.
    id_by_user = {}
    for record in LastTwitterId.all():
        id_by_user[str(record.user)] = record
        lowered = str(record.user.lower())
        if lowered in pending:
            pending.remove(lowered)

    for name in pending:
        logging.debug("No lastId: %s" % name)

    # Create a starter record for everyone still missing one.
    for name in pending:
        fresh = LastTwitterId(user = name.lower(), statusId = '1')
        fresh.save()
        id_by_user[name] = fresh

    # by now every user has got a valid last Tweet ID

def checkUserUpdates(lastUpdateKey):
    class RedirectHandler(u2.HTTPRedirectHandler):
        pass
    opener = u2.build_opener()
    lastId = LastTwitterId.get(lastUpdateKey)
    
    logging.debug("Fetching feed for user %s, last time obtained at %s" % (lastId.user, lastId.obtainedAt))

    timeline = twitter.UserTimeline(username = str(lastId.user.lower()), since_id = str(lastId.statusId))
    updates = None
    
    try:
        updates = timeline.getLastUpdates()
    except:
        return # next time maybe

    logging.debug("I received %d updates for user %s" % (len(updates), lastId.user))

    for u in updates:
        if len(TwitterStatus.all().filter('statusId = ', str(u['id'])).fetch(1)) > 0:
            continue

        objectFormatted = pprint.pformat(u)
        logging.debug("Next status is: \n %s" % objectFormatted )
        
        status = TwitterStatus(originalText = u['text'], user = lastId.user.lower(), statusId = str(u['id']), json = simplejson.dumps(u))
        status.createdAt = _parse_datetime(u['created_at'])
        shortUrls = urlextractor.extract(u['text'])
    
        urls = ['not_ready']
        
        status.urls = urls
        logging.debug(urls)
        status.save()

    if len(updates) > 0:
        lastId.statusId = str(updates[0]['id'])
        lastId.save() 
        taskqueue.add(url = '/tasks/urlexpander', params = {'username' : str(lastId.user.lower())})
    

def readFeed():
    '''Cron entry point: make sure every tracked user has a LastTwitterId
    record, then poll the ten users whose feeds are stalest.
    '''
    # let's ensure we have lastId for all our users
    checkUsers()

    stalest = LastTwitterId.all()
    stalest.order('obtainedAt')

    # we always take top ten which were not updated for longer
    for record in stalest.fetch(10):
        checkUserUpdates(record.key())


def fetchOldUpdates(username = None):  
    if username == None:
        ids = LastTwitterId.all()
        ids.filter('hasBacklog = ', False)
        ids.order('-obtainedAt')
        u = ids.fetch(1)
    
        if len(u) == 0:
            logging.debug("No users to fetch old stuff, exiting...")
            return
        else:
            username = u[0].user
        
        u[0].hasBacklog = True # not saving yet
    else:
        ids = LastTwitterId.all()
        ids.filter('user = ', username)
        u = ids.fetch(1)
    
    logging.info("Fetching old tweets for %s" % username)
    
    def callback(tweets):
        for u in tweets:
            q = TwitterStatus.all()
            q.filter('statusId = ', str(u['id']))
            
            if len(q.fetch(5)) > 0:
                continue

            shortUrls = findUrlsRe.findall(u['text'])
            if len(shortUrls) == 0:
                continue
            
            status = TwitterStatus(originalText = u['text'], user = username.lower(), statusId = str(u['id']), json = simplejson.dumps(u))
            status.createdAt = _parse_datetime(u['created_at'])
            
            status.urls = ['not_ready']
            
            try:
                status.save()
            except UnicodeDecodeError, e:
                logging.error("Unicode issue")
                logging.error(e)
            
    
    timeline = twitter.UserTimeline(username = username)
    try:
        timeline.getOldUpdates(callback = callback)
        u[0].save()
    except DeadlineExceededError:
        u[0].save()
      
def reprocessStatuses(statuses):
    for u in statuses:
        shortUrls = urlextractor.extract(u.originalText)
        urls = []
        for su in shortUrls:
            if isImageHosting(su):
                urls.append(su)
                continue
            
            logging.debug("Trying short URL %s" % su )
            
            try:
                longUrl = _unshortUrl(su)
                logging.debug("Resolved to long URL %s" % longUrl )
                urls.append(longUrl.decode('utf-8'))
            except Exception, e:
                logging.error(e)
                urls.append(su)
    
        u.urls = urls
        u.save()
      

def reprocessUrls(username = None):
    '''Resolve URLs for up to 100 statuses still marked 'not_ready',
    optionally restricted to a single user.
    '''
    query = TwitterStatus.all().filter('urls = ', 'not_ready')
    if username is not None:
        query.filter('user = ', username)
    reprocessStatuses(query.fetch(100))

def reprocessAllUrls():
    '''Resolve URLs for every stored status (capped at 10000).'''
    reprocessStatuses(TwitterStatus.all().fetch(10000))
    