# Create your views here.
from rotiadm.feed import parser
from rotiadm.app.models import *
import datetime
from django.http import HttpResponse
import logging
import urllib
from django.db import connection

def fetch(request, resource_id):
    """Fetch a single feed resource by primary key and return the ids of
    the entries that were saved.

    On a feed parse failure the parser's error message is returned as the
    response body instead of letting the exception propagate to Django.
    """
    resource = Resource.objects.get(pk=resource_id)
    try:
        saves = fetch_resource(resource)
        # NOTE(review): "{[1,2]}" is not valid JSON -- confirm the client
        # really expects this exact format before changing it.
        return HttpResponse("{[%s]}" % (','.join(saves)))

    except parser.ParseError as e:
        # 'except X as e' works on Python 2.6+ and Python 3; the old
        # 'except X, e' form is a syntax error on Python 3.
        return HttpResponse("%s" % (e.msg,))


def fetch_resource(resource):
    """Parse a resource's feed and upsert its entries.

    An existing entry (matched by link) is re-saved only when its
    published date moved by more than 5 seconds; otherwise it is skipped.

    Returns a list of saved Entry ids as strings.
    Raises parser.ParseError when the feed cannot be parsed.
    """
    entries = parser.parse(resource.feed_url)

    # Stamp the resource once and reuse the same timestamp for all entries
    # saved in this run.
    now = datetime.datetime.today()
    resource.last_updated = now
    resource.save()

    saves = []
    for entry in entries:
        logging.debug("entry title %s was fetch" % (entry['title'],))
        existing = Entry.objects.filter(link=entry['link'])
        if existing:
            old_entry = existing[0]
            # BUG FIX: timedelta.seconds holds only the seconds component
            # (days are ignored, and negative deltas wrap), so an entry
            # republished exactly N days later looked unchanged.  Compare
            # the real elapsed time instead.
            delta = entry['published_date'] - old_entry.published_date
            if abs(delta.total_seconds()) > 5:
                # update case
                tosave = old_entry
            else:
                # unchanged -- nothing to do
                continue
        else:
            # insert case
            tosave = Entry()

        tosave.link = entry['link']
        tosave.content = entry['content']
        tosave.title = entry['title']
        tosave.tags = entry['tags']
        tosave.published_date = entry['published_date']
        tosave.last_updated = now
        tosave.resource = resource
        # Mark for later indexing by batch_fix_unindexed / batch_fetch.
        tosave.indexed = False

        tosave.save()
        saves.append(str(tosave.id))

    return saves

def batch_fix_unindexed():
    """Re-post every entry whose 'indexed' flag is false to the local
    indexer service.

    Raw SQL is used deliberately: the bit field cannot be expressed
    correctly through the ORM in both the query and the update case.
    """
    # entries = Entry.objects.filter(indexed=False)   # broken, see above
    cursor = connection.cursor()
    cursor.execute("select id from entry where indexed = false")
    while True:
        row = cursor.fetchone()
        if row is None:
            break
        eid = row[0]
        ret = urllib.urlopen("http://localhost/roti-0.1/index/entry/%s" % (eid,))
        # BUG FIX: this line previously logged the builtin function 'id'
        # (printing "<built-in function id>") instead of the entry id.
        logging.debug("post to roti %s with ret = %s" % (eid, ret.read()))

def batch_fetch(days, size):
    """Fetch up to `size` resources that have not been updated for `days`
    days and post each newly saved entry to the local indexer.

    A resource whose feed fails to parse (or raises a LookupError) is
    flagged auto_fetch=False so future batch runs skip it.
    """
    base_line = datetime.datetime.today() - datetime.timedelta(days=days)
    resources = Resource.objects.filter(last_updated__lte=base_line).order_by('-last_updated')[:size]
    cnt = len(resources)
    logging.debug("found %d resources that need to update" % (cnt,))
    for resource in resources:
        try:
            saves = fetch_resource(resource)
            # Renamed from 'id' to avoid shadowing the builtin.
            for entry_id in saves:
                ret = urllib.urlopen("http://localhost/roti-0.1/index/entry/%s" % (entry_id,))
                logging.debug("post to roti %s with ret = %s" % (entry_id, ret.read()))
        except (parser.ParseError, LookupError) as e:
            # 'as e' form is valid on Python 2.6+ and Python 3.
            logging.debug("error %s while fetch feed of resource id %d " % (e, resource.id))
            # Disable auto fetching so a permanently broken feed is not
            # retried on every run.
            resource.auto_fetch = False
            resource.save()


def test(days, size):
    """Dry-run variant of batch_fetch: parse the feeds of stale resources
    and log every entry link, without saving or indexing anything."""
    base_line = datetime.datetime.today() - datetime.timedelta(days=days)
    resources = Resource.objects.filter(last_updated__lte=base_line).order_by('-last_updated')[:size]
    for resource in resources:
        # NOTE(review): the other functions read resource.feed_url; confirm
        # the model really has a separate 'url' attribute, otherwise this
        # line raises AttributeError.
        logging.debug("try to fetch %s" % (resource.url,))
        try:
            entries = parser.parse(resource.feed_url)
            for entry in entries:
                logging.debug("entry - %s" % (entry['link'],))

        except parser.ParseError as e:
            # 'as e' form is valid on Python 2.6+ and Python 3.
            logging.debug("error %s while fetch feed of resource id %d " % (e, resource.id))
            
            