from dateutil.relativedelta import *
from dateutil.easter import *
from dateutil.rrule import *
from dateutil.parser import *
from datetime import *

import re
from hashlib import md5
from google.appengine.api import memcache
import logging
from logging import error, info, debug
from google.appengine.api.labs import taskqueue

from models import Feed, FeedContent
from google.appengine.api import urlfetch
from google.appengine.api.urlfetch import InvalidURLError, DownloadError
from google.appengine.runtime import  DeadlineExceededError 
from google.appengine.runtime.apiproxy_errors import InterruptedError
import feedparser

import wsgiref.handlers
from google.appengine.ext import webapp
from google.appengine.ext.webapp import template


class Spider(webapp.RequestHandler):
   def get(self, period = "normal"):

	if period == "normal":
		feed = Feed.all().filter( "weight <", 3)	
	elif period == "3":
		feed = Feed.all().filter( "weight >=", 3)	

	step = 5
	k = 0
	results = range(step)

	print "<pre>"

	while len(results) == step: 
		
		tmp = []
		results = feed.fetch(step, k)
		for result in results:
			tmp.append(result.url)
			print result.key()
		taskqueue.add(url = "/worker", 
			params={'results':",".join(tmp) })
		k = k + step
		

class Worker(webapp.RequestHandler):
   # Task-queue worker: POST /worker with a comma-separated "results"
   # parameter of feed URLs.  Each URL is fetched asynchronously; new
   # entries (deduplicated by an md5 content hash) are stored as
   # FeedContent entities.
   def post(self):
	# In-flight async urlfetch RPCs; we wait on all of them at the end
	# so every callback runs before the request finishes.
	rpcs = []

	def handle_result(rpc,url):
	  # Callback for one completed fetch: parse the response as a feed
	  # and store entries not seen before.  All fetch-level failures
	  # below are deliberately swallowed (best-effort crawl: a broken
	  # feed is simply skipped until the next spider run).
	  try:
	     result = rpc.get_result()
	  except DownloadError:
	     return
	  except InvalidURLError:
	     return
	  except DeadlineExceededError:
	     return

	  except InterruptedError:
	     return

	  p = feedparser.parse(result.content)

	  try:
	    info( p['feed']['title'].encode("UTF-8"))
	    # Look up the Feed entity that owns this URL.  fetch(2) would
	    # reveal duplicates, but only feed[0] is ever used.
	    # NOTE(review): if no Feed matches the URL, feed[0] below
	    # raises IndexError, which is not caught here -- confirm feeds
	    # are never deleted between enqueue and worker execution.
	    feed = Feed().all().filter("url =", url).fetch(2)
	    info(feed)
	    for e in p['entries']:
		    # Pick the richest body the entry offers, falling back
		    # to the literal string "empty" when none is present.
		    if  e.has_key('content'):
				content = e.content[0].value
		    elif e.has_key('summary_detail'):
				content = e.summary_detail.value
		    elif e.has_key('value'):
				content = e.value
		    else:
				info(e)
				content = "empty"

		    # Prefer pubDate over updated when both exist.
		    feedDate = ''
		    if e.has_key( 'updated' ):
				feedDate = e.updated
		    if e.has_key('pubDate'):
				feedDate = e.pubDate 

		    # Dedup key: feed key + title + body + date string.
		    # NOTE(review): `hash` shadows the builtin of the same
		    # name within this loop.
		    hash = md5(unicode(str(feed[0].key()) + e.title + content + feedDate).encode("utf8")).hexdigest()


		    previous = FeedContent.all().filter("hash =",hash).fetch(5)

		    # Only store entries whose hash has never been seen.
		    if len(previous) == 0:
			    info(feedDate)
			    # NOTE(review): parse('') raises ValueError when
			    # the entry carried no date -- only KeyError is
			    # caught below, so such entries abort the whole
			    # feed.  Confirm intended.
			    feedDate = parse( feedDate )
			    feedcontent = FeedContent(feed = feed[0],
						title = e.title,
						url = e.link,
						content = content,
						date = feedDate,
						hash = hash
						)
			    feedcontent.put()


	  except KeyError:
	    # Malformed feed (missing title/entry fields): log and drop.
	    error( type(p['feed']['subtitle']))


	# Use a helper function to define the scope of the callback.
	# (Binding rpc/url via a closure-per-call avoids the classic
	# late-binding-in-a-loop bug.)
	def create_callback(rpc, url):
	  return lambda: handle_result(rpc, url)

	results = self.request.get("results")
	error(results)
	results = results.split(",")

	# Fire one async fetch per URL; callbacks run during wait().
	for result in results:
		rpc = urlfetch.create_rpc()
		rpc.callback = create_callback(rpc, result)
		urlfetch.make_fetch_call(rpc, result )
		rpcs.append(rpc)

	# Block until every fetch has completed and been handled.
	for rpc in rpcs:
		rpc.wait()   

def main():
  """Enable debug logging and serve the spider/worker app over CGI."""
  logging.getLogger().setLevel(logging.DEBUG)
  routes = [
    ('/spider/(.*)', Spider),
    ('/spider', Spider),
    ('/worker', Worker),
  ]
  application = webapp.WSGIApplication(routes)
  wsgiref.handlers.CGIHandler().run(application)


if __name__ == '__main__':
  main()

