#coding:utf-8
# RSS feed collector: fetches configured feeds, filters out already-seen
# items, and returns the new entries for downstream storage/processing.

import socket
# Global socket timeout so a hung feed server cannot stall a fetch forever.
socket.setdefaulttimeout(20)


import feedparser
#from pmap import pmap
#from multiprocessing.dummy import Pool

# Project-local helpers: per-feed URI/data extraction, a simple Future for
# concurrent fetching, and logger setup.
from rss_option import getUri,getData
from future import Future
from rss_log import init

# Module-level logger shared by all helpers in this file.
_log = init("rss")

def process(Rs, Uri):

	print 'Uri Num',Uri.size()

	def _parse(uri):
		def pparse(uri):
			return feedparser.parse(uri)
		try:
			data = pparse(uri)
			#return (data,uri)
		except Exception,e:
			_log.error(uri+" "+str(e))
			#return (None,uri)
			data = None
		_log.info(uri)
		return data

	# P = Pool(32)
	# Es = P.map(_parse,Rs)

	Ds = [ (Future(_parse, r),r) for r in Rs]
	Es = [ (e(),r) for e,r in Ds ]
	Es = [ (e,r) for e,r in Es if e != None ]
	#Es = pmap(_parse, Rs)
	print 'Parse Done'

	def _is_in(m,r):
		i = getUri(m,r)
		uri = r+" "+i
		if Uri.is_in(uri):
			return True
		else:
			return False

	def _extract(er):
		e = er[0]
		r = er[1]
		ms = [m for m in e['items'] if not _is_in(m,r)]
		#print len(ms),r
		return (ms,r)

	Es = [ _extract(er) for er in Es ]
	#Es = pmap(_extract, Es)
	#P = Pool(32)
	#Es = P.map(_extract, Es)

	Es = [ (ms,r) for ms, r in Es if len(ms) > 0]

	def _process(ms,r):
		ns = [ {'rss':r, 'data':getData(m,r)} for m in ms ]
		return ns

	Ns = [ (_process(ms,r)) for ms,r in Es ]
	Ns = [ n for ns in Ns for n in ns]

	print 'Collect', len(Ns)
	return Ns

