import feedparser
import strip
import string
import pickle
import sys
import calendar
import md5
import time
from sys import exit


def stripNoPrint(str):
	"""Return a copy of *str* with all non-printable characters removed.

	A character is kept iff it appears in ``string.printable``.
	(Parameter name shadows the builtin ``str``; kept for interface
	compatibility with existing callers.)
	"""
	# Bug fix: the original tested ``string.printable.find(char)``, which is
	# 0 (falsy) for the first printable char ('0') and -1 (truthy) for
	# non-printable chars -- i.e. the filter was inverted. Use membership.
	# ``join`` also avoids the quadratic ``+=`` string build.
	return "".join(char for char in str if char in string.printable)


if __name__ == "__main__":
	if len(sys.argv) < 2:
		print "nofeed"
		exit(1)
	##i need at least one parameter
	et = {}
	try: 
		f=open("pickled.bin")
		et = pickle.load(f)
		f.close()
		#et has a dictionary which has feed:lastnewsdate pairs
	except:
		pass
	try:
		fd = feedparser.parse(sys.argv[1])
	except:
		print feedbad
		exit(1)
	if et.has_key(sys.argv[1]):
		lastnewsdate=et[sys.argv[1]]
	else:
		lastnewsdate=0 ##nothing hashes to 0
	if(len(fd.feed)==0):
		print "feedbad"
		exit(1)
	if not hasattr(fd.feed,"title"):
		print "feedbad"
		exit(1)
	print "feedok"
	print stripNoPrint(fd.feed.title.encode("utf-8","ignore"));
	print stripNoPrint(fd.href.encode("utf-8","ignore"));
	l=limit = len(fd.entries);
	for i in range(l):
		if md5.new(fd.entries[i].summary.encode("utf-8","ignore")).hexdigest()==lastnewsdate:
			limit=i
	s=range(limit)
	s.reverse()
	for i in s:
		print stripNoPrint(fd.entries[i].title.encode("utf-8","ignore"))
		#print fd.entries[l-i-1].author
		#print fd.entries[l-i-1].link
		if(hasattr(fd.entries[i],"updated_parsed")):
			print calendar.timegm(fd.entries[i].updated_parsed)
		else:
			print calendar.timegm(time.gmtime())
		print strip.strip(stripNoPrint(fd.entries[i].summary.encode("utf-8","ignore"))).replace('\\','\\\\').replace("\n","\\n")	
	et[sys.argv[1]]=md5.new(fd.entries[0].summary.encode("utf-8","ignore")).hexdigest()
	f=open("pickled.bin","wb")
	#print "despues de esto va et"
	#print et
	pickle.dump(et, f, True)
		
		
		

		



