import feedparser
import datetime
import calendar
import time
import logging
import logging.config
from urllib import urlopen
from BeautifulSoup import BeautifulSoup
from pymongo.connection import Connection
from pymongo.errors import DuplicateKeyError


# Module-level MongoDB handle; everything below works against the "feeds" db.
mongoConnection = Connection()
db = mongoConnection.feeds
# NOTE(review): opened for writing but never written to or closed anywhere
# in this file -- presumably meant for rendered HTML output; confirm usage
# before removing.
g = open('/home/ruxu/Templates/python/reader/out.html','wb')
# Timestamp captured once at import time; stamped onto every link document
# processed during this run (so all links in one run share the same time).
time_now = datetime.datetime.now()


class retrieveArticles(object):

  def __init__(self):
    pass

  def flush_all(self):
    db.entries.remove({})

  def add_entry(self):
    pass



  def add_from_scratch(self):

    db.entries.ensure_index('url')
    cnt = 0
    for link in db.link.find():
      # link-ul preluat din mongo
      # time = datetime.datetime.now()
      line = link['link']
      db.link.update({'link':line}, {'$set':{'time':time_now}}, False, True)
      d = feedparser.parse(line)
      print d.feed.title

      #iau numarul de entry-uri din FEED
      length = len(d.entries)

      print 'ajung aici'
      #iau html-ul de la link-ul primit
      for i in range(0,length):
        '''
        print 'Titlu, link, data'
        print d.entries[i].title
        print d.entries[i].link
        # calendar.timegm e inversa lui time.gmtime
        print calendar.timegm(d.entries[i].updated_parsed)
        print d.entries[i].updated_parsed
        print time.gmtime(calendar.timegm(d.entries[i].published))
        print 'Detalii'
        print d.entries[i].summary_detail.type
        print 'Value'
        print d.entries[i].summary_detail.value
        print d.entries[i].summary_detail.language
        print 'Link-uri'
        print d.entries[i].link
        print d.entries[i].links[0]
        print d.entries[i].links[0].rel
        print d.entries[i].links[0].href
        '''
        published_time = calendar.timegm(d.entries[i].updated_parsed)
        title_published = d.entries[i].title

        url_published = d.entries[i].link

        try:
          cnt = cnt + 1
          print d.entries[i].title
          db.entries.insert({ '_id':d.entries[i].link, \
                              'title':d.entries[i].title, \
                              'url':d.entries[i].link, \
                              'type':d.entries[i].summary_detail.type, \
                              'published':calendar.timegm(d.entries[i].updated_parsed), \
                              'value':d.entries[i].summary_detail.value,\
                              'language': d.entries[i].summary_detail.language
                            })

          print 'Added feed '
        except DuplicateKeyError:
          print 'Mai exista deja intrarea'

        '''
        if (db.entries.find({'url':url_published}).count() > 0):
          print 'Intrare existenta'
        else:
          cnt = cnt + 1
          print d.entries[i].title
          db.entries.insert({ 'title':d.entries[i].title, \
                              'url':d.entries[i].link, \
                              'type':d.entries[i].summary_detail.type, \
                              'published':calendar.timegm(d.entries[i].published), \
                              'value':d.entries[i].summary_detail.value,\
                              'language': d.entries[i].summary_detail.language
                            })
        '''
        '''
        html = urlopen(d.entries[i].link).read()
        soup = BeautifulSoup(html)
        '''
        i = i + 1
        print i
    print 'Am introdus ', cnt, ' articole'

if __name__=='__main__':

  # Build the retriever and pull every feed listed in the database.
  article_loader = retrieveArticles()
  article_loader.add_from_scratch()
  #article_loader.flush_all()
