import feedparser
import pickle
import time
from config import config

class Rss:
  """Persist per-user (JID) RSS subscriptions in the pickle file at config['data'].

  The on-disk structure is {jid: [{'url': str, 'lastpost': time-tuple}, ...]}.
  """

  # Sentinel "epoch" used as the initial 'lastpost' for a new subscription:
  # a time.struct_time-style 9-tuple far enough in the past that every
  # current feed entry compares as newer.
  _INITIAL_LASTPOST = (2000, 4, 3, 1, 0, 0, 1, 94, -1)

  def _load(self):
    # Read the whole subscription mapping from disk.
    with open(config['data'], 'rb') as f:
      return pickle.load(f)

  def _save(self, data):
    # Rewrite the data file; opened for writing only after the new state
    # exists, so a failure while building it cannot truncate the file
    # (the original opened 'wb' before mutating, risking data loss).
    with open(config['data'], 'wb') as f:
      pickle.dump(data, f)

  def addurl(self, jid, url):
    """Subscribe jid to url, seeding 'lastpost' with the initial sentinel date."""
    data = self._load()
    data.setdefault(jid, []).append({'url': url, 'lastpost': self._INITIAL_LASTPOST})
    self._save(data)

  def listfeeds(self, jid):
    """Return jid's list of feed dicts, or [] if jid has no subscriptions."""
    return self._load().get(jid, [])

  def remove(self, jid, id):
    """Remove jid's feed at index id and persist the change.

    Raises KeyError (unknown jid) or IndexError (bad index), matching the
    original behavior.
    """
    data = self._load()
    data[jid].pop(id)
    self._save(data)

class Reader(object):
  """Poll every subscribed feed and collect entries newer than each feed's
  stored 'lastpost' timestamp, advancing the timestamps on disk."""

  def read(self):
    """Return {jid: [new feed entries]} and persist updated 'lastpost' dates.

    Bug fix vs. the original: new entries are now accumulated across ALL of a
    jid's feeds — previously `result[jid]` was reassigned inside the per-feed
    loop, so only the last feed's new entries survived.
    """
    # Load subscriptions: {jid: [{'url': str, 'lastpost': time-tuple}, ...]}
    with open(config['data'], 'rb') as fh:
      data = pickle.load(fh)

    result = {}
    for jid, posts in data.items():
      new_entries = []
      for post in posts:
        feed = feedparser.parse(post['url'])
        # Only the five most recent entries of each feed are considered.
        entries = feed['items'][:5]
        last_seen = post['lastpost']
        newest = last_seen
        for entry in entries:
          # NOTE(review): assumes entry.updated_parsed is always a comparable
          # time tuple — feedparser can yield None for undated entries; verify.
          stamp = entry.updated_parsed
          if stamp > last_seen:
            new_entries.append(entry)
          if stamp > newest:
            newest = stamp
        # 'post' is the very dict stored in data, so mutating it in place
        # updates what gets pickled below — no fragile .index() lookup
        # (which also matched the wrong element on duplicate feed dicts).
        post['lastpost'] = newest
      if posts:
        # Match original semantics: a jid with zero subscriptions gets no
        # key in the result at all.
        result[jid] = new_entries

    # Persist the advanced 'lastpost' timestamps.
    with open(config['data'], 'wb') as fh:
      pickle.dump(data, fh)
    return result

  def stop(self):
    """Clear the run flag — presumably polled by an external loop; confirm caller."""
    self.on = False
