# Abstract a feed of podcasts.

import feedparser
import urlparse
import diskmgr
import urllib2
import db
from episode import Episode
import httplib
import os
import traceback
import time
import datetime
import pickle
from log import log, logdebug

def Sum(a, b):
  """Binary addition combiner, suitable as the function for reduce()."""
  total = a + b
  return total

def TrueToOne(a):
  """Map a truthy value to 1 and a falsy value to 0."""
  return 1 if a else 0

class Feed:
  """Abstract an RSS-ish feed of podcast episodes.

  A Feed is constructed from (and persisted to) a row of the "feeds"
  table; its episodes live in the "episodes" table, keyed by feed_id.
  Episode media files live on disk under a per-feed directory named
  after the feed id (managed via diskmgr).
  """

  # Default cap on simultaneously-downloaded episode files per feed.
  DEFAULT_MAX_FILES = 5

  @staticmethod
  def Columns():
    """Return the column names of the "feeds" table, in schema order."""
    return ["url", "etag", "last_modified", "last_parsed", "valid", "feed_id",
        "max_files", "stats", "title" ]

  def __init__(self, row):
    """Construct a feed from a database row.

    Args:
      row: mapping with at least "url" and "feed_id"; every other column
        is optional and falls back to a sensible default.
    """
    logdebug("New feed from", row)
    self._url = row["url"]
    self._title = db.RowGet(row, "title", "Untitled")
    self._etag = db.RowGet(row, "etag", None)
    # Timestamps are stored as POSIX seconds; keep datetimes in memory.
    self._last_modified = datetime.datetime.fromtimestamp(
        db.RowGet(row, "last_modified", 0))
    self._last_parsed = datetime.datetime.fromtimestamp(
        db.RowGet(row, "last_parsed", 0))
    self._valid = db.RowGet(row, "valid", True)
    self._feed_id = row["feed_id"]
    self._max_files = db.RowGet(row, "max_files", Feed.DEFAULT_MAX_FILES)

    # SECURITY NOTE(review): unpickling executes arbitrary code if the
    # data is attacker-controlled. The stats blob comes from our own
    # database, so this is acceptable only as long as the db is trusted.
    stats = db.RowGet(row, "stats", "")
    if stats:
      self._stats = pickle.loads(stats)
    else:
      self._stats = {}

    self._unsubscribing = False
    self._parsed_feed = None
    self._episodes = []
    self._episode_id_to_episode = {}
    for episode_row in db.Select("*", "episodes", {"feed_id": self._feed_id}):
      episode = Episode(episode_row)
      if episode.valid():
        # Fixed: index the Episode we already built instead of
        # constructing a second, identical one from the same row.
        self.IndexEpisode(episode)
      else:
        log("Deleting invalid episode", episode)
        episode.Remove()

    # Remove on-disk files that no longer correspond to a known episode.
    diskmgr.DeleteDanglingFiles(str(self._feed_id), self.EpisodeExists)

  @staticmethod
  def Create(url):
    """Insert a new feed row for url and return the corresponding Feed."""
    values = {
        "url": url,
    }
    feed_id = db.Insert("feeds", values)
    return Feed({ "url": url, "feed_id": feed_id })

  def __str__(self):
    return "<Feed id=%s url=%s>" % (self._feed_id, self._url)

  def title(self):
    return self._title

  def set_valid(self, valid):
    """Set validity and persist it immediately."""
    self._valid = valid
    db.Update("feeds",
        { "valid": self._valid },
        { "feed_id": self._feed_id })

  def valid(self):
    """A feed is invalid if it's known to have been deleted."""
    return self._valid

  def unsubscribing(self):
    return self._unsubscribing

  def set_unsubscribing(self, unsubscribing):
    # In-memory only; not persisted to the feeds table.
    self._unsubscribing = unsubscribing

  def set_url(self, url):
    # In-memory only; persisted by the caller if needed.
    self._url = url

  def url(self):
    return self._url

  def set_etag(self, etag):
    self._etag = etag

  def etag(self):
    return self._etag

  def set_last_modified(self, last_modified):
    self._last_modified = last_modified

  def last_modified(self):
    return self._last_modified

  def last_parsed(self):
    return self._last_parsed

  def feed_id(self):
    return self._feed_id

  def stats(self):
    return self._stats

  def set_stat(self, key, value):
    """Set a stat in memory; call CommitStats() to persist."""
    log("Set stat", key, "to", value)
    self._stats[key] = value

  def incr_stat(self, key, by=1):
    """Increment a numeric stat, creating it at 0 if absent."""
    if key not in self._stats:
      self._stats[key] = 0
    self._stats[key] += by

  def update_ma_stat(self, key, value, alpha=0.5):
    """Update an exponential moving-average stat.

    Keep alpha of the old value and average in 1-alpha of the new value;
    the first observation seeds the average directly.
    """
    if key in self._stats:
      self._stats[key] = alpha * self._stats[key] + (1 - alpha) * value
    else:
      self._stats[key] = value

  def update_windowavg_stat(self, key, value, window_size):
    """Append value to a sliding window of at most window_size samples."""
    history = self._stats.get(key, [])
    if len(history) >= window_size:
      history = history[1:]
    history.append(value)
    self._stats[key] = history

  def CommitStats(self):
    """Persist the in-memory stats dict to the feeds table (pickled)."""
    log("Commit stats for feed", self, ":", self._stats)
    db.Update("feeds", { "stats": pickle.dumps(self._stats) },
        {"feed_id": self._feed_id})

  def SetChosen(self):
    """Record that this feed was just chosen for playback."""
    log("Set feed", self._feed_id, "chosen")
    self.set_stat("last_choose_time", int(time.time()))
    self.CommitStats()

  def SetListened(self, episode):
    """Update listened stats for the feed, mark the episode listened, and
    unlock it."""
    self.update_windowavg_stat("p_listen", 1, 7)
    if self._episodes and episode == self._episodes[-1]:
      # You just listened to the newest episode. (Parenthesized print is
      # valid in both Python 2 and 3.)
      print("You listened to the newest episode")
      self.update_ma_stat("dl_to_listen_newest_time",
          int(time.time() - episode.updated()), 0.8)
    self.CommitStats()
    episode.SetListenedAndUnlock()

  def SetSkipped(self, episode):
    """Record a skip (counts as a 0 in the p_listen window) and unlock."""
    self.update_windowavg_stat("p_listen", 0, 7)
    self.CommitStats()
    episode.SetListenedAndUnlock()

  def Parse(self):
    """Download a feed. Parse it. Update internal state.
    Returns True if the feed was parsable and sets parsed_feed_. Returns
    False if the feed was not parseable."""
    # Record the attempt time up front so failing feeds are not retried
    # in a tight loop.
    self._last_parsed = datetime.datetime.now()
    db.Update("feeds",
        {"last_parsed": time.mktime(self._last_parsed.timetuple())},
        {"feed_id": self._feed_id})

    # Pass conditional-GET headers so unchanged feeds return 304 cheaply.
    args = {}
    if self.etag():
      args["etag"] = self.etag()
    if self.last_modified():
      args["modified"] = self.last_modified().timetuple()
    # Some servers only serve enclosures to known podcast clients.
    args["agent"] = "iTunes/4.7 (Macintosh; U; PPC Mac OS X 10.2)"

    self._parsed_feed = None
    try:
      log("Calling parse on url", self.url())
      parsed_feed = feedparser.parse(self.url(), **args)
    except Exception:
      # Fixed: was a bare except, which also swallowed KeyboardInterrupt
      # and SystemExit.
      log("**EXCEPTION IN FEEDPARSER**")
      log(traceback.format_exc())
      return False
    # bozo_exception means the feed was malformed but feedparser may
    # still have salvaged usable entries; log and continue.
    bozo_exception = parsed_feed.get("bozo_exception", None)
    if bozo_exception:
      log("BOZO EXCEPTION:", bozo_exception)

    if "status" not in parsed_feed:
      log("No status. Smacks of network problems.")
      return False

    log("Parse status is", parsed_feed.status)
    if parsed_feed.status == 301:
      # Permanent redirect
      self.set_url(parsed_feed.href)

    if parsed_feed.status == 410:
      # Feed is gone
      self.set_valid(False)

    if parsed_feed.status >= 200 and parsed_feed.status < 400:
      self._parsed_feed = parsed_feed
      self.UpdateEntryList()
      return True
    else:
      return False

  def UpdateEntryList(self):
    """Merge newly parsed entries into the episode list.

    Links the first valid, previously-unseen enclosure of each entry as
    an Episode, then refreshes and persists the feed's etag, title and
    last-modified time. Requires self._parsed_feed (set by Parse).
    """
    if "entries" not in self._parsed_feed:
      log("No entries in feed.")
      return

    for entry in self._parsed_feed.entries:
      if "enclosures" not in entry:
        log("No enclosures in entry", entry)
        continue

      # Fixed: a missing "updated_parsed" used to crash
      # time.mktime(None); fall back to "now".
      updated_parsed = entry.get("updated_parsed", None)
      if updated_parsed is None:
        updated_parsed = datetime.datetime.now().timetuple()
      updated = datetime.datetime.fromtimestamp(time.mktime(updated_parsed))

      new_episode = True
      valid_episodes = []
      for enclosure in entry.enclosures:
        if "href" not in enclosure:
          log("Enclosure missing href")
          continue

        if "type" in enclosure:
          if enclosure.type != "audio/mpeg":
            log("Unknown enclosure type", enclosure.type)
            continue

        # Fixed: int(None) raised TypeError when the enclosure carried
        # no (or a malformed) "length" attribute.
        try:
          length = int(enclosure.get("length", 0))
        except (TypeError, ValueError):
          length = 0

        episode_row = {
            "title": entry.get("title", "No title"),
            "summary": entry.get("summary", ""),
            "updated": time.mktime(updated.timetuple()),
            "duration": entry.get("itunes:duration", None),
            "href": enclosure.href,
            "length": length,
            "episode_id": Episode.ComputeEpisodeId(entry.get("guid", None),
                                                   enclosure.href),
            "feed_id": self._feed_id,
            "listened": False,
            "filename": None
            }
        episode = Episode(episode_row)

        if not episode.valid():
          log("Episode not valid")
          continue

        if self.HaveEpisode(episode.episode_id()):
          log("Already have", episode)
          # Feeds are newest-first; once we see a known episode, the
          # rest of this entry is old too.
          new_episode = False
          break

        valid_episodes.append(episode)

      if not new_episode or not valid_episodes:
        # Already have this one
        log("old episode or no valid enclosures.")
        continue

      # Only the first valid enclosure of an entry becomes an Episode.
      self.Link(valid_episodes[0])

    # Fixed: this metadata refresh used to live inside the per-entry
    # loop, so etag/title/last_modified were only updated when a new
    # episode was linked -- yet the db.Update below always wrote the
    # (possibly stale) values. Refresh once, unconditionally.
    self.set_etag(self._parsed_feed.get("etag", None))
    feed = self._parsed_feed.get("feed", None)
    if feed:
      self._title = feed.get("title", "Untitled")
      log("Set title to", self._title)
    if self._title == "Untitled":
      # Fixed: logged the bound method self.feed_id, not the id itself.
      log("Failed to title", self._feed_id, ". Parsed feed:",
          self._parsed_feed)
    else:
      log("The title of", self._feed_id, "is", self._title)
    self._last_modified = datetime.datetime.fromtimestamp(
        time.mktime(self._parsed_feed.get("updated_parsed",
          datetime.datetime.now().timetuple())))
    db.Update("feeds",
        { "last_modified": time.mktime(self._last_modified.timetuple()),
          "etag": self.etag(),
          "title": self.title() },
        { "feed_id": self._feed_id })

  def HaveEpisode(self, episode_id):
    """Return True if episode_id is already indexed for this feed."""
    return episode_id in self._episode_id_to_episode

  def Unlink(self, episode):
    """Delete an episode from the database and drop it from our indexes."""
    log("Unlinking", episode)
    episode.Remove()
    del self._episode_id_to_episode[episode.episode_id()]
    # Fixed: referenced an undefined global "episodes" (NameError at
    # runtime); scan our own list and drop the matching entry.
    for i in range(len(self._episodes)):
      if episode.episode_id() == self._episodes[i].episode_id():
        del self._episodes[i]
        return

  def Link(self, episode):
    """Insert a new episode into the database and index it."""
    log("Linking", episode)
    episode.Insert()
    self.IndexEpisode(episode)

  def IndexEpisode(self, episode):
    """Add an episode to the in-memory list (kept sorted) and id map."""
    logdebug("Indexing", episode)
    self._episodes.append(episode)
    self._episodes.sort()
    self._episode_id_to_episode[episode.episode_id()] = episode

  def FilenameToEpisodeId(self, filename):
    """Convert a filename "feed_id/episode_id" to "episode_id".

    Fixed: the previous os.path.join(parts[1:])[0] only worked by
    accident in Python 2 (join returned the tuple unchanged); take the
    basename directly.
    """
    return os.path.split(filename)[1]

  def EpisodeExists(self, filename):
    """Return True if filename corresponds to a known, downloaded episode.

    Used as the keep-predicate for diskmgr.DeleteDanglingFiles.
    """
    episode_id = self.FilenameToEpisodeId(filename)
    if episode_id not in self._episode_id_to_episode:
      log("** DANGLING FILE **")
      log("episode id not known.")
      log("filename:", filename)
      log("episode id:", episode_id)
      log("known ids:", self._episode_id_to_episode.keys())
      return False

    episode = self._episode_id_to_episode[episode_id]
    if not episode.downloaded():
      log("** DANGLING FILE **")
      log("episode claims not to be downloaded (perhaps an interrupted dl?)")
      log("filename:", filename)
      log("episode id:", episode_id)
      return False

    return True

  def NumDownloadable(self):
    """Returns the number of episodes that have not been listened to and have
    not been downloaded."""
    if not self._episodes:
      return 0
    return reduce(Sum,
        map(TrueToOne, map(lambda e: e.Downloadable(), self._episodes)))

  def NumWithFiles(self):
    """Return the number of episodes whose media file is on disk."""
    if not self._episodes:
      return 0
    return reduce(Sum,
        map(TrueToOne, map(lambda e: e.downloaded(), self._episodes)))

  def NumListened(self):
    """Return the number of episodes already listened to."""
    if not self._episodes:
      return 0
    return reduce(Sum,
        map(TrueToOne, map(lambda e: e.listened(), self._episodes)))

  def NumEpisodes(self):
    return len(self._episodes)

  def DownloadEpisodes(self):
    """Download up to max_files downloadable episodes, newest first.

    Deletes listened files if needed to make room.
    """
    log("Downloading episodes for ", self)
    log("Max files is", self._max_files)
    log("Num downloadable is", self.NumDownloadable())
    num_to_download = min(self._max_files, self.NumDownloadable())
    log("Would like to download", num_to_download)
    if not num_to_download:
      return

    space_available = self._max_files - self.NumWithFiles()
    log("Num slots available is", space_available)
    if space_available < num_to_download:
      log("Gotta make room for more files.")
      self.DeleteListenedFiles(num_to_download - space_available)
      # Recompute: DeleteListenedFiles may have freed fewer than asked.
      space_available = self._max_files - self.NumWithFiles()

    num_to_download = min(num_to_download, space_available)
    log("Plan to download", num_to_download)

    if num_to_download == 0:
      log("No room for more downloads.")
      return

    # _episodes is sorted oldest-first; walk in reverse for newest-first.
    log("Walking list of episodes in reverse to find download candidates.")
    episodes_copy = list(self._episodes)
    episodes_copy.reverse()
    for episode in episodes_copy:
      log("  Trying to download", episode, ". Have", num_to_download, "left")
      if num_to_download == 0:
        return
      if episode.Downloadable():
        log("  ...Episode is downloadable, giving it a go")
        if episode.Download() == episode.DOWNLOAD_OK:
          num_to_download -= 1
      else:
        log("  (not downloadable: %s)" % episode.WhyNotDownloadable())

  def DeleteListenedFiles(self, num):
    """Delete up to num files belonging to already-listened episodes."""
    log("Would like to delete", num, "files")
    count = 0
    num_ineligible = 0
    for episode in self._episodes:
      logdebug("  Trying to delete", episode, ". Have deleted", count, "of",
          num)
      if count == num:
        return
      if episode.downloaded() and episode.listened():
        log("**Deleting episode's file.")
        episode.DeleteFile()
        count += 1
      else:
        num_ineligible += 1
    log("Deleted", count, "of", num, "desired.", num_ineligible,
        "were ineligible.")

  def UpToDate(self, max_age, now=None):
    """Return True if the feed was parsed within the last max_age seconds.

    Args:
      max_age: freshness window in seconds.
      now: POSIX timestamp to compare against; defaults to time.time().
    """
    last_parsed_timet = time.mktime(self._last_parsed.timetuple())
    if now is None:
      now = time.time()
    oldest_uptodate_timet = now - max_age
    return last_parsed_timet >= oldest_uptodate_timet

  def Remove(self):
    """Delete this feed, all its episodes, and all its files on disk."""
    for episode in self._episodes:
      episode.Remove()
    # Keep-predicate always False: every remaining file is dangling now.
    diskmgr.DeleteDanglingFiles(str(self._feed_id), lambda filename: False)
    db.Remove("feeds", {"feed_id": self._feed_id})

  def GetEpisodeIDs(self):
    return list(self._episode_id_to_episode.keys())

  def GetEpisode(self, episode_id):
    """Return the episode with episode_id, or None if unknown."""
    try:
      return self._episode_id_to_episode[episode_id]
    except KeyError:
      return None

  def HasUnlistenedEpisodes(self):
    """True if some downloaded episode has not been listened to."""
    return self.GetNextUnlistenedEpisode() is not None

  def HasDownloadedEpisodes(self):
    return any(map(lambda episode: episode.downloaded(), self._episodes))

  def GetNextUnlistenedEpisode(self):
    """Return the newest downloaded-but-unlistened episode, or None."""
    episodes_copy = list(self._episodes)
    episodes_copy.reverse()
    for episode in episodes_copy:
      if not episode.listened() and episode.downloaded():
        return episode
    return None

# Ensure the backing "feeds" table exists before any Feed is constructed.
# Timestamps are stored as POSIX seconds; "stats" is a pickled dict.
db.PreInitSql(
    """create table if not exists feeds (
         url text not null,
         etag text,
         last_modified integer,
         last_parsed integer,
         valid integer,
         feed_id integer primary key autoincrement,
         max_files integer,
         stats text,
         title text
      )""")
