#!/usr/bin/python
"""muxer.py

Create a list of feeds and then mix them together into one super feed.
@author Philip Cadigan < phil@inkhorn.org >
@date 3/24/2009
"""
# ---------- CUSTOMIZE FOR YOUR SITE ----------

# These names are needed right here, before the main import block further
# down the file runs -- this configuration section executes first.
from time import gmtime, strftime

# for each feed you wish to add, simply put the URL in quotes followed by a comma below.
feed_list = [] # ex: ["http://feeds.feedburner.com/inkhorn/ZPyO","http://del.icio.us/rss/pcad",]

# the meta information for your feed.
meta = {}
meta['title'] = ""        # ex: "The inkhorn.org feed"
meta['link'] =  ""        # ex: "http://inkhorn.org"
meta['feedLink'] = ""     # ex: 'http://inkhorn.org/feed/'
meta['description'] = ""  # ex: "philip cadigan's wonderful world of inkhornism"
# BUG FIX: the original line called format_date(gmtime()) here, but
# format_date is defined (and gmtime imported) much further down the file,
# so the module crashed with a NameError on import.  Format inline instead,
# using the same RFC 822 format string format_date() uses.
meta['pubDate'] = strftime("%a, %d %b %Y %H:%M:%S +0000", gmtime())  # now!
meta['generator'] = "feedmux"

# the name of the file where your new feed will live
feed_file = "new.xml" # ex: /home/me/web/rss.xml

# ---------- for basic functionality you shouldn't need to touch this ----------   

import feedparser
from operator import itemgetter
import os
import stat
import sys
import traceback
from time import gmtime, strftime

def muxer(site_list):
  """Merge the entries of several parsed feeds into one list, newest first.

  site_list -- parsed feed objects (each exposing an .entries sequence)
  Returns all entries sorted by their 'updated_parsed' timestamp in
  descending order.
  """
  merged = [entry for site in site_list for entry in site.entries]
  # ascending sort followed by a reversing slice, matching the original
  # sort-then-reverse() ordering (including how ties come out)
  return sorted(merged, key=itemgetter('updated_parsed'))[::-1]

def rss2(entries, meta):
  """Serialize feed entries into a single RSS 2.0 XML document string.

  entries -- time-sorted feedparser entries (e.g. the output of muxer)
  meta    -- dict of channel info with 'title', 'link', 'feedLink',
             'description', 'pubDate' and 'generator' keys
  Returns the complete XML document as one string.
  """
  head = """<?xml version="1.0" encoding="UTF-8"?><rss version="2.0"
          xmlns:content="http://purl.org/rss/1.0/modules/content/"
          xmlns:wfw="http://wellformedweb.org/CommentAPI/"
          xmlns:dc="http://purl.org/dc/elements/1.1/"
          xmlns:atom="http://www.w3.org/2005/Atom"
          ><channel>"""
  foot = "</channel></rss>"
  # channel-level metadata rendered from the meta dict
  channel = """<title>%s</title>
            <atom:link href="%s" rel="self" type="application/rss+xml" />
            <link>%s</link>
            <description>%s</description>
            <pubDate>%s</pubDate>
            <generator>%s</generator>
            <language>en</language>""" % (meta['title'], meta['feedLink'], meta['link'], meta['description'], meta['pubDate'], meta['generator'])
  pieces = []
  for entry in entries:
    pieces.append("<item>")
    pieces.append("<title><![CDATA[%s]]></title>" % val_from_entry(entry, "title"))
    pieces.append("<link><![CDATA[%s]]></link>" % val_from_entry(entry, "link"))
    pieces.append("<pubDate>%s</pubDate>" % format_date(val_from_entry(entry, "created_parsed", "updated_parsed")))
    pieces.append("<dc:creator>%s</dc:creator>" % val_from_entry(entry, "author"))
    cats = get_cats(entry)
    if cats:
      pieces.append("<category><![CDATA[%s]]></category>" % ",".join(cats))
    # prefer the pre-FeedBurner permalink as the guid when one exists
    pieces.append('<guid isPermaLink="false"><![CDATA[%s]]></guid>' % val_from_entry(entry, "feedburner_origlink", "link"))
    pieces.append('<description><![CDATA[%s[...]]]></description>' % val_from_entry(entry, "summary"))
    content = val_from_entry(entry, "content")
    if content:
      # feedparser stores content as a list of objects carrying a .value payload
      content = content[0].value
    pieces.append('<content:encoded><![CDATA[%s]]></content:encoded>' % content)
    pieces.append("</item>")
  return head + channel + "".join(pieces) + foot

def get_cats(entry):
  """Collect category strings from an entry's tags.

  Each tag's 'term' is split on the tag's 'scheme' when one is present,
  otherwise on whitespace, and all resulting pieces are gathered.

  entry -- a feedparser entry
  Returns a (possibly empty) list of category strings.
  """
  tags = val_from_entry(entry, "tags")
  if not tags:
    return []
  bits = []
  for tag in tags:
    # BUG FIX: the original used dict.has_key(), which is deprecated in
    # Python 2 and removed in Python 3; use `in` / .get() instead.
    if 'term' not in tag:
      continue
    scheme = tag.get('scheme')
    if scheme is not None:
      tmp_bits = tag['term'].split(scheme)
    else:
      tmp_bits = tag['term'].split()
    bits.extend(tmp_bits)
  return bits

def val_from_entry(entry, *args):
  """Return the first attribute of *entry* found among *args*.

  Attribute names are tried in the order given; the first one that exists
  is returned (even if its value is falsy).  Returns "" when none exist.
  """
  _missing = object()  # sentinel so that None/"" attribute values still win
  for name in args:
    value = getattr(entry, name, _missing)
    if value is not _missing:
      return value
  return ""

def format_date(date_ts):
  """Render a struct_time as an RFC 822 date in UTC.

  date_ts -- a time.struct_time (e.g. from gmtime() or feedparser's
             *_parsed fields)
  Returns e.g. "Thu, 01 Jan 1970 00:00:00 +0000".
  """
  rfc822_fmt = "%a, %d %b %Y %H:%M:%S +0000"
  return strftime(rfc822_fmt, date_ts)


if __name__ == "__main__":
  try:
    # you shouldn't need to edit below this portion
    parsed_feeds = [feedparser.parse(feed) for feed in feed_list]
    new_entries = muxer(parsed_feeds)
    st = rss2(new_entries, meta)
    # BUG FIX: the original used the removed file() builtin and wrote
    # encoded bytes to a text-mode handle with no guarantee of closing it;
    # write UTF-8 bytes through a context manager instead.
    with open(feed_file, "wb") as f:
      f.write(st.encode("utf-8"))
    # world-readable, owner-writable (rw-r--r--); 0o644 spelling works on
    # Python 2.6+ and 3.x, unlike the old 0644 literal
    os.chmod(feed_file, 0o644)
  except Exception:
    # BUG FIX: the original did `print traceback.print_exc()`, which printed
    # the traceback and then a spurious "None" (print_exc returns None).
    traceback.print_exc()
    sys.exit(1)
