#!/usr/bin/env python

import os, sys, urllib2, json
import pprint, datetime, time
import sqlite3 as sqlite
from optparse import OptionParser

# Configuration
# Command-line options: -c/--channel and -p/--publisher may each be given
# multiple times (action='append'); -d/--days sets the schedule window length.
optp = OptionParser()
optp.add_option('-c', '--channel', default=[], action='append',
                help='Specify channel(s)')
optp.add_option('-d', '--days', default=1, type='int',
                help='Specify number of days')
optp.add_option('-p', '--publisher', default=[], action='append',
                help='Specify publisher(s)')

# Atlas API endpoint and local cache directory
api_url    = 'http://atlas.metabroadcast.com/3.0'
cache_path = 'cache'

# Logging: every message is prefixed with seconds elapsed since startup
start = time.time()
def log ( msg ):
  elapsed = '%0.3f' % (time.time() - start)
  sys.stderr.write('%9s - %s\n' % (elapsed, msg))

class ZeroLength ( Exception ):
  """Raised when a broadcast reports a zero duration."""
  def __init__ ( self, msg ):
    super(ZeroLength, self).__init__(msg)

class InvalidLength ( Exception ):
  """Raised when a broadcast's reported duration disagrees with its window."""
  def __init__ ( self, msg ):
    super(InvalidLength, self).__init__(msg)

# Brand info: in-memory cache of Brand objects, keyed by brand URI,
# backed by the sqlite 'brands' table in brand_db.
BRANDS = {}
def get_brand ( uri ):
  # Return the Brand for uri: memory cache first, then the sqlite cache,
  # finally a fetch from the Atlas API (which is then persisted).
  global brand_db

  # Already in memory cache
  if uri in BRANDS: return BRANDS[uri]

  # In DB cache.  Parameterized query: the previous hand-built SQL string
  # broke (and was injectable) for any uri/title containing a quote.
  cur = brand_db.cursor()
  cur.execute('select * from brands where id=?', (uri,))
  res = cur.fetchall()
  if res:
    ret = Brand()
    # Row layout matches the 'brands' table: id, title, description, image, thumb
    (ret.id, ret.title, ret.description, ret.image, ret.thumb) = res[0]

  # Fetch from the API and persist in the DB cache
  else:
    log('    fetch brand %s' % uri)
    ret = Brand(uri)
    cur.execute('insert into brands values (?,?,?,?,?)',
                (ret.id, ret.title, ret.description, ret.image, ret.thumb))
    # Commit only needed on the insert path (was previously unconditional)
    brand_db.commit()

  # Store and return
  BRANDS[uri] = ret
  return ret

class Brand:
  """A programme brand (container), optionally fetched from the Atlas API."""

  def __init__ ( self, uri = None ):
    # Defaults for an empty brand (filled in by the caller or by the fetch)
    self.id          = None
    self.title       = None
    self.description = ''
    self.image       = ''
    self.thumb       = ''

    # Only fetch when a (truthy) URI was supplied
    if not uri:
      return

    url = api_url + '/content.json?uri=' + uri
    log('      url = %s' % url)
    payload = json.loads(urllib2.urlopen(url).read())
    payload = payload['contents']
    # NOTE(review): if 'contents' is a list (as some Atlas responses are),
    # the key lookups below would fail -- confirm against the API schema.
    if not len(payload): raise Exception('invalid brand')

    # Mandatory fields
    self.id    = payload['uri']
    self.title = payload['title']
    # Optional fields keep their '' defaults when absent
    self.description = payload.get('description', self.description)
    self.image       = payload.get('image', self.image)
    self.thumb       = payload.get('thumbnail', self.thumb)

  def __str__ ( self ):
    return self.title

  def __hash__ ( self ):
    return hash(self.id)

# Programme
class Programme:
  """One scheduled programme, built from an Atlas schedule item dict."""

  def __init__ ( self, data ):
    """Extract programme details from an Atlas schedule item.

    Only the first entry of data['broadcasts'] is used.
    Raises ZeroLength when the broadcast reports a zero duration.
    """
    self.id       = None
    self.title    = None
    self.subtitle = None
    self.summary  = None
    self.start    = None
    self.stop     = None
    self.brand    = None
    self.series   = None
    self.episode  = None

    # Extract
    self.title = data['title']
    if 'description' in data:
      self.summary  = data['description']
    self.id       = data['curie']
    self.start    = self.p_time(data['broadcasts'][0]['transmission_time'])
    self.stop     = self.p_time(data['broadcasts'][0]['transmission_end_time'])
    if 'container' in data and 'uri' in data['container']:
      self.brand    = get_brand(data['container']['uri'])
    if 'series_number' in data:
      self.series   = data['series_number']
    if 'episode_number' in data:
      self.episode  = data['episode_number']

    # Validate duration
    dur   = int(data['broadcasts'][0]['duration'])
    delta = int((self.stop - self.start).total_seconds())
    if not dur: raise ZeroLength('zero')
    # Disabled: reported duration often disagrees with the broadcast window
    #if dur != delta: raise InvalidLength('%d != %d' % (dur, delta))

  # parse time (times appear to be Zulu/UTC -- TODO confirm with the API)
  def p_time ( self, tm ):
    return datetime.datetime.strptime(tm, '%Y-%m-%dT%H:%M:%SZ')

  # format time as HH:MM, optionally prefixed with the date
  def f_time ( self, tm, date = False ):
    fmt = '%H:%M'
    if date: fmt = '%Y-%m-%d ' + fmt
    return tm.strftime(fmt)

  def __str__ ( self, indent = '', brand = True ):
    """Render '<series>.<episode> <start - stop> [brand :] title'."""
    ret = indent
    if self.brand and brand:
      ret = ret + self.brand.title + ' : '
    ret = ret + self.title
    ep  = ''
    if self.episode:
      ep = '%4d' % self.episode
    if self.series:
      ep = '%2d.%4s' % (self.series, ep)
    tm = ''
    if self.start and self.stop:
      tm = '%s - %s' % (self.f_time(self.start, True), self.f_time(self.stop))
    ret = '%s%7s %22s %s' % (indent, ep, tm, ret)
    return ret

  def __hash__ ( self ):
    # Identity is the Atlas curie
    return hash(self.id)

  def __eq__ ( self, other ):
    return hash(self) == hash(other)

  def __ne__ ( self, other ):
    # Python 2 does not derive != from __eq__; keep the two consistent
    return not self.__eq__(other)

  def __cmp__ ( self, other ):
    # Sort programmes chronologically (used by list.sort under Python 2)
    return cmp(self.start, other.start)

# Process schedule
def process_programmes ( data ):
  ret = []
  for p in data['schedule'][0]['items']:
    try:
      ret.append(Programme(p))
    except ZeroLength: pass
    except Exception, e:
      log(str(e))
  return ret

# Sort by brand
def sort_brands ( data ):
  # Group programmes into sets keyed by Brand; programmes without a brand
  # go under the literal key 'unknown' (always present, possibly empty).
  groups = { 'unknown' : set() }
  for prog in data:
    key = prog.brand if prog.brand else 'unknown'
    groups.setdefault(key, set()).add(prog)
  return groups

# Output brands
def output_brands ( data ):
  for b in data:
    print 'Brand: %s' % b
    for p in data[b]:
      try:
        print p.__str__('  ')
      except Exception, e:
        log('ERROR: failed to process %s' % p.title)
        log(str(e))

# Parse options
(opts,args) = optp.parse_args()

# API key: previously 'api_key' was referenced below but never defined,
# which raised NameError at startup.  Take it from the environment.
api_key = os.environ.get('ATLAS_API_KEY', '')

# Build URL root (channel is appended per-iteration in the main loop)
sched_url = api_url   + '/schedule.json?apiKey=%s' % api_key
sched_url = sched_url + '&from=now&to=now.plus.%dh' % (opts.days * 24)
sched_url = sched_url + '&publisher=%s' % ','.join(opts.publisher)

# Setup cache directory
if not os.path.exists(cache_path):
  os.makedirs(cache_path)

# Open the brands cache DB; 'if not exists' replaces the old fragile
# file-existence check (and also copes with an empty leftover file).
brand_db = sqlite.connect(os.path.join(cache_path, 'brands.db'))
brand_db.execute('create table if not exists brands (id text, title text, description text, image text, thumb text)')
brand_db.commit()

# Process each channel
for c in opts.channel:
  log('processing channel %s' % c)

  # Fetch data
  log('  fetching data ...')
  url  = sched_url + '&channel=%s' % c
  log('    url = %s' % url)
  data = urllib2.urlopen(url).read()

  # Cache
  log('  caching data ...')
  open('%s/%s.in'  % (cache_path, c), 'w').write(data)

  # JSON decode
  log('  decoding json ...')
  data = json.loads(data)
  pprint.pprint(data, open('%s/%s.fmt' % (cache_path, c), 'w'))

  # Process data
  log('  processing programmes ...')
  prog = process_programmes(data)

  # Output
  log('  outputting ...')
  print c
  for p in prog:
    print p.__str__('  ')
  print ''

  # Output by brand
  brand = sort_brands(prog)
  for b in brand:
    print b
    t = list(brand[b])
    t.sort()
    for p in t:
      print p.__str__('  ', False)
