#!/usr/bin/python
# Copyright 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Controller for handling queries."""

import datetime
import logging
import sys
import traceback
from xml.dom import minidom
from controllers import xmltojson
from controllers import merge
from controllers import rssmunge
from google.appengine.api import memcache
from google.appengine.api import urlfetch
from google.appengine.ext import db
from google.appengine.ext import webapp
from django.utils import simplejson
import wsgiref.handlers


# Source feed URL for each supported agency key.  An empty URL ('sacfs')
# means the agency has no fetchable feed; only previously stored data can
# be served for it.
urls = {'viccfa':
        'http://osom.cfa.vic.gov.au/public/osom/IN_COMING.rss',
        'nswrfs':
        'http://www.rfs.nsw.gov.au/feeds/majorIncidents.xml',
        'tastfs':
        'http://www.fire.tas.gov.au/mysite/Show?pageId=colBushfireSummariesRss',
        'sacfs':
        '',
       }

# How long cached feed data stays valid in memcache (entries are actually
# stored with CACHE_TTL + 60 seconds; see PutData).
CACHE_TTL = 300  # seconds
# Minimum age before a refetch from the source URL would be considered
# (used by the currently-disabled staleness check in QueryFire.get).
FETCH_TTL = 240  # seconds

# Memcache key suffixes (prefixed with the agency name) for the munged
# and raw forms of a feed, plus a flag key set while an update is running.
MEMCACHE_RAW_KEY = 'rawdata2'
MEMCACHE_KEY = 'data2'
MEMCACHE_UPDATE_KEY = 'data_updating'

# Pseudo-agency name under which the merged all-agencies feed is stored.
MEMCACHE_ALL_KEY = 'merged'

################################################################################


class CachedData2(db.Model):
  """Records some data with a timestamp.

  One entity is written per feed fetch; GetData() returns the newest
  entity for an agency by ordering on `date` descending.
  """
  # Original bytes as fetched from the source URL (may be unused; PutData
  # currently stores the processed data only).
  rawdata = db.BlobProperty()
  # Agency key (a key of `urls`, or 'merged' for the combined feed).
  agency = db.StringProperty()
  # Processed (possibly munged) feed bytes that get served to clients.
  data = db.BlobProperty()
  # Set automatically when the entity is first stored.
  date = db.DateTimeProperty(auto_now_add=True)


class Agency(db.Model):
  """Bookkeeping record for a real (non-'merged') agency feed."""
  # Agency key, matching a key of `urls`.
  agency = db.StringProperty()
  # UTC time of the most recent successful PutData for this agency.
  last_updated = db.DateTimeProperty()
  # Source feed URL, copied from `urls` when the entity is created.
  url = db.StringProperty()


def GetData(agency):
  """Get most recent data from store (not cache).

  Args:
    agency: agency key string (a key of `urls`, or 'merged').

  Returns:
    The newest CachedData2 entity for the agency, or None if none stored.
  """
  query = CachedData2.all().filter('agency =', agency).order('-date')
  cached_data = query.get()
  logging.debug('cached data for %s: %r', agency, cached_data)
  # query.get() already returns None when there is no match, so the
  # previous `if cached_data: ... else: return None` was redundant.
  return cached_data


def PutData(agency, data):
  """Put data into the store and cache.

  For a real agency (anything but 'merged') this also refreshes the
  Agency bookkeeping entity and regenerates the merged feed.

  Args:
    agency: agency key string, or 'merged' for the combined feed.
    data: processed feed bytes to store and serve.
  """
  logging.info('putting agency: %s - %d bytes', agency, len(data))
  # NOTE(review): memcache.add is a no-op when the key already exists —
  # presumably intentional (callers flush/miss first); confirm.
  memcache.add(agency + MEMCACHE_KEY, data, CACHE_TTL + 60)
  memcache.add(agency + MEMCACHE_RAW_KEY, data, CACHE_TTL + 60)

  CachedData2(data=data, agency=agency).put()

  if agency == 'merged':
    # Guard against infinite recursion: MergeData() below calls back into
    # PutData with agency='merged'.
    return
  record = Agency.all().filter('agency =', agency).get()
  if record is None:
    record = Agency(agency=agency, url=urls.get(agency))
  record.last_updated = datetime.datetime.utcnow()
  record.put()
  MergeData()


def MergeData():
  """Store a merged form of all feeds.

  Collects the newest stored data for every agency that has any, merges
  them, and stores the result under the 'merged' pseudo-agency.
  """
  feeds = []
  for agency in urls:
    stored = GetData(agency)
    if stored:
      # Agencies with no stored data yet are simply skipped.
      feeds.append((agency, stored.data))
  merged = merge.MergeFeeds(feeds)
  PutData(agency='merged', data=merged)


def DeleteData(agency):
  """Delete all stored data for the given agency from store and cache.

  Args:
    agency: agency key string whose stored feed data should be removed.
  """
  # Bug fix: the query was previously unfiltered and used .get(), so it
  # deleted at most one arbitrary CachedData2 entity of ANY agency.
  # Now it deletes every stored entity belonging to this agency.
  query = CachedData2.all().filter('agency =', agency)
  results = query.fetch(1000)  # datastore fetch cap in this db API version
  if results:
    db.delete(results)
  memcache.delete(agency + MEMCACHE_KEY)
  memcache.delete(agency + MEMCACHE_RAW_KEY)


class MergeHandler(webapp.RequestHandler):
  """Regenerate and store the merged all-agencies feed on demand."""

  def get(self):
    """GET handler: rebuild the merged feed from the stored agency data."""
    MergeData()


class QueryFire(webapp.RequestHandler):
  """Serve an agency's feed: memcache, else datastore, else refetch.

  Query parameters:
    agency: a key of `urls`, or 'merged' for the combined feed.
    fmt: 'txt' (viccfa only, titles-only debug listing) or 'json';
        anything else serves the stored XML.
    flush: 'true' to drop this agency's memcache entries first.
    forcefetch: any non-empty value bypasses memcache reads.
  """

  def get(self):
    """GET handler."""
    agency = self.request.get('agency')
    fmt = self.request.get('fmt')
    flush = self.request.get('flush')  # flush the memcache

    # Reject unknown agencies up front ('merged' is a valid pseudo-agency).
    if agency not in urls and agency != 'merged':
      return self.response.out.write('Unknown agency: ' + agency)

    # None for 'merged'; may be '' for agencies with no feed (e.g. sacfs).
    url = urls.get(agency)

    if flush and flush == 'true':
      logging.debug('Flushing cache')
      memcache.delete(agency + MEMCACHE_KEY)
      memcache.delete(agency + MEMCACHE_RAW_KEY)

    now = datetime.datetime.now()
    # forcefetch skips the memcache read so stale content is never served.
    if self.request.get('forcefetch'):
      content = None
    else:
      content = memcache.get(agency + MEMCACHE_KEY)

    # Disabled staleness check; `refetch` below would be derived from it.
    #fetch_time = memcache.get('fetchTime')
    #if fetch_time:
      #fetch_delta = now - fetch_time
      #logging.debug('fetch_time %s fetch_delta %s %s'%(fetch_time,
                                                       #.fetch_delta.seconds,
                                                       #FETCH_TTL))
    #else:
      #logging.debug('no fetch_time')

    # Always False while the staleness check above is disabled.
    refetch = False
    if not refetch and not content and not self.request.get('forcefetch'):
      # get the persistent data unless it's time to refetch anyway
      persistent_data = GetData(agency=agency)
      if persistent_data:
        content = persistent_data.data
      # else refetch

    # An empty/None url skips fetching entirely (merged feed, sacfs).
    if url and (refetch or not content):
      logging.info('refetch is %r, content is %r', refetch, content)
      try:
        result = urlfetch.fetch(url, follow_redirects=False)
        logging.debug('Retrieving URL: ' + url)

        if result.status_code == 200:
          # refresh datastore and cache
          try:
            # Test that it's valid XML
            minidom.parseString(result.content)
            # check the cache one last time
            memcache.set('fetchTime', now)
            memcache.delete(MEMCACHE_UPDATE_KEY)
            if not self.request.get('forcefetch'):
              content = memcache.get(agency + MEMCACHE_KEY)
            if refetch or not content:
              logging.info('Refreshed content: %s (%d bytes)',
                           url, len(result.content))
              orig_content = result.content
              # Per-agency normalization of the raw RSS into georss.
              if agency == 'viccfa':
                content = rssmunge.MungeRSS(
                    orig_content, forcefetch=self.request.get('forcefetch')
                    ).encode('utf-8')
              elif agency == 'tastfs':
                content = rssmunge.MungeTasRSS(orig_content).encode('utf-8')
                logging.info('updating TFS RSS to georss %d -> %d',
                             len(orig_content), len(content))
              else:
                content = orig_content
              PutData(agency, content)
              memcache.delete(MEMCACHE_UPDATE_KEY)
          except Exception, e:
            # Bad XML (or munging failure): keep serving the old content.
            # Note: sys.exc_info() rebinds `e` to the exception *type*.
            e, v, t = sys.exc_info()
            logging.warn('Fetched bad XML; reverting to persistent data'
                         ', exception: %s: %s', e, v)
            logging.warn('traceback: ' + '\n'.join(traceback.format_tb(t)))

        else:
          logging.warn('Could not fetch: %s (status %s != 200)',
                       url, result.status_code)

      except Exception, e:
        # urlfetch failure (timeout, DNS, quota); fall through to stored data.
        e, v, t = sys.exc_info()
        logging.warn('urlfetch exception while fetching: %s exception %s: %s',
                     url, e, v)
        logging.warn('traceback: ' + '\n'.join(traceback.format_tb(t)))

    if not content:
      # revert to persistent data, if any
      persistent_data = GetData(agency=agency)
      if persistent_data:
        content = persistent_data.data
      else:
        logging.warn('Fetch failed and no persistent data')
        # NOTE(review): execution falls through after error(404) with
        # content still None; the formatting code below may then raise —
        # confirm this is intended.
        self.error(404)

    if agency == 'viccfa':
      if fmt == 'txt':
        # Debug output: list the <title> elements found in the feed.
        logging.debug('Parsing xml')
        dom = minidom.parseString(content)
        rss_node = dom.documentElement
        assert rss_node.tagName == 'rss'

        channel_node = rss_node.childNodes[1]
        assert channel_node.tagName == 'channel'

        node_list = channel_node.getElementsByTagName('title')

        # count element nodes
        item_cnt = 0
        for node in node_list:
          if node.nodeType != node.TEXT_NODE: item_cnt += 1

        self.response.headers.add_header('Cache-control',
                                         'no-cache, must-revalidate')
        self.response.headers['Content-Type'] = 'text/html'
        self.response.out.write('%d fire items found in RSS feed\n' % item_cnt)

        item_cnt = 0
        for node in node_list:
          if node.nodeType != node.ELEMENT_NODE:
            continue
          item_cnt += 1
          assert node.tagName == 'title'
          title_string = node.firstChild.data
          self.response.out.write('%d: %s\n' % (item_cnt, title_string))

        dom.unlink()

      else:
        # Serve the stored (munged) XML as-is.
        self.response.headers.add_header('Cache-control',
                                         'no-cache, must-revalidate')
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.out.write(content)

    else:
      # Reformat output as JSON. TODO: Memcache this.
      if fmt == 'json':
        content = simplejson.dumps(xmltojson.XmlToJson(content), indent=2)
        # Strip trailing whitespace simplejson leaves on indented lines.
        content = '\n'.join([l.rstrip() for l in content.splitlines()])

      self.response.headers.add_header('Cache-control',
                                       'no-cache, must-revalidate')
      self.response.headers['Content-Type'] = 'text/html'
      self.response.out.write(content)

################################################################################


def main():
  """Entry point: map URL routes to handlers and serve over CGI."""
  routes = [
      ('/query/fire', QueryFire),
      ('/query/merge', MergeHandler),
  ]
  application = webapp.WSGIApplication(routes, debug=True)
  wsgiref.handlers.CGIHandler().run(application)

if __name__ == '__main__':
  main()
