#!/usr/bin/python 
#
# Caching JSON Proxy for Link 360.
#
# Godmar Back <godmar@gmail.com>, May 2009
#

from lxml import etree
from django.utils import simplejson
import re, urllib, time

# Cache entries expire after 24 hours (value is in seconds).
expiration = 24 * 3600
url2json = dict()       # maps full Link 360 request URL -> converted JSON dict
n_url2json = 0          # number of entries currently in url2json
max_url2json = 10000    # cache is flushed wholesale once this size is reached

# statistics
cachehits = 0
cacherequests = 0

import sys
# add directory in which script is located to python path
# (so the sibling link360 module can be imported when run under WSGI)
script_dir = "/".join(__file__.split("/")[:-1])
if script_dir == "":
    script_dir = "."
if script_dir not in sys.path:
    sys.path.append(script_dir)

from link360 import Link360JSON

def notfound(env, start_response):
    # Diagnostic 404 handler: echo the WSGI environment and the parsed
    # query-string parameters back to the client as plain text.
    def dump(mapping):
        # one "key -> value" line per entry, sorted for stable output
        lines = [k + " -> " + str(v) + "\n" for k, v in mapping.items()]
        return ''.join(sorted(lines))

    # split QUERY_STRING into key/value pairs, then URL-decode both halves
    pairs = [kv.strip().split('=', 1)
             for kv in env['QUERY_STRING'].split('&') if '=' in kv]
    params = dict([(urllib.unquote_plus(k), urllib.unquote_plus(v))
                   for k, v in pairs])

    body = "Not Found, env = \n" + dump(env)
    body = body + "\nparams:\n" + dump(params)

    headers = [('Content-Type', 'text/plain'),
               ('Cache-Control', 'max-age=1,must-revalidate')]
    start_response("404 Not Found", headers)
    return [body]

# PATH_INFO must look like "/<client-id>"; group(1) captures the id
pathinfoformat = re.compile('/([^/]*)$')

def application(env, start_response):
    """WSGI entry point: caching JSON proxy for the Link 360 OpenURL service.

    PATH_INFO selects the Serials Solutions client id ("/<id>") or the
    special "/stats" page; the query string is forwarded verbatim to the
    Link 360 server.  The XML response is converted to JSON, cached in
    memory for `expiration` seconds, and wrapped in a JSONP callback when
    a 'jsoncallback' query parameter is present.
    """
    global n_url2json, max_url2json, url2json, expiration, cacherequests, cachehits

    # decode the query string into a dict (last duplicate key wins)
    params = dict([(urllib.unquote_plus(k), urllib.unquote_plus(v))
        for k, v in [kv.strip().split('=', 1) \
                     for kv in env['QUERY_STRING'].split('&') if '=' in kv]])

    m = pathinfoformat.match(env['PATH_INFO'])
    if not m:
        return notfound(env, start_response)

    sshash = m.group(1)
    if sshash == "stats":
        # report cache statistics as plain text
        headers = [('Content-Type', 'text/plain;charset=utf-8'), \
                   ('Cache-Control', 'max-age=1,must-revalidate')]
        start_response("200 OK", headers)
        body = "%d entries cached, %d/%d cache hits" \
            % (n_url2json, cachehits, cacherequests)
        return [body]

    url = "http://%s.openurl.xml.serialssolutions.com/openurlxml?version=1.0&" % sshash
    url = url + env['QUERY_STRING']
    now = time.mktime(time.gmtime())

    # flush the whole cache once it reaches capacity
    # (>= rather than ==: an overshoot must never disable eviction)
    if n_url2json >= max_url2json:
        url2json = dict()
        n_url2json = 0

    cacherequests += 1
    json = None
    if url in url2json:     # 'in' instead of deprecated dict.has_key()
        json = url2json[url]
        if json['retrieved'] + expiration < now:
            # entry expired: drop it and refetch below
            json = None
            del url2json[url]
            n_url2json -= 1
        else:
            # mark the cached copy so clients can tell it was served
            # from the cache
            json['cachehit'] = True
            cachehits += 1

    if not json:
        f = urllib.urlopen(url)
        try:
            # parse the Link 360 XML response and convert it to JSON
            converter = Link360JSON(etree.parse(f))
        finally:
            # close the connection even if parsing raises
            f.close()
        json = converter.convert()
        json['retrieved'] = int(now)
        url2json[url] = json
        n_url2json += 1

    body = simplejson.dumps(json)

    # JSONP support: wrap the response in the caller-supplied callback
    if 'jsoncallback' in params:
        body = params.get('jsoncallback') + "(" + body + ")"

    headers = [('Content-Type', 'application/javascript;charset=utf-8'), \
               ('Cache-Control', 'max-age=1,must-revalidate')]
    start_response("200 OK", headers)
    return [body]

if __name__ == '__main__':
    # Debugging aid: convert a Link 360 XML file named on the command
    # line to JSON and print it to stdout.
    f = open(sys.argv[1])
    try:
        converter = Link360JSON(etree.parse(f))
    finally:
        # close the input file even if XML parsing fails
        f.close()
    print(simplejson.dumps(converter.convert()))

