#!/usr/bin/env python
# encoding: utf-8
"""
Tracker.py

'Clean' referrer tracking using JavaScript.

Created by Rui Carmo on 2007-03-21.
Published under the MIT license.
"""
from snakeserver.snakelet import Snakelet
import os, sys, time, rfc822, unittest, urlparse, urllib, re, sha
from Utils import *
import Locale

# List of referrals to ignore
# (web aggregators, search engines, etc. that tend to cause too many hits).
# Each entry is a regular expression matched against the referring host.
# Raw strings with literal dots escaped, so e.g. "prt\.sc" cannot
# accidentally match "prtxsc" (the original list only escaped dots in the
# first pattern; "wwww.ask.com" was a typo for "www.ask.com").
ignorelist = [r"^(\w+)\.google\.",
              r"^(pesquisa\.clix\.pt|pesquisa\.sapo\.pt|www\.prt\.sc|prt\.sc)",
              r"^(127|10|192\.168)\.+",
              r"^(search\.creativecommons\.org|search\.msn\.com|a9\.com|www\.metacrawler\.com)",
              r"^(www\.bloglines\.com|bloglines\.com|www\.newsgator\.com)",
              r"^(findory\.com|www\.dogpile\.com|www\.ask\.com|www\.answers\.com)"]


class Referrals(Snakelet):
  """
  Referral Tracker.

  Issues short-lived one-time keys to a JavaScript snippet ("key" action)
  and records the referring URL when the snippet calls back ("do" action).
  Hosts matching the module-level ignorelist are never recorded.
  """

  def init(self):
    # Outstanding one-time keys live in the snakelet context; the shared
    # referrer cache lives in the application context so other components
    # (and the "dump" action) can reach it.
    s = self.getContext()
    ac = self.getAppContext()
    s.refkeys = {}
    ac.referrers = ReferrerCache(ac, ignorelist)

  def getDescription(self):
    return "Referral Tracker"

  def allowCaching(self):
    # Responses embed one-time keys, so they must never be cached.
    return False

  def requiresSession(self):
    return self.SESSION_NOT_NEEDED

  def serve(self, request, response):
    """
    Dispatch on the URL pattern /track/action/key/url.

    Actions: "key" issues a fresh tracking key, "do" records a referral,
    "dump" renders the current referrer table; anything else is a 404.
    """
    request.setEncoding("UTF-8")
    response.setEncoding("UTF-8")
    s = self.getContext()
    ac = self.getAppContext()
    # getReferer() may return None when no Referer header was sent
    referrer = request.getReferer() or ''
    try:
      (dummy, action, key, url) = request.getFullQueryArgs().split('/', 3)
    except (ValueError, AttributeError):
      # fewer than four path segments (or no query args at all)
      response.setResponse(404, "Not Found")
      return
    if action == "key":
      # invoked from <script type="text/javascript" src="/track/key"></script>
      now = time.time()
      # create a new entry keyed on a SHA-1 of the current timestamp
      refkey = sha.new(str(now)).hexdigest()
      # store the time and the local site page we were called from
      (schema, host, path, parameters, query, fragment) = urlparse.urlparse(referrer)
      s.refkeys[refkey] = (now, path[len(ac.base):])
      # wipe out anything older than 30 seconds; snapshot the keys with
      # list() so we can delete entries while looping
      for i in list(s.refkeys.keys()):
        (then, dummy) = s.refkeys[i]
        if (now - 30) > then:
          del s.refkeys[i]
      response.setHeader("Content-Type", 'text/plain')
      response.getOutput().write('"%s"' % refkey)
      return
    elif action == "do":
      # split referrer URL
      (schema, netloc, path, parameters, query, fragment) = urlparse.urlparse(url)
      # poison the schema for ignored hosts so they fail the check below
      for i in ignorelist:
        if re.match(i, netloc):
          schema = 'skip'
      # Only track URLs presented with a valid (SHA-1 shaped) key and schema
      if re.match('^[a-f0-9]{40}$', key) and schema in ['http', 'https']:
        # If we know this key
        if key in s.refkeys:
          (now, page) = s.refkeys[key]
          # Check if this is an internal referrer - we may have a siteurl
          # override parameter in siteinfo; fall back to the request's base URL
          try:
            (schema, host, path, parameters, query, fragment) = urlparse.urlparse(ac.siteinfo['siteurl'])
          except (KeyError, AttributeError, TypeError):
            (schema, host, path, parameters, query, fragment) = urlparse.urlparse(request.getBaseURL())
          if host != netloc:
            ac.referrers.add(url, urllib.unquote(page), now)
      # Keep the remote browser happy with a transparent 1x1 GIF
      self.getWebApp().serveStaticFile(self.getWebApp().getDocRootPath() + "/img/1x1t.gif", response, useResponseHeaders=False)
      return
    elif action == "dump":
      # debugging aid: render the current referrer table as HTML
      response.getOutput().write(str(ac.referrers.dump()))
      return
    response.setResponse(404, "Not Found")
  
class ReferrerCache:
  """
  In-memory cache of referring URLs per local page.

  Entries idle for more than 24 hours are expired on every add(), and the
  whole table is checkpointed to the application's persistent store every
  half hour.
  """

  def __init__(self, ac, ignorelist=None):
    """
    ac: application context (provides .persistent, .base, .locale, .indexer)
    ignorelist: regex strings for referrers that must not be recorded
    """
    self.ac = ac
    # self.data has to be initialized upon first add, since the persistent
    # cache may not have been instanced yet
    self.data = None
    # the original default was a shared mutable list; use a None sentinel
    self.ignorelist = ignorelist if ignorelist is not None else []
    self.mtime = time.time()

  def getData(self):
    # Lazily load the referrer table from the persistent store; start
    # empty when no checkpoint exists yet.
    if self.data is None:
      try:
        self.data = self.ac.persistent['tracker:referrers']
      except Exception:
        self.data = {}

  def add(self, referrer, page, now):
    """
    Record one hit from referrer to page at time now, then expire stale
    entries and checkpoint if half an hour has passed.
    """
    referrer = referrer.strip()
    page = page.strip()
    self.getData()
    if referrer == '' or page == '':
      return
    for pattern in self.ignorelist:
      if re.match(pattern, referrer):
        return
    if page in self.data:
      referrers = self.data[page]['referrers']
      if referrer in referrers:
        referrers[referrer]['count'] = referrers[referrer]['count'] + 1
        referrers[referrer]['mtime'] = now
      else:
        referrers[referrer] = {'count': 1, 'mtime': now}
      self.data[page]['mtime'] = now
    else:
      self.data[page] = {'mtime': now, 'referrers': {referrer: {'count': 1, 'mtime': now}}}

    # Expire pages and individual referrers idle for more than 24 hours.
    # Iterate over list() snapshots so deletion during the loop is safe,
    # and skip to the next page once a page has been deleted (the previous
    # code dereferenced pages it had just deleted, raising KeyError, and
    # mutated the referrer dict while iterating it directly).
    cutoff = now - 3600 * 24
    for p in list(self.data.keys()):
      if self.data[p]['mtime'] < cutoff:
        del self.data[p]
        continue
      refs = self.data[p]['referrers']
      for r in list(refs.keys()):
        if refs[r]['mtime'] < cutoff:
          del refs[r]
      if len(refs) == 0:
        del self.data[p]
    # store a checkpoint of referrer information every half an hour
    if (self.mtime + 1800) < now:
      self.ac.persistent['tracker:referrers'] = self.data
      self.mtime = now

  def dump(self):
    """
    Render the cached referrer table as an HTML fragment, newest first.
    """
    # this _should_ be moved to a plugin to ensure better independence
    # between data and rendering, but Snakelets makes it easy to edit,
    # debug and reload a single module at runtime
    i18n = Locale.i18n[self.ac.locale]
    self.getData()
    buffer = ['<table class="referrers"><tr><th>Page</th><th>Referrers (Hits)</th></tr>']
    # sort pages by decreasing timestamp of last hit (key= instead of the
    # Python-2-only cmp callback)
    pages = sorted(self.data.keys(), key=lambda p: self.data[p]['mtime'], reverse=True)
    for page in pages:
      page = urllib.unquote(page)
      try:
        row = ['<tr><td><a href="%s" title="%s">%s</a></td><td class="referring_urls">' % (self.ac.base + page, timeSince(i18n, self.data[page]['mtime']), self.ac.indexer.pageinfo[page]['title'])]
        entry = self.data[page]['referrers']
        referrers = sorted(entry.keys(), key=lambda r: entry[r]['mtime'], reverse=True)
        for referrer in referrers:
          row.append('<a href="%s">%s</a>&nbsp;(%d)<br/>' % (referrer, shrink(referrer, 40), entry[referrer]['count']))
        buffer.append(''.join(row)[:-5])  # append current partial row trimming trailing <br/>
        buffer.append('</td></tr>')
      except Exception:
        # best-effort rendering: a page missing from the indexer (or an
        # unquoted key mismatch) is logged and skipped, not fatal
        print("Error processing referrers for %s" % page)
    buffer.append('</table>')
    return ''.join(buffer)
