"""
This is a set of visualization routines to visualize the Star Wars Kid Data 
Dump which is hosted at: http://waxy.org/2008/05/star_wars_kid_the_data_dump/
"""
from __future__ import with_statement
import re, itertools, datetime
import numpy, pylab, matplotlib
from collections import defaultdict

#create and compile the regular expression once at import time (outside any
#loop) to save time; the pattern is built from per-field pieces so each
#sub-pattern stays readable.  Raw strings keep the regex escapes explicit.
ip_re = r'(.*?) -.*?'
time_re = r'\[(.*?) (-\w.*?)\] '
host_re = r'"(\w{1,}) (.*?)".*?'
ref_re = r'"(.*?)" '
brow_re = r'"(.*?)"'
CONST_RE = re.compile(ip_re + time_re + host_re + ref_re + brow_re)

#use this regular expression to split the "12/Apr/2003:06:59:18" date stamp
SPLIT_RE = re.compile(r'[/:]')
#abbreviated month name -> month number (Feb/Mar added so the map covers
#every month, not just those present in the original dump)
MONTH_MAP = {'Jan': 1, 'Feb': 2, 'Mar': 3, 'Apr': 4, 'May': 5, 'Jun': 6,
				'Jul': 7, 'Aug': 8, 'Sep': 9, 'Oct': 10, 'Nov': 11, 'Dec': 12}

#pull the domain portion out of a referrer URL like "http://host/path"
DOMAIN_RE = re.compile(r'http://(.*?)/')


class LogClass():
	"""
	This class will contain the information relating to each record in the 
	Apache Log dumps. A few examples are listed below:
193.251.187.41 - - [12/Apr/2003:06:59:18 -0700] "GET / HTTP/1.1" 200 68627 "http://www.waxy.org/archive/2003/03/27/open_cdr.shtml" "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.0)"
209.126.210.108 - - [12/Apr/2003:06:59:29 -0700] "GET /index.xml HTTP/1.1" 200 23099 "-" "Mozilla/4.0 (compatible; MSIE 5.5; Windows 98; Win 9x 4.90)"
69.10.137.199 - - [12/Apr/2003:07:00:00 -0700] "GET /watch-info HTTP/1.0" 200 73 "-" "-"

	Parsed attributes: ip, time (datetime), req (request path + protocol),
	f_name (last path component of the requested file), ref (referrer URL),
	browser (user-agent string), ref_domain (referrer's domain or 'UNKNOWN').
	"""
	def __init__(self, LOG_LINE):
		#default every attribute first so an instance built from an
		#unparsable line does not raise AttributeError when inspected later
		self.ip = None
		self.time = None
		self.req = None
		self.f_name = None
		self.ref = None
		self.browser = None
		self.ref_domain = 'UNKNOWN'
		
		this_match = CONST_RE.search(LOG_LINE)
		
		if this_match:
			
			self.ip = this_match.group(1)
			#group(2) looks like "12/Apr/2003:06:59:18"; split on '/' and ':'
			str_time = SPLIT_RE.split(this_match.group(2))
			self.time = datetime.datetime(int(str_time[2]),
										MONTH_MAP[str_time[1]],
										int(str_time[0]),
										int(str_time[3]),
										int(str_time[4]),
										int(str_time[5]))
			
			#e.g. "/archive/foo.shtml HTTP/1.1"
			self.req = this_match.group(5)
			
			#last path component of the requested URL (the file name)
			self.f_name = self.req[0:self.req.find(' ')].split('/')[-1]
			
			self.ref = this_match.group(6)
			self.browser = this_match.group(7)
			
			ref_match = DOMAIN_RE.search(self.ref)
			if ref_match:
				self.ref_domain = ref_match.group(1)
			
	def __hash__(self):
		#BUG FIX: the original hashed self.ip + hash(self.time) + ... which
		#adds a str to an int and raises TypeError.  Hash the tuple of the
		#identifying fields instead.
		return hash((self.ip, self.time, self.req, self.ref, self.browser))

		
def CheckStr(IN_STR):
	"""
	Predicate used when filtering the log: True if the raw record text
	mentions 'star_wars' anywhere, False otherwise.
	"""
	return 'star_wars' in IN_STR
			
def DetDateNum(FIRST_DAY, THIS_DAY):
	"""
	Return the whole number of days elapsed from FIRST_DAY.time to
	THIS_DAY.time (both are LogClass records with datetime .time fields).
	"""
	elapsed = THIS_DAY.time - FIRST_DAY.time
	return elapsed.days
			
		
if __name__ == '__main__':
	
	#parse the log, keeping only the star_wars records, and bucket the
	#requests by referring domain so we can rank the top referrers later
	log_list = []
	ref_dict = defaultdict(list)
	
	with open('star_wars_kid.log') as handle:
		#the very first record in the dump marks "day zero" of the meme
		first_log = LogClass(handle.next())
		#use itertools.ifilter to filter out all lines which do not contain "star_wars"
		for this_line in itertools.ifilter(CheckStr, handle):
			this_log = LogClass(this_line)
			log_list.append(this_log)
			#keep track of the number of referrers
			ref_dict[this_log.ref_domain].append(this_log)
	
	first_num = matplotlib.dates.date2num(first_log.time)
	
	def _day_hist(logs):
		"""Histogram of request times in days-since-first-request.
		Returns (counts, bin_edges) over 100 bins."""
		datenums = numpy.array(map(lambda x: matplotlib.dates.date2num(x.time), logs))
		#BUG FIX: the 'new' keyword was deprecated in NumPy 1.6 and removed
		#in 1.7; the default behavior has matched new=True ever since.
		return numpy.histogram(datenums - first_num, bins = 100)
	
	#generate the request-rate histogram image
	hist_data, bin_edges = _day_hist(log_list)
	matplotlib.pyplot.plot(bin_edges[1:], hist_data)
	matplotlib.pyplot.xlabel('Days since Meme')
	matplotlib.pyplot.ylabel('Request per Day')
	
	matplotlib.pyplot.savefig('hist_fig.png')
	
	#rank the referrer domains by how many requests each one sent
	all_vals = ref_dict.values()
	all_vals.sort(key = len, reverse = True)
	
	matplotlib.pyplot.figure()
	NUM_DOMAINS = 5
	for this_item in all_vals[0:NUM_DOMAINS]:
		hist_data, bin_edges = _day_hist(this_item)
		#the cumulative count shows when each domain drove its traffic
		matplotlib.pyplot.plot(bin_edges[1:], hist_data.cumsum(),
								label = this_item[0].ref_domain)
	
	#each plot() call above already carries label=, so legend() needs no
	#explicit label list (the original passed a redundant duplicate)
	matplotlib.pyplot.legend(loc = 0)
	matplotlib.pyplot.xlabel('Days since Meme')
	#BUG FIX: this plot shows cumsum(), not a per-day rate
	matplotlib.pyplot.ylabel('Cumulative Requests')
	matplotlib.pyplot.savefig('domain_fig.png')
	
	
	
	
	
	
	
	
	
	
