#!/usr/bin/env python

import os, re

# log directories to scan, keyed by the web server that writes them
dirs = {
	'nginx': '/var/log/nginx',
	'httpd': '/var/log/httpd',
}

# host: per-site state -- '<server>:<hostname>' -> {'dir', 'files', 'lastlog'}
host = {}
# NOTE(review): 'logs' is never used in this file -- TODO confirm and drop
logs = {}
# files: absolute log path -> its mtime; used later for rotation detection
files = {}

# raw strings so backslash escapes reach the regex engine untouched
reHostname = re.compile(r'(.*)\.access\.log.*')
reStats = re.compile(r'awstats\d+\.(.*)\.txt')
reSiteLine = re.compile(r'SiteDomain=.*')
reLogLine = re.compile(r'LogFile=.*')
reOtherLine = re.compile(r'^\s*(#.*|)$')
reBz2 = re.compile(r'.*\.bz2')
reLog = re.compile(r'.*\.log')
# collecting unique log files with their archived copies
# ('file' renamed to 'entry' so the builtin is not shadowed)
for srv in dirs:
	for entry in os.listdir(dirs[srv]):
		# we think that each file should end with '.access.log' and optionally with '.bz2' extension
		hostname = reHostname.search(entry)
		if hostname:
			name = '%s:%s'%(srv, hostname.group(1))
			# direct dict membership instead of scanning host.keys()
			if name not in host:
				host[name] = {'dir':'', 'files':{}, 'lastlog':0}
			host[name]['dir'] = dirs[srv]
			filename = '%s/%s'%(dirs[srv], entry)
			# stat()[8] is st_mtime -- the last modification time
			last_change = os.stat(filename)[8]
			# NOTE(review): two logs of one site sharing an mtime collide on
			# this key and one of them is silently dropped -- TODO confirm
			host[name]['files'][last_change] = filename
			files[filename] = last_change
# checking data files for modification times
for file in os.listdir('/usr/local/www/awstats/cgi-bin'):
	# we think that each data file should be in format 'awstats<numbers>.*.txt'
	log = reStats.search(file)
	if log:
		name = log.group(1)
		if name in host:
			modified = os.stat('/usr/local/www/awstats/cgi-bin/' + file)[8]
			if modified > host[name]['lastlog']:
				host[name]['lastlog'] = modified
		else:
			# in case there are some stale data files
			print "Can't find source for awstats log %s"%file

# creating missing configs and feeding fresh logs to awstats, site by site
for site in sorted(host):
	thisConfig = '/usr/local/www/awstats/cgi-bin/awstats.' + site + '.conf'
	# creating config if there's no any
	if not os.access(thisConfig, os.F_OK):
		# we are using awstats provided template
		sourceConfig = '/usr/local/www/awstats/cgi-bin/awstats.model.conf'
		if thisConfig == sourceConfig:
			# bug fix: previously we warned but then fell through and opened
			# the template for writing, truncating it; now creation is skipped
			print 'Unexpected error - trying to recreate sample config'
		else:
			source = open(sourceConfig, 'r')
			this = open(thisConfig, 'w')
			for line in source:
				stripped = line.strip()
				if reOtherLine.match(stripped):
					# drop template comments and blank lines
					continue
				elif reLogLine.match(stripped):
					# read the last rotated archive first, then the live log
					this.write('LogFile="bzcat %(dir)s/%(site)s.access.log.0.bz2 ; cat %(dir)s/%(site)s.access.log |"\n'%{'dir':host[site]['dir'], 'site':site})
				elif reSiteLine.match(stripped):
					this.write('SiteDomain="%s"\n'%site)
				else:
					this.write(line)
			# bug fix: both handles were left open before
			this.close()
			source.close()
	filetimes = sorted(host[site]['files'])
	# finally processing all log files, oldest first
	for eachtime in filetimes:
		# but only the ones that really are newer than any of our data files
		if eachtime > host[site]['lastlog']:
			# TODO: beam me up if happens !!!
			# there is a small possibility the log rotates between our freshness
			# check and actual awstats run; if this ever happens then awstats data
			# files should be backed up before execution, the awstats would be run,
			# and only after that we can have a freshness check and optional rollback
			logfile = host[site]['files'][eachtime]
			if reBz2.match(logfile):
				# if this is a compressed archived log - recheck its mtime to
				# make sure it was not rotated away since we scanned the dir
				if os.stat(logfile)[8] == eachtime:
					# NOTE(review): site and path reach the shell unquoted;
					# acceptable for trusted log dirs, but worth hardening
					os.system('cd /usr/local/www/awstats/cgi-bin; ./awstats.pl -config=%s -LogFile="bzcat %s |" >/dev/null'%(site, logfile))
			elif reLog.match(logfile):
				# if this is the live log - its mtime keeps moving, so we look
				# at the last compressed archive instead: if that one did not
				# change there was no rotate, or if there is no archive at all
				# then this log is current
				last_compressed = '%s.0.bz2'%logfile
				if not ( os.access(last_compressed, os.F_OK) and not os.stat(last_compressed)[8] == files[last_compressed] ):
					os.system('cd /usr/local/www/awstats/cgi-bin; ./awstats.pl -config=%s -LogFile=%s >/dev/null'%(site, logfile))
			else:
				break

# updating index -- one <url> entry per known site
index = open('/usr/local/www/awstats/index.xml', 'w')

index.write('''<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="/dirindex.xsl"?>
<!DOCTYPE dir [
	<!ELEMENT dir	(url)*>
	<!ELEMENT url	EMPTY>
	<!ATTLIST url	name CDATA #REQUIRED>
]>
<dir>
''')
# sorted so the generated index is stable between runs
for site in sorted(host):
	index.write('	<url name="' + site + '" />\n')
index.write('</dir>\n')
# bug fix: the index file handle was never closed
index.close()
