#!/usr/bin/env python
import os.path
import config
import re

# settings
filelocation="/media/data/Projects/Sambaindex/Filelist.txt"	# pipe-delimited scan output to import
ignoredlines=['smb://155.92.104.57/OtherServers']	# input lines starting with any of these prefixes are skipped

# initial variables (module-level state shared by the import loop below)
nextscan=0	# current scan number, fetched from the database later
curhostip=''	# ip of the host currently being parsed
curhostid=0	# database id of the host currently being parsed
curpath=''	# path currently being parsed
curpathid=0	# database id of the path currently being parsed
skip=0		# whether to skip a host because it's a duplicate
files=[]	# a list of files to commit at once
foundhosts={}	# all of the seen hosts, used for duplicate host prevention
pathids={}	# all of the paths and what their id is
fileids={}	# files and what their id is

pathparents={}	# mapping of path ids to the parent path id
pathnums={}	# pathid -> number of files counted under it (including descendants)
pathsums={}	# pathid -> total bytes counted under it (including descendants)

host_stats={}	# host statistics {hostid:ip}
file_stats=[]	# file statistics [fileid, filesize]
file_stats_single=[]	# file statistics [fileid, filedate]

notvaliddate=re.compile(r'[^\d]+')	# used to strip every non-digit character from a date string

def gethostname(ip):
	"""
	Return the short (unqualified) host name for *ip*, or None when no
	name could be found.

	Tries reverse DNS first; on failure falls back to parsing the output
	of nmblookup (NetBIOS name lookup).
	"""
	hostname=None
	try:
		fullhostname=socket.gethostbyaddr(ip)[0]
		hostname=fullhostname.split('.')[0]
		print("  Found hostname %s via dns"%hostname)
	except socket.error:
		# bug fix: the original bare "except:" also swallowed
		# KeyboardInterrupt, SystemExit and programming errors;
		# socket.error covers herror/gaierror from gethostbyaddr.
		# could not find it by dns -- fall back to NetBIOS
		for line in os.popen("nmblookup -A %s"%ip).readlines():
			if "<ACTIVE>" in line and "GROUP" not in line and not hostname:
				hostname=line.split()[0].lower()
				print("  Found hostname %s via nmblookup"%hostname)
	return hostname
		
	
def loadExtensions(cursor):
	"""Return an {extension: id} mapping of every row in the extensions table."""
	print("Loading list of extensions")
	cursor.execute("select id,extension from extensions")
	return dict((row[1],row[0]) for row in cursor.fetchall())

def addFileToPath(pathid,filesize):
	"""
	Add one file of *filesize* bytes to the running totals for *pathid*
	and every ancestor path, walking upward through pathparents.
	"""
	global pathnums, pathsums
	current=pathid
	while True:
		# update this path's counters, creating them on first sight
		pathsums[current]=pathsums.get(current,0)+filesize
		pathnums[current]=pathnums.get(current,0)+1
		parent=pathparents[current]
		if not parent:	# reached a root path
			break
		current=parent
	
def getFullPath(hostname,pathname):
	"""Join a host name and a path into the canonical "host/path" form."""
	return "%s/%s"%(hostname,pathname)

def loadPathIds(hostid):
	"""Return a {fullpathname: pathid} mapping of every path stored for *hostid*."""
	print("Loading list of paths")
	global db
	pathcur=database.streamingCursor(db)
	pathcur.execute("select paths.id,paths.fullpathname from paths,hosts where paths.hostid=hosts.id and hosts.id=%s",hostid)
	paths={}
	# stream one row at a time to keep memory bounded
	row=pathcur.fetchone()
	while row is not None:
		paths[row[1]]=row[0]
		row=pathcur.fetchone()
	pathcur.close()
	return paths

def loadPathParents(hostid):
	"""
	Return a {pathid: parentpathid} mapping for every path belonging to
	*hostid* (root paths map to NULL/None).
	"""
	parents={}
	print("Loading tree of path parents")
	global db
	cursor=database.streamingCursor(db)
	cursor.execute("select paths.id,parentpathid from paths,hosts where paths.hostid=hosts.id and hosts.id=%s",hostid)
	# fix: fetch row-by-row -- calling fetchall() on a streaming cursor
	# buffered the entire result set, defeating the streaming cursor;
	# this also matches how loadPathIds consumes its cursor
	item=cursor.fetchone()
	while item:
		parents[item[0]]=item[1]
		item=cursor.fetchone()
	cursor.close()
	return parents

def loadFileIds(hostid,pathid):
	"""
	Return a {pathid: {filename: fileid}} mapping of the files already
	stored for the given host and path.

	Bug fix: the original ignored its hostid/pathid parameters and read
	the module globals curhostid/curpathid instead -- it only worked
	because every caller happened to pass exactly those globals.
	"""
	fileids={}
	global db
	cursor=database.streamingCursor(db)
	cursor.execute("select id,filename from files where hostid=%s and pathid=%s",(hostid,pathid))
	row=cursor.fetchone()
	while row:
		if not pathid in fileids:
			fileids[pathid]={}
		fileids[pathid][row[1]]=row[0]

		row=cursor.fetchone()
	cursor.close()
	return fileids

	
def savePath(curhostid,pathname):
	"""
	Return the database id for *pathname* on host *curhostid*, inserting
	a new paths row (and, recursively via getParentPathId, any missing
	ancestor rows) when the path has not been seen before.

	Side effects: updates the module-level caches pathids and
	pathparents, and seeds pathnums/pathsums counters for the path.
	"""
	global pathids
	if pathname in pathids:
		# already known: reuse the cached id
		curpathid=pathids[pathname]
	else:
		# create any missing ancestors first so we can reference the parent
		parentpathid=getParentPathId(curhostid, pathname)
		shortpathname='/'.join(pathname.split('/')[-2:])	# only include the last 2 path elements in the indexed column
		pathcursor.execute("insert into paths (parentpathid, hostid, pathname, fullpathname) values (%s,%s,%s,%s)",[parentpathid,curhostid,shortpathname,pathname])
		curpathid=pathcursor.lastrowid
		pathids[pathname]=curpathid

		global pathparents
		pathparents[curpathid]=parentpathid

	# make sure the statistics counters exist for this path
	global pathnums, pathsums
	if curpathid not in pathnums:
		pathnums[curpathid]=0
	if curpathid not in pathsums:
		pathsums[curpathid]=0

	return curpathid
		
def getParentPathId(hostid,pathname):
	"""
	Return the id of *pathname*'s parent path on host *hostid*, creating
	parent rows as needed (via savePath), or None when *pathname* is a
	root path with no parent.
	"""
	parentpathname=os.path.dirname(pathname)

	global pathids
	if len(parentpathname)>0:	# not a root path, so has a parent
		# bug fix: look up the PARENT's name in the cache -- the original
		# checked the child's own name, which was always absent when
		# called from savePath and, if it ever hit, would have returned
		# the child's id as its own parent
		if parentpathname in pathids:
			return pathids[parentpathname]
		else:
			return savePath(hostid,parentpathname)
	return None

def savePathStats(nextscan,pathnums,pathsums):
	"""
	Flush the accumulated per-path file counts and byte totals into the
	temp_path_stats table.  (nextscan is currently unused but kept for
	interface compatibility.)
	"""
	rows=[]
	for pid in pathnums:
		rows.append([pid, pathnums[pid], pathsums[pid]])
	pathcursor.executemany("insert into temp_path_stats (pathid, numsharedfiles, totalsize) values (%s,%s,%s)",rows)


# --- one-time setup: database connection, cursors and lookup caches ---
print "Connecting to database"
import database
db=database.getConnection()
cursor=db.cursor()

# used for hostname resolution
import socket

# keep track of the current scan number
nextscan=database.getNextScan(cursor)

# create/clear the temp_* staging tables that the import writes into
database.initTempTables(cursor)

hostcursor=db.cursor()	# used for host operations
pathcursor=db.cursor()	# used for path operations
filecursor=db.cursor()	# used for file operations

extensions=loadExtensions(cursor)		# mapping of extension name to extension id


# main import loop: read the scan output one pipe-delimited line at a time
# (smb://ip/path/file|user|pass|size|date|blank) and load hosts, paths and
# files into the database, accumulating statistics as we go
inputfile=open(filelocation,'r')	# fix: file() builtin is deprecated; open() is identical
for line in inputfile:
	# skip lines belonging to explicitly ignored prefixes
	ignoredhost=False
	for host in ignoredlines:
		if line[:len(host)].lower()==host.lower():
			ignoredhost=True
			break
	if ignoredhost:
		continue
	sections=line.strip().split('|')
	if len(sections)==6:
		[url,username,password,filesize,filedate,blankip]=sections
		filesize=int(filesize)
		filedate=notvaliddate.sub('', filedate)	# keep only the digits of the date

		# parse the url into ip, pathname and filename
		hostendindex=url.find('/',6)
		ip=url[6:hostendindex]

		pathendindex=url.rfind('/')
		pathname=url[hostendindex+1:pathendindex]

		filename=url[pathendindex+1:]

		if ip.count('.')!=3:		# only support hosts-by-ip
			continue

		if curhostip!=ip:		# new host
			skip=0
			print("Looking up hostname for %s"%ip)
			hostname=gethostname(ip)

			if not hostname:
				print("  Could not find hostname, skipping")
				skip=1
				curhostip=ip
				continue

			# reset the current share and path so that they get new ids because they're with the new host
			curhostip=ip
			curpath=None

			if hostname.lower() in foundhosts:
				if foundhosts[hostname.lower()]!=ip:	# the ip changed, but is the same hostname
					print("Skipping %s because %s was already found"%(ip,hostname))
					skip=1
					continue
				else:
					print("Found another run of %s"%ip)
			foundhosts[hostname.lower()]=ip

			# reuse the existing hosts row if there is one
			hostcursor.execute("select id from hosts where protocol like 'smb' and hostname like %s",[hostname])
			if hostcursor.rowcount==1:
				curhostid=hostcursor.fetchone()[0]
			else:
				print("Parsing host %s"%hostname)
				hostcursor.execute("insert into hosts (protocol,hostname) values ('smb',%s)",[hostname])
				curhostid=hostcursor.lastrowid
			print("New hostid: %s"%curhostid)

			host_stats[curhostid]=ip

			# flush the previous host's path stats, then load up the
			# path information for the new host
			savePathStats(nextscan,pathnums,pathsums)
			pathids=loadPathIds(curhostid)
			pathparents=loadPathParents(curhostid)
			pathsums.clear()
			pathnums.clear()
			print("Adding files")

		if skip:	# if i'm supposed to ignore this host, because it's a duplicate
			continue

		if curpath!=pathname:
			curpathid=savePath(curhostid,pathname)
			curpath=pathname

			# load up the list of known files
			fileids=loadFileIds(curhostid,curpathid)

		# resolve the extension to its id, creating the row when unseen
		dotposition=filename.rfind('.')
		if dotposition == -1:
			extensionid=None
		else:
			extension=filename[dotposition + 1 : dotposition + 1 + config.extensionlength].lower()

			if extension not in extensions:	# not in list of extensions
				# bug fix: parameters must be a sequence -- passing the bare
				# string made the driver treat each character as a separate
				# parameter (every other execute() in this file passes a list)
				cursor.execute('insert into extensions (extension) values (%s)',[extension])
				extensionid=cursor.lastrowid
				extensions[extension]=extensionid
			else:
				extensionid=extensions[extension]

		# increase the path's total size
		addFileToPath(curpathid,filesize)

		# reuse the existing files row for this path/filename when known
		if curpathid in fileids and filename in fileids[curpathid]:
			curfileid=fileids[curpathid][filename]
		else:
			filecursor.execute("insert into files (hostid, pathid, filename, extensionid, added_date) values (%s,%s,%s,%s,CURDATE())",[curhostid,curpathid,filename,extensionid])
			curfileid=filecursor.lastrowid

		if len(filedate)==14:	# a full 14-digit YYYYMMDDHHMMSS timestamp means the line is valid
			file_stats.append([curfileid,filesize])
			file_stats_single.append([curfileid,filedate])

		# flush the statistics in batches so memory use stays bounded
		if len(file_stats)>10000:
			try:
				filecursor.executemany("insert into temp_file_stats (fileid, filesize) values (%s,%s)",file_stats)
			except Exception as e:	# bug fix: report what went wrong instead of hiding it
				print("Could not save file stats batch: %s"%e)
			file_stats=[]
		if len(file_stats_single)>10000:
			try:
				filecursor.executemany("insert into temp_file_stats_single (fileid, modified_date) values (%s,%s)",file_stats_single)
			except Exception as e:	# bug fix: report what went wrong instead of hiding it
				print("Could not save file stats batch: %s"%e)
			file_stats_single=[]
	else:
		print("This line does not have 6 sections: "+line.strip())
inputfile.close()	# bug fix: the input file was never closed

# flush whatever remains in the final partial batches
filecursor.executemany("insert into temp_file_stats (fileid, filesize) values (%s,%s)",file_stats)
filecursor.executemany("insert into temp_file_stats_single (fileid, modified_date) values (%s,%s)",file_stats_single)

# roll the per-path statistics up to per-host totals: sum each host's
# root-path rows, pair them with the host's ip, then promote all of the
# temp tables into their live counterparts
print("Saving statistics")
savePathStats(nextscan,pathnums,pathsums)

pathcursor.execute("""SELECT hosts.id,sum(temp_path_stats.numsharedfiles), sum(temp_path_stats.totalsize)
        FROM hosts,temp_path_stats,paths
        WHERE paths.hostid=hosts.id and temp_path_stats.pathid=paths.id and paths.parentpathid is null
        GROUP BY paths.hostid""")

hoststatrows=[]
for statrow in pathcursor.fetchall():
	hoststatrows.append((statrow[0],host_stats[statrow[0]],statrow[1],statrow[2]))
hostcursor.executemany("insert into temp_host_stats (hostid, ip, numsharedfiles, totalsize) values (%s,%s,%s,%s)",hoststatrows)

database.commitTempTables(cursor)
