#!/usr/bin/python
#
#    This file is part of SRIS.
#
#    SRIS is free software: you can redistribute it and/or modify
#    it under the terms of the GNU General Public License as published by
#    the Free Software Foundation, either version 3 of the License, or
#    (at your option) any later version.
#
#    SRIS is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU General Public License for more details.
#
#    You should have received a copy of the GNU General Public License
#    along with SRIS.  If not, see <http://www.gnu.org/licenses/>.
#
#    Copyright 2011 David Irvine
#

import urllib, urllib2, os
import time
import tempfile
import gzip
import grp
import pwd
import urllib
import json
import logging
import ConfigParser
from urlparse import urlparse, urlunparse
from socket import gethostname,getfqdn

class LocalScanner():
	"""Scans a locally mounted file system and uploads per-file metadata
	to the SRIS server in gzip-compressed JSON chunks.

	url   -- urlparse() result whose .path is the local mount point to walk
	fs_id -- server-side id of the file system being scanned
	"""
	def __init__(self,url=None, fs_id=None):
		self.url=url
		self.fs_id=fs_id
		self.scan_id=None	# assigned by the server in createScan()
		self.files=[]		# metadata dicts buffered for the next upload()
		self.filesUnread=0	# files that could not be stat()ed
		self.filesScanned=0	# files successfully stat()ed
		self.vfsData={}		# statvfs summary, filled by readVFSData()

	def readVFSData(self):
		"""Collect size/usage figures for the file system via statvfs."""
		vs=os.statvfs(self.url.path)
		self.vfsData={
				'optional':{
					'bsize':vs.f_bsize,
					'frsize':vs.f_frsize,
					},
				# Byte counts: fragment size times block counts.
				'size':vs.f_frsize*vs.f_blocks,
				'free':vs.f_frsize*vs.f_bfree,
				'filesListed':vs.f_files,
				}

	def createScan(self):
		"""Ask the server to create a new scan and record its id."""
		url=config.get("Server","URL")
		f = urllib.urlopen("%s/scanner/%s/createScan/" % (url, self.fs_id))
		try:
			data=json.loads(f.read())
		finally:
			# Always release the connection (was previously leaked).
			f.close()
		self.scan_id=data['scan_id']

	def scan(self):
		"""Walk the file system collecting metadata for every file.

		Scans file systems that are local to the machine; generally a
		directly connected drive, but any pre-mounted file system works.
		A chunk is uploaded to the server every FilesPerChunk files, and
		a final upload is made when the walk completes.
		"""
		max_files=int(config.get("Scanner","FilesPerChunk"))

		hostname=gethostname()
		for root, subFolders, files in os.walk(self.url.path):
			for fname in files:
				try:
					localPath=os.path.join(root,fname)
					globalPath="file://%s%s"%(hostname,localPath)
					s=os.stat(localPath)
					data={
							'globalRoot':"file://%s%s"%(hostname,self.url.path),
							'globalPath':globalPath,
							'localPath':localPath,
							'uid':	s.st_uid,
							'gid':	s.st_gid,
							'size':	s.st_size,
							'atime':s.st_atime,
							'mtime':s.st_mtime,
							'ctime':s.st_ctime,
						}
					# Resolve numeric ids to names; ids with no local
					# account are tolerated and reported as UNKNOWN.
					try:
						data['uname']=pwd.getpwuid(data['uid'])[0]
					except KeyError:
						data['uname']='UNKNOWN'
					try:
						data['gname']=grp.getgrgid(data['gid'])[0]
					except KeyError:
						data['gname']='UNKNOWN'
					self.files.append(data)
					self.filesScanned+=1
					# Upload the chunk once it reaches the configured size.
					if len(self.files)>=max_files:
						logging.info("Max number of files scanned, uploading files to server")
						self.upload()
				except OSError:
					# File vanished mid-walk or is unreadable; count it
					# and keep going.
					self.filesUnread+=1
		logging.info("Scanning complete, uploading remaining files to server")
		self.upload()

	## Uploads a chunk of data to the server in the form of a compressed (gzip) file containing json data.
	def upload(self):
		# Don't upload empty data sets.
		if len(self.files)<1:
			return

		# Build the URL from the server address and the id of the scan.
		url=config.get("Server","URL")
		url="%s/scanner/%s/upload/"%(url, self.scan_id)

		# Temporary spool file; removed automatically once closed.
		tf=tempfile.TemporaryFile(mode='w+b')

		# Layer gzip compression over the spool file.  The mode is stated
		# explicitly rather than inferred from tf.mode.
		gf=gzip.GzipFile(fileobj=tf, mode='wb')

		# Dump the file information in json format out to the gzip stream.
		json.dump(self.files, gf)

		# Close the gzip file, which finalises the archive, but keep the
		# underlying temporary file open so it can be read back.
		gf.close()

		# Rewind and read the compressed payload.
		tf.seek(0)
		data=tf.read()
		# Done with the spool file; closing it deletes it from disk.
		tf.close()

		# The length of the data stream is the number of bytes in the data string.
		length=len(data)

		# Upload the data to the server.
		request = urllib2.Request(url, data=data)
		request.add_header('Cache-Control', 'no-cache')
		request.add_header('Content-Length', '%d' % length)
		request.add_header('Content-Type', 'application/gzip')
		try:
			urllib2.urlopen(request)
		except urllib2.HTTPError as error:
			# Log the server's error body instead of printing to stdout,
			# consistent with the rest of the scanner's logging.
			logging.error(error.read())

		# The chunk has been uploaded; empty the buffer in place.
		del self.files[:]

	def finalize(self):
		"""Report the scan totals and file system figures to the server."""
		self.readVFSData()
		data={
				'vfsData':self.vfsData,
				'filesScanned':self.filesScanned,
				'filesUnread':self.filesUnread,
				}
		url=config.get("Server","URL")
		url="%s/scanner/%s/finalize/"%(url, self.scan_id)
		urllib2.urlopen(url, json.dumps(data)).close()





config = ConfigParser.RawConfigParser()
# Read configuration in increasing order of precedence: system-wide,
# per-user, then the current directory.  ConfigParser.read() does not
# expand '~' itself, so the per-user path must be expanded explicitly.
config.read('/etc/sris.cfg')
config.read(os.path.expanduser('~/.sris.cfg'))
config.read('sris.cfg')

url=config.get("Server","URL")
scanner_id=config.get("Scanner","Name")

# Ask the server which file systems this scanner is responsible for.
f = urllib.urlopen("%s/scanner/list/%s/" % (url, scanner_id))
try:
	filesystems=json.loads(f.read())
finally:
	f.close()

# Run a full scan cycle (create, scan, finalize) for each local file system.
for fs in filesystems:
	o=urlparse(fs['URL'])
	if (o.scheme=='local'):
		logging.info("Scanning filesystem: %s" % fs['URL'])
		scanner=LocalScanner(url=o,fs_id=fs['id'])
	else:
		# Unsupported scheme: report it and move on to the next entry.
		logging.error("Unable to parse URL for filesystem: %s" % fs['URL'])
		continue
	scanner.createScan()
	scanner.scan()
	scanner.finalize()
