#!/usr/bin/python
#
#	Class for Extracting 2 files from s3 and comparing them
#
#	Example :
#	./s3cmp.py '{"fields": ["b","bv","cc"] ,  "sumfields":{"count":0.5}  , "faults_min":10 , "minval" : 5 , "rates" : {"count":0.5} , "group":"test2" , "file":"searches*start.b.stug.funmoods.com" , "hours":[2,5]}'
#


import sys , time , threading , re , os , json , glob
from datetime import datetime, timedelta
from commands import getoutput

# Run relative to this script's own directory so the sibling-module imports
# below (configger, groupmanager) and the relative sys.path entries resolve
# no matter what the caller's working directory is.
os.chdir( os.path.dirname(os.path.realpath(__file__)) )
from configger import *
from groupmanager import *
sys.path.append("plugins")
sys.path.append("../conf")
# SERVERS: host-name -> log-name mapping (e.g. "start.a.nano.funmoods.com" -> "i-943fa0ed")
from diccionario import SERVERS

# Base S3 bucket under which the dated per-server log directories live.
OUT_BASE = "s3://ironsrcBD"

class S3cmp(object):
	hours = []
	group_conf = {}
	compare_conf = {}
	logger = None
	tmp_dir  = "/edisk/s3tmplogs"
	files = []

	def __init__(self,conf_str ,debug=False ):
		self.logger = Logger("../logs")
		if debug:
			self.logger.setVerbose()
		self.extract_params(conf_str )


	def extract_params(self , conf_str ):
		MAND_FIELDS = ['fields','file','minval','faults_min','group' , 'hours'  ]					
		try:
			conf = json.loads(conf_str)
		except :
			print conf_str
			self.logger._log("groups.log" , "Incorrect JSON. EXIT")	
			sys.exit(1)		

		for f in MAND_FIELDS:
			if not f in conf:
				self.logger._log("groups.log" , "MISSING field %s" % (f))
				print "Error. missing fields. mandatory fields are : %s" % ( MAND_FIELDS)
				sys.exit(1)

		if len(conf["hours"]) != 2  or not re.match(r'\d+' , str(conf["hours"][0])) or not re.match(r'\d+' , str(conf["hours"][1])):
			self.logger._log("groups.log" , "Hours need to be 2 hours like '05' or '23'")
			sys.exit(1)

		if not "parser" in conf : conf["parser"] = "jsonparse"
		if not "splitter" in conf : conf["splitter"] = "^^^"

		#	SERCHBI
		#	Try to map between server host to its log name e.g "start.a.nano.funmoods.com" => "i-943fa0ed"
		#	You may need other implementaion here ...
		parts = conf["file"].split("*")
		if len(parts) > 1 and parts[1] in SERVERS : parts[1] = SERVERS[parts[1]]
		conf["file"] = "*".join(parts)

		self.name = conf["group"]
		self.group_conf = conf
		self.compare_conf = {"minval": conf["minval"] , "rates" : conf["rates"]}


	def date_before_hours(self , h , format='%Y/%m/%d'):
		date = datetime.now() - timedelta(hours=h)
		return date.strftime(format)

	def hours_before(self , h):
		date = datetime.now() - timedelta(hours=h)
		return date.strftime('%H')


		#	INOUT : searches*i-1e7ae443
	def get_logs_name(self ):
		parts 	= self.group_conf["file"].split("*")
		data = []
		for h in self.group_conf["hours"]:
			data.append({
				"dir" : "%s/%s/%s/%s/" % (OUT_BASE , parts[0]  , self.date_before_hours(int(h)) , parts[1] ) ,
				"filter" : "%s.%s-%s"  % (parts[1] ,   self.date_before_hours(int(h) , '%Y-%m-%d') , self.hours_before(int(h)))
			})
		return data

	def get_logs(self , data):
		files = []
		for row in data:
			reg = '.+' + row["filter"].replace("-","\-") + '.+'
			regex = re.compile(reg )	
			list = getoutput("s3cmd ls %s | grep -Po '\S+$'" % (row["dir"]))
			for l in list.split("\n") :
				if regex.match( l) : 
					files.append(l)
					self.files.append("%s/%s" % (self.tmp_dir , re.sub(r'(^.+/|\.bz2$)', '' , l)))

		if len(files) != 2:
			self.logger._log("groups.log" , "s3 incorrect file match")	
			print  "s3 incorrect file match"
			print files
			sys.exit(1)

		for f in files:
			getoutput("s3cmd get %s %s" % (f , self.tmp_dir))
			getoutput("bunzip2 %s/%s" % ( self.tmp_dir , re.sub(r'.+/', '' , f) ))


	def s3cmd(self):
		self.logger._log("groups.log" , "%s\tStarting Process ..." % ( self.name ))
		data = self.get_logs_name()
		self.get_logs(data)
		self.logger._log("groups.log" , "%s\tFiles Uploaded" % ( self.name ))

		self.group_conf["files"] = self.files
		parser = None
		if self.group_conf['parser'] == "jsonparse" :
			import jsonparse as pm
			parser = pm.Parser()
		gm = PairGroupManager( self.name , self.group_conf , self.compare_conf , parser , self.logger)
		_thrds = []
		for i in [0,1]:
	        	_t = threading.Thread(target=gm.run , args=(i,))
			_t.start()
			_thrds.append( _t )

		for _t in _thrds :  _t.join()
		gm.RUNNER = False
		gm.pulse()
		for f in self.files : os.unlink(f)
		self.logger._log("groups.log" , "%s\tFinish Process ..." % ( self.name ))
		

	
###########################################


#if len(sys.argv) == 1 : sys.argv.append('')
#s3 = S3cmp(sys.argv[1])
#s3.s3cmd()

		
