#!/usr/bin/env python
"""
@ auther:hongxingfan
@ desc:down login_log,reg_log,ssn_log
@ date:2015-04-10 Friday
"""

## Get this host's shard id from mysql.
## First step: list files on the company hadoop cluster, then insert each file into mysql.
import sys
import commands
import time
import os
from Queue import Queue
from threading import Thread
import threading

sys.path.append(".")

from DBUtils import MySQLDB

## ls an hdfs path; sleep and retry if it does not exist yet, otherwise return the file list
def ls_hdfs(log_path,log_name,log_date):
	"""Return the list of .gz file paths under log_path/log_name/*/dt=log_date.

	Blocks, retrying every 5 minutes, while the hadoop ls command fails
	(e.g. the day's partition has not been produced yet).  Output lines are
	sorted by modification time, one HDFS path per list element.
	"""
	hadoop_cmd = "/home/work/.jumbo/bin/hadoop dfs -ls " + log_path + "/" + log_name + "/*/dt=" + log_date + "/*.gz | awk '{print $6\" \"$7\" \"$8}' | sort | awk '{print $3}'"
	while True:
		(status,output) = commands.getstatusoutput(hadoop_cmd)
		if(status == 0):
			return output.split("\n")
		# BUG FIX: the original recursed here without `return`, so every
		# retry path returned None to the caller; retry in a loop instead.
		print("ssn_log not exists,sleep 5minutes")
		time.sleep(60 * 5) # sleep 5 minutes


## get host
## get host
def my_host():
	"""Return this machine's hostname, or "" if the OS is unrecognized.

	Reads the COMPUTERNAME environment variable on Windows ("nt") and the
	HOSTNAME shell variable on POSIX systems.
	"""
	# BUG FIX: the original bound os.name to a local called `sys`, shadowing
	# the imported sys module inside this function; renamed for clarity.
	os_family = os.name
	if os_family == "nt":
		# guard against the env var being unset (getenv returns None)
		hostname = os.getenv("computername") or ""
		return hostname.strip()
	elif os_family == "posix":
		pipe = os.popen("echo $HOSTNAME")
		try:
			return pipe.read().strip()
		finally:
			pipe.close()
	else:
		return ""

# product
class Product(Thread):
	"""Producer thread.

	Shards the HDFS file list across downloading hosts by index modulo
	hostNum, creates the local mirror directory for each owned file, records
	the pending download in mysql, and pushes the HDFS path onto the queue
	for the Consume threads.
	"""

	# total number of downloading hosts; files are sharded by index % hostNum
	hostNum = 3

	def __init__(self,host,log_name,log_date,queue,files):
		Thread.__init__(self)
		self.host = host
		self.log_name = log_name
		self.log_date = log_date
		self.queue = queue      # work queue shared with the Consume threads
		self.files = files      # full HDFS file list for log_date
		self.num = 0            # running file index used for host sharding
		# NOTE(review): credentials are hard-coded (root, empty password)
		self.db = MySQLDB("10.195.82.35","root","",3306,"utf8")
		# this host's shard id; assumed to lie in [0, hostNum) -- TODO confirm
		self.id = self.db.select("select id from pass_data.host_id where host=%s",[self.host])[0][0]
		# files already registered for this date, so reruns skip them
		self.result = self.db.select("select remote_file from pass_data.pass_info where log_date=%s", [self.log_date])
		self.has = [r[0] for r in self.result]

	def run(self):
		for f in self.files:
			self.num += 1
			# skip files belonging to another host's shard
			if(self.num % Product.hostNum != int(self.id)):
				continue
			# skip files already recorded (makes reruns idempotent)
			if(f in self.has):
				print(threading.currentThread().getName() + " existed " + f)
				continue
			# mirror the HDFS tpl=... directory layout locally
			local_dir = "/home/work/workToHadoop/ssn_log/" + f[f.index("tpl="):f.index("/attempt")]
			if(not os.path.exists(local_dir)):
				os.makedirs(local_dir)
			self.queue.put(f)
			# record the pending download before the consumers pick it up
			# (removed a dead `local_path` local the original never used)
			sql = "insert into pass_data.pass_info(host,log_name,log_date,begin_get,remote_file) values(%s,%s,%s,%s,%s)"
			begin_get = time.strftime("%Y-%m-%d %H:%M:%S",time.localtime(time.time()))
			param = (self.host,self.log_name,self.log_date,begin_get,f)
			self.db.insert(sql,param)
			print(threading.currentThread().getName() + " product " + f)

		print(threading.currentThread().getName() + " product is over!")
		self.db.close()
# consume
class Consume(Thread):
	"""Consumer thread.

	Pulls HDFS paths off the shared queue, downloads each to the local
	mirror, and records the outcome in mysql.  Exits (closing its DB
	connection) after 10 consecutive empty polls (~20 seconds idle).
	"""

	def __init__(self,queue,host,log_name,log_date):
		Thread.__init__(self)
		self.queue = queue
		self.host = host
		self.log_name = log_name
		self.log_date = log_date
		self.tryTime = 0   # count of consecutive empty-queue polls
		# NOTE(review): credentials are hard-coded (root, empty password)
		self.db = MySQLDB("10.195.82.35","root","",3306,"utf8")

	def run(self):
		while (True):
			if(self.queue.empty()):
				if(self.tryTime < 10):
					print(threading.currentThread().getName() + " consume is sleep")
					time.sleep(2)
					self.tryTime += 1
					continue
				else:
					print(threading.currentThread().getName() + " consume is exit")
					# BUG FIX: was sys.exit(1), which raised SystemExit out
					# of the thread and skipped db.close(); break instead so
					# the connection is released on the normal exit path.
					break
			else:
				f = self.queue.get()
			# BUG FIX: reset the idle counter after real work so the
			# 10-poll timeout measures consecutive idleness, not the
			# thread's lifetime total of empty polls.
			self.tryTime = 0
			print(threading.currentThread().getName() + " consume get " + f)
			self.down(f)
		self.db.close()

	def down(self, f):
		"""Download HDFS file f into the local mirror and update pass_info."""
		local_path = "/home/work/workToHadoop/ssn_log/" + f[f.index("tpl="):]
		(status,output) = commands.getstatusoutput("/home/work/.jumbo/bin/hadoop dfs -get " + f + " " + local_path)
		get_status = "success"
		if(status != 0):
			get_status = "fail"
			local_path = ""   # no local file is recorded on failure
		print(threading.currentThread().getName() + " consume " + get_status + " " + output + " " + f)
		end_time = time.strftime("%Y-%m-%d %H:%M:%S",time.localtime(time.time()))
		sql = "update pass_data.pass_info set end_get=%s,get_status=%s,local_file=%s where host=%s and log_name=%s and log_date=%s and remote_file=%s"
		param = (end_time,get_status,local_path,self.host,self.log_name,self.log_date,f)
		self.db.update(sql, param)


if __name__ == "__main__":
	## ls hdfs file ,insert into mysql and queue
##
	log_path = "/app/passport/pass_data/pass_data.db"
	log_name = "ssn_log"
	import datetime
	log_date = (datetime.date.today() - datetime.timedelta(days=2)).strftime("%Y%m%d")
	if(len(sys.argv) == 2):
		log_date = sys.argv[1]
	
	files = ls_hdfs(log_path,log_name,log_date)
	host = my_host()
	#host = "nj03-game-m22dianquan56.nj03.baidu.com"	

	queue = Queue(10)
	product = Product(host,log_name,log_date,queue,files)
	product.start()
	
	time.sleep(3)
	l_consumes = list()
	for i in range(0,5):
		l_consumes.append(Consume(queue,host,log_name,log_date))
	for i in l_consumes:
		i.start()

	for i in l_consumes:
		i.join()
	
	product.join()
	#print("The End!")
	#print my_host()
