#!/usr/bin/env python
#coding=utf8
"""
@描述：将日志上传到hdfs
@重点：多线程去数据库拿记录的同步问题
@日期：2015-05-05 Tuesday
"""

import sys
import commands
import time
import os
import threading
from threading import Thread

sys.path.append(".")
from DBUtils import MySQLDB

class UpLoad(Thread):
	"""Worker thread that uploads local log files to HDFS.

	Each worker repeatedly claims a small batch of pending records from
	MySQL (the claim phase is serialized through a shared lock so two
	workers never grab the same rows), makes sure the matching Hive
	partition exists, then puts every claimed file onto HDFS and writes
	the outcome back to the database.
	"""
	# Hive partitions already created in this process, shared by all
	# workers.  Only touched from createPartition(), which run() calls
	# while holding self.lock, so access is serialized in practice.
	partitions = list()
	def __init__(self,log_name, lock, selectSql, updateSql1, updateSql2):
		"""Set up one worker.

		log_name   -- log/table directory name expected inside each file path
		lock       -- threading.Lock shared by all workers (guards the claim phase)
		selectSql  -- query returning pending (local_file,) rows
		updateSql1 -- marks a row as claimed (sets begin_put)
		updateSql2 -- records the final result (sets end_put, put_status)
		"""
		Thread.__init__(self)
		self.log_name = log_name
		self.lock = lock
		self.selectSql = selectSql
		self.updateSql1 = updateSql1
		self.updateSql2 = updateSql2
		# Each thread opens its own DB connection rather than sharing one.
		self.dbHandler =  MySQLDB("10.195.82.35","root","",3306,"utf8")
		self.tryTime = 0     # consecutive empty polls after the first batch
		self.hasLog = False  # True once this worker has claimed any work
	def run(self):
		"""Poll for pending files and upload them until the work dries up."""
		while(True):
			result = list()
			# The lock covers both the SELECT and the begin_put UPDATEs
			# below, so concurrent workers cannot claim the same rows.
			self.lock.acquire()
			result = self.dbHandler.select(self.selectSql, [])
			print(threading.currentThread().getName() + " load " + str(len(result)) + " records")
			if(len(result) <= 0):
				# No pending rows: back off, or exit once work seems done.
				if(not self.hasLog):
					# Never had any work yet: sleep 10 minutes and re-poll.
					print(threading.currentThread().getName() + " is sleep")
					self.lock.release()
					time.sleep(10 * 60)
					continue
				else:
					if(self.tryTime > 10):
						# Already processed some files and the queue stayed
						# empty for more than 10 short polls: assume done.
						print(threading.currentThread().getName() + " is exit")
						self.lock.release()
						break
					else:
						print(threading.currentThread().getName() + " is sleep")
						self.tryTime += 1
						self.lock.release()
						time.sleep(10)
						continue
			self.hasLog = True
			begin_put = time.strftime("%Y-%m-%d %H:%M:%S",time.localtime(time.time()))
			# Claim phase (still under the lock): stamp begin_put and make
			# sure the Hive partition exists before any file is uploaded.
			for (local_file) in result:
				self.dbHandler.update(self.updateSql1, [begin_put, local_file[0]])
				self.createPartition(local_file[0])
			self.lock.release()
			# Upload phase runs outside the lock so workers transfer in
			# parallel.  NOTE(review): any exception raised above while the
			# lock is held would leave it locked forever and stall every
			# worker — confirm MySQLDB cannot raise here, or use try/finally.
			for (local_file) in result:
				self.put(local_file[0])
		self.dbHandler.close()
	
	## upload
	def put(self,local_file):
		"""Copy one local file into HDFS and record the outcome in MySQL.

		The HDFS directory is derived from the part of local_file between
		self.log_name and "atte"; both substrings must be present in the
		path (str.index raises ValueError otherwise).
		"""
		hdfs_path = "/user/hive/warehouse/pass_data.db/" + local_file[local_file.index(self.log_name):local_file.index("atte")]
		# Create the target directory only when `hdfs dfs -ls` fails.
		(status,output) = commands.getstatusoutput("source /home/hadoop/.bashrc && /home/hadoop/hadoop-2.5.0-cdh5.2.3/bin/hdfs dfs -ls " + hdfs_path)
		if(status != 0):
			commands.getstatusoutput("source /home/hadoop/.bashrc && /home/hadoop/hadoop-2.5.0-cdh5.2.3/bin/hdfs dfs -mkdir -p " + hdfs_path)
		(status, output) = commands.getstatusoutput("source /home/hadoop/.bashrc && /home/hadoop/hadoop-2.5.0-cdh5.2.3/bin/hdfs dfs -put " + local_file + " " + hdfs_path)
		put_status = "success"
		if(status != 0):
			# A file that already exists on HDFS still counts as success
			# (e.g. a re-run after a partially completed batch).
			if("exists" not in output):
				put_status = "fail"
				print(output)
		end_put = time.strftime("%Y-%m-%d %H:%M:%S",time.localtime(time.time()))
		self.dbHandler.update(self.updateSql2,[end_put, put_status, local_file])
		print(threading.currentThread().getName() + " put " + put_status + " " + local_file)
	## create partition
	def createPartition(self, local_file):
		"""Ensure the Hive partition for local_file exists; record status.

		Expects the path to contain "tpl=<tpl>/dt=<dt>/atte"; extracts both
		values, dedupes through the class-level `partitions` cache, and
		otherwise issues `ALTER TABLE ... ADD PARTITION` via the hive CLI.
		"""
		tpl = local_file[local_file.index("tpl=") + 4 : local_file.index("/dt=")]
		dt = local_file[local_file.index("dt=") + 3 : local_file.index("/atte")]
		p = "tpl=" + tpl + ",dt=" + dt
		update = "update pass_data.pass_info set partition_status=%s where local_file=%s"
		partition_status = "success"
		if(p in UpLoad.partitions):
			# Some worker already created this partition during this run.
			self.dbHandler.update(update, [partition_status, local_file])
			print(threading.currentThread().getName() + " partition " + partition_status + " " + local_file)
			return
		UpLoad.partitions.append(p)
		hadoop_cmd = "use pass_data;alter table ssn_log add partition(tpl='" + tpl + "',dt=" + dt + ") location 'hdfs:///user/hive/warehouse/pass_data.db/ssn_log/tpl=" + tpl + "/dt=" + dt + "'"
		(status, output) = commands.getstatusoutput("source /home/hadoop/.bashrc && /home/hadoop/hive-0.13.1-cdh5.2.3/bin/hive -e \"" + hadoop_cmd + "\"")
		if(status != 0):
			# A partition created by an earlier run still counts as success.
			if("AlreadyExistsException" not in output):
				partition_status = "fail"
				print(output)
		self.dbHandler.update(update, [partition_status, local_file])
		print(threading.currentThread().getName() + " partition " + partition_status + " " + local_file)
## get host
def my_host():
	"""Return this machine's hostname, or "" on unrecognized platforms.

	On Windows ("nt") the COMPUTERNAME environment variable is read; on
	POSIX the shell's $HOSTNAME is queried via a subshell (the shell sets
	HOSTNAME even when it is not exported, so os.environ alone would miss
	it).  Fix: the original bound the local name `sys` to os.name, which
	shadowed the imported `sys` module inside this function; it also
	crashed with AttributeError when COMPUTERNAME was unset.
	"""
	os_kind = os.name
	if os_kind == "nt":
		# Default to "" so .strip() cannot fail on a missing variable.
		hostname = os.getenv("computername", "")
		return hostname.strip()
	elif os_kind == "posix":
		host = os.popen("echo $HOSTNAME")
		try:
			hostname = host.read()
			return hostname.strip()
		finally:
			# Always close the pipe, even if read() raises.
			host.close()
	else:
		return ""

if __name__ == "__main__":
	import datetime

	# Default to the logs from two days ago unless an explicit YYYYMMDD
	# date was passed on the command line.
	log_date = (datetime.date.today() - datetime.timedelta(days=2)).strftime("%Y%m%d")
	if len(sys.argv) == 2:
		log_date = sys.argv[1]

	host = my_host()
	# NOTE(review): log_date/host are concatenated straight into the SQL
	# text because UpLoad.run() executes selectSql with no parameters;
	# consider parameterizing both sides together.
	selectSql = "select local_file from pass_data.pass_info where get_status='success' and end_get is not null and begin_put is null and log_date='" + log_date + "' and host='" + host + "' limit 3"
	updateSql1 = "update pass_data.pass_info set begin_put=%s where local_file=%s"
	updateSql2 = "update pass_data.pass_info set end_put=%s,put_status=%s where local_file=%s"

	log_name = "ssn_log"
	lock = threading.Lock()

	# Five workers share one lock so the DB claim phase stays serialized.
	workers = [UpLoad(log_name, lock, selectSql, updateSql1, updateSql2) for _ in range(5)]
	for worker in workers:
		worker.start()
	for worker in workers:
		worker.join()
