#!/usr/bin/python
# coding=utf8
import os
import re
import json
import time
from datetime import datetime

appName="testApp"                      # application name reported in each upload payload
hostName="localhost:8080"              # host identifier reported in each upload payload
logType="kylinlog"                     # log type tag expected by the collector
server="http://10.252.18.78:9090/log"  # collector endpoint that receives the JSON POST
filePrefix="sm.log"                    # log files to scan: everything matching filePrefix*


tmpDir="/tmp/kylog/"                   # working directory for upload payload and bookkeeping
tmpUploadFile="tmp.json"               # temp file holding the JSON body passed to curl
tmpFileInfo="kylog.info"               # bookkeeping file: "inode processed-line-count path" per log file


def sendLog(kylogfilepath,startLine,endLine):
	"""Parse Kylin query records from lines startLine..endLine of kylogfilepath
	and POST them as JSON to `server`.

	A sed/awk shell pipeline flattens each multi-line query record into
	single timestamped lines tagged with the 36-char query id; this function
	then folds those lines into one dict per query and uploads the batch.

	Returns the collector's response "code" (0 meaning success), 0 when the
	slice contained no complete records, or None when curl produced no output.
	"""
	# sed slices the requested range; awk stitches continuation lines onto a
	# synthetic "19700101 000000000" timestamp carrying the query id; the tail
	# of the pipeline numbers lines, keeps only "[Query ..." lines, re-sorts by
	# query id (stable within each query) and duplicates "Using project:" lines
	# so each record is preceded by its header.
	awkcmd=r"""
	sed -n '%s,%sp' %s |
	awk '
		function ltrim(s) { sub(/^[ \t\r\n]+/, "", s); return s }
		function rtrim(s) { sub(/[ \t\r\n]+$/, "", s); return s }
		function trim(s) { return rtrim(ltrim(s)); }
		/^Query Id:/{split($0,rowdata,":");}
		/^SQL:/{s=trim($0);next;}
		/^User:/{print "19700101 000000000 INFO  [Query "trim(rowdata[2])" "s;s="";print $0;}
		/^==========================/{delete rowdata;}
		{
			if(rowdata[2]!="")
				if(s!="") s=(s" "trim($0))
				else
					print "19700101 000000000 INFO  [Query "trim(rowdata[2])" "$0;
			else gsub("[,:-]","",$1);gsub("[,:-]","",$2);$5=substr($5,0,36);print ($0);
		}

	'   | nl -n ln | grep "\\[Query" | tr "\t" " " | tr -s '  ' ' '  |  sort   -k6,6 -k1n,1   | cut -d ' ' -f 2,3,4,6-  | sed '/Using project:/{x;p;x;}'
	"""%(startLine,endLine,kylogfilepath)

	logdata={}

	groupRe=re.compile(r"groupsD=\[.*?\]")
	filterRe=re.compile(r"filterD=\[.*?\]")

	# (marker, destination field, converter) for the one-line summary fields
	# emitted at the end of each query record; replaces 18 copy-pasted ifs.
	fieldSpecs=[
		(" SQL: ","sql",str),
		(" User: ","user",str),
		(" Success: ","success",str),
		(" Duration: ","duration",str),
		(" Project: ","project",str),
		(" Realization Names: ","realizationNames",str),
		(" Cuboid Ids: ","cuboidIds",str),
		(" Total scan count: ","totalScanCount",int),
		(" Total scan bytes: ","totalScanBytes",int),
		(" Result row count: ","resultRowCount",int),
		(" Accept Partial: ","acceptPartial",str),
		(" Is Partial Result: ","isPartialResult",str),
		(" Hit Exception Cache: ","hitExceptionCache",str),
		(" Storage cache used: ","storageCacheUsed",str),
		(" Is Query Push-Down: ","isQueryPushDown",str),
		(" Is Prepare: ","isPrepare",str),
		(" Trace URL: ","traceURL",str),
		(" Message: ","message",str),
	]

	kylogs=os.popen(awkcmd).readlines()
	for line in kylogs:
		data=line.split()
		if "Using project:" in line:
			# data[0]=yyyymmdd, data[1]=HHMMSSmmm, data[3]=query id, data[8]=project
			d,t=data[0],data[1]
			logdata[data[3]]={
				"project":data[8],
				"queryTime":d[0:4]+"-"+d[4:6]+"-"+d[6:]+" "+t[0:2]+":"+t[2:4]+":"+t[4:6]+"."+t[6:],
				"sb":[],
				"queryId":data[3],
				"groupList":"",
				"filterList":"",
				"segment":"",
			}

		# BUG FIX: the original tested len(data)>=3 and then read data[3],
		# raising IndexError on exactly-3-token lines; require >=4 tokens.
		if len(data)>=4 and len(data[3])==36 and logdata.get(data[3],None)!=None:
			logdata[data[3]]["sb"].append(line)

			# groupsD=[...] / filterD=[...] lists: strip brackets and spaces,
			# keep a trailing comma when non-empty (downstream concatenation).
			for rx,marker,key in ((groupRe,"groupsD=[","groupList"),
			                      (filterRe,"filterD=[","filterList")):
				if marker in line:
					s=rx.search(line).group().replace(marker,"").replace("]","").replace(" ","")
					if len(s)!=0:
						s=s+","
					logdata[data[3]][key]=s

			if "Init CubeSegmentScanner for segment" in line:
				tag="Init CubeSegmentScanner for segment"
				logdata[data[3]]["segment"]=line[line.find(tag):].replace(tag,"").strip()

			if "19700101 000000000" in line:
				for marker,key,conv in fieldSpecs:
					if marker in line:
						logdata[data[3]][key]=conv(line[line.find(marker):].replace(marker,"").strip())

	# Join buffered raw lines, normalize "true"/"false" strings to booleans,
	# and drop incomplete records (no Message: line seen).
	# BUG FIX: iterate over snapshots — the original popped from logdata while
	# iterating logdata.items(), which raises RuntimeError on Python 3.
	for queryId,obj in list(logdata.items()):
		obj["sb"]="".join(obj["sb"])
		for field,val in list(obj.items()):
			if val=="true":
				obj[field]=True
			elif val=="false":
				obj[field]=False
		if "message" not in obj:
			logdata.pop(queryId)

	if len(logdata)==0:
		return 0

	result={}
	result["appName"]=appName
	result["hostName"]=hostName
	result["type"]=logType
	result["collectTime"]=int(round(time.time()*1000))
	# list(...) so the payload serializes on Python 3 as well (dict_values
	# is not JSON-serializable there).
	result["logData"]=json.dumps({"LogList":list(logdata.values())})

	if not os.path.exists(tmpDir):
		os.makedirs(tmpDir)
	with open(tmpDir+tmpUploadFile,"w") as f:
		f.write(json.dumps(result))

	# BUG FIX: "-compress" is not a curl option (curl parses it as
	# "-c ompress", i.e. cookie-jar "ompress"); "--compressed" is required to
	# decode the gzip body requested via the Accept-Encoding header.
	response=os.popen('curl --limit-rate 8M --retry-max-time 3000 --max-time 3000  --compressed --header "Accept-Encoding: gzip" --header "Content-type: application/json;charset=UTF-8" -X POST -d @'+tmpDir+tmpUploadFile+' '+ server ).readlines()
	for r in response:
		return json.loads(r).get("code")

def saveFileInfo():
	"""Persist the global fileinfo table to the bookkeeping file.

	Each entry is written as "inode processed-line-count path". Entries whose
	file no longer exists or whose inode changed (file was rotated/replaced)
	are silently dropped so stale bookmarks do not accumulate.
	"""
	global fileinfo
	with open(tmpDir+tmpFileInfo,"w") as f:
		# v = [inode, processed-line-count, path]; the key duplicates v[0],
		# so iterate values directly (the original discarded an unused key).
		for v in fileinfo.values():
			if os.path.exists(v[2]) and v[0]==str(os.stat(v[2]).st_ino):
				f.write(" ".join(v)+"\n")

def initFileInfo():
	"""Bootstrap the bookkeeping file on first run.

	If it does not exist yet, record "inode 1 path" for every file matching
	filePrefix* (in `ls -ail` output $1 is the inode and $10 the name), so
	processing starts from line 1 of each log.
	"""
	if not os.path.exists(tmpDir+tmpFileInfo):
		cmd="ls -a -i -l "+filePrefix+"* | awk '{print $1\" \"1\" \"$10}' > "+tmpDir+tmpFileInfo
		os.system(cmd)

def getFileInfo():
	"""Reload the global fileinfo dict from the bookkeeping file.

	Each line has the form "inode processed-line-count path"; entries are
	keyed by inode (the first field).
	"""
	global fileinfo
	fileinfo={}
	with open(tmpDir+tmpFileInfo,"r") as f:
		for raw in f.readlines():
			fields=raw.strip().split(" ")
			fileinfo[fields[0]]=fields

def appendFileInfo():
	"""Merge the current directory listing into the global fileinfo dict.

	New inodes are added with a processed-line count of 1; already-known
	inodes only get their path refreshed, so a rotated/renamed file keeps its
	processed-line count (the inode survives a rename).
	"""
	global fileinfo
	cmd="ls -a -i -l "+filePrefix+"* | awk '{print $1\" \"1\" \"$10}'"
	for line in os.popen(cmd):
		l=line.strip().split(" ")
		if l[0] not in fileinfo:
			fileinfo[l[0]]=l
		else:
			# keep the stored line count, update only the (possibly new) path
			fileinfo[l[0]][2]=l[2]

fileinfo={}
# Bootstrap / refresh the per-file bookkeeping, then reload so fileinfo
# reflects exactly what was persisted to disk.
initFileInfo()
getFileInfo()
appendFileInfo()
saveFileInfo()
getFileInfo()

# v = [inode, processed-line-count, path]; the dict key duplicates v[0].
for v in fileinfo.values():
	res=os.popen("wc -l "+v[2]+" | awk '{print $1}'")
	for rn in res:
		rn=int(rn)
		if rn!=int(v[1]):
			# Re-scan up to 3000 already-seen lines so query records that
			# straddled the previous cut-off are captured in full.
			if int(v[1])<=3000:
				startpos=1
			else:
				startpos=int(v[1])-3000
			endpos=rn

			returncode=sendLog(v[2],startpos,endpos)
			print("分析处理日志%s,从%s行到%s行,成功状态:%s"%(v[2],startpos,endpos,returncode))

			# Advance the bookmark only when the upload succeeded, so a
			# failed batch is retried on the next run.
			if returncode==0:
				v[1]=str(rn)

		else:
			print("分析处理日志%s,截止行%s,没有新日志不做处理"%(v[2],rn))
saveFileInfo()
