import cPickle
try:
	import MySQLdb as sqldb
except:
	import pymysql as sqldb
import sys,re
import os
from collections import Counter
from collections import defaultdict
from operator import itemgetter
import numpy as np
from numpy import arange,array,ones,linalg,bincount

from cdf import *
# --- module-level configuration and shared state ---
xindex={}              # set of valid time keys (dict used as a set)
c1_db="clus_1.db"      # pickled cluster databases
c2_db="clus_2.db"
cno_db="clus_2_no.db"
fcode="az_ip_code.csv" # CSV mapping (ip, time) -> HTTP response code
all_cno_set={0:{},1:{}} # cluster groups: 0 = "small" users, 1 = the rest

# Load the global time index; `with` closes the handle deterministically
# (the original leaked an open file object).
with open("xindex.db") as _f:
	for x in _f:
		xindex[x.strip("\n")]=1

tm_index=sorted(xindex) # ordered list of all time keys

def init_bl():
	"""Load and return the blacklist pickled in ``bl.db``.

	Fix: the original left the file handle open; ``with`` closes it, and
	binary mode is what pickle expects on every platform.
	"""
	with open("bl.db","rb") as f:
		return cPickle.load(f)

def init_db(dbn,blacklist):
	"""Load a pickled dict from *dbn*, dropping entries whose value is in
	*blacklist*; return the filtered dict.

	Fixes: the pickle file handle is now closed (``with``), and the keys are
	snapshotted with ``list()`` before deletion — popping while iterating
	``db.keys()`` only worked because Py2 returned a list copy.
	"""
	with open(dbn,"rb") as f:
		db=cPickle.load(f)
	for k in list(db.keys()):
		if db[k] in blacklist:
			db.pop(k)
	return db

def init_tm_ip_matrix(db):
	"""Build cluster-no -> {time -> [ips]} from a (ip, time) -> cluster dict.

	Samples whose time is absent from the global ``xindex`` are dropped.
	"""
	out={}
	for (ip,tm),cno in db.items():
		if tm not in xindex:
			continue
		# nested setdefault replaces the original's explicit membership tests
		out.setdefault(cno,{}).setdefault(tm,[]).append(ip)
	return out

def fix_data(tm_ip):
	# Repair one site's time -> [ips] series in place (and return it):
	#  1) re-insert a single missing slot when both neighbours were observed
	#     and look like the same (near) single-IP deployment,
	#  2) "fill the valley": lift a one-slot dip back to a neighbour's list.
	# Relies on the module-level tm_index ordering of all time slots.
	st=sorted(tm_ip)[0]
	ed=sorted(tm_ip)[-1]
	st_id=tm_index.index(st)
	ed_id=tm_index.index(ed)
	c=0
	for tm_id in range(st_id,ed_id+1):
		tm=tm_index[tm_id]
		#add missing data
		if tm not in tm_ip:
			ptm=tm_index[tm_id-1]
			atm=tm_index[tm_id+1]
			if (ptm in tm_ip) and (atm in tm_ip):
				# copy the previous slot's list (shared reference, not a copy)
				if (len(tm_ip[ptm])==1 and len(tm_ip[atm])==1):
					tm_ip[tm]=tm_ip[ptm]
					continue
				# neighbours share at least one IP and one side is single-IP
				if len(set(tm_ip[ptm])&set(tm_ip[atm]))!=0 and (len(tm_ip[ptm])==1 or len(tm_ip[atm])==1):
					tm_ip[tm]=tm_ip[ptm]
					continue

	#fill the valley
	tms=sorted(tm_ip)
	# NOTE(review): at tm_id==0, tms[tm_id-1] reads tms[-1] (the LAST slot)
	# as the "previous" one — looks like an off-by-one; confirm intent.
	for tm_id in xrange(len(tms)-1):
		tm=tms[tm_id]
		ptm=tms[tm_id-1]
		atm=tms[tm_id+1]
		# a one-deep dip between two equal-height neighbours
		if len(tm_ip[ptm])==len(tm_ip[tm])+1 and len(tm_ip[atm])==len(tm_ip[tm])+1:
			if set(tm_ip[tm])&set(tm_ip[ptm])==set(tm_ip[tm]):
				tm_ip[tm]=tm_ip[ptm]
			else:
				tm_ip[tm]=tm_ip[atm]
	return tm_ip

def if_small_user(tm_ip):
	"""True when the most common per-slot IP count is exactly 1 and it
	accounts for more than two thirds of the observed time slots."""
	counts=Counter(len(ips) for ips in tm_ip.values())
	top_size,top_freq=counts.most_common(1)[0]
	return top_size==1 and top_freq>len(tm_ip)*2/3

def group_user():
	"""Partition every cluster number into all_cno_set[0] ("small" users)
	or all_cno_set[1] (everyone else) and return the updated mapping."""
	for cno in matrix:
		bucket=0 if if_small_user(matrix[cno]) else 1
		all_cno_set[bucket][cno]=1
	return all_cno_set

# Pipeline entry state: load the cluster db, build and repair the
# per-cluster time -> [ips] matrix, then split clusters into groups.
db=init_db(c2_db,init_bl())
matrix=init_tm_ip_matrix(db)
for k in matrix:
		matrix[k]=fix_data(matrix[k])
all_cno_set=group_user()

def pt_stable(tm_ip):
	"""True when every time slot has the same number of IPs.

	An empty series yields False (no size was ever observed).
	"""
	sizes=set(len(ips) for ips in tm_ip.values())
	return len(sizes)==1

def pt_ab_increase(tm_ip):
	"""True when per-slot IP counts never decrease in time order.

	Fix: ``xrange`` is Python-2-only; a pairwise ``all`` over adjacent
	counts is equivalent and runs on both interpreters. Empty and
	single-slot series are vacuously True (unchanged).
	"""
	sizes=[len(tm_ip[tm]) for tm in sorted(tm_ip)]
	return all(a<=b for a,b in zip(sizes,sizes[1:]))

def pt_ab_decrease(tm_ip):
	"""True when per-slot IP counts never increase in time order.

	Fix: ``xrange`` is Python-2-only; a pairwise ``all`` over adjacent
	counts is equivalent and runs on both interpreters. Empty and
	single-slot series are vacuously True (unchanged).
	"""
	sizes=[len(tm_ip[tm]) for tm in sorted(tm_ip)]
	return all(a>=b for a,b in zip(sizes,sizes[1:]))

def pt_peak_once(tm_ip):
	"""True when the count series rises to a single maximum and then only
	falls (one-peak shape). Plateaus are collapsed first so equal runs do
	not break the monotonicity checks. Raises IndexError on empty input."""
	sizes=[len(tm_ip[tm]) for tm in sorted(tm_ip)]
	compact=[sizes[0]]
	for v in sizes:
		if compact[-1]!=v:
			compact.append(v)
	peak=compact.index(max(compact))
	rising=all(compact[i]<=compact[i+1] for i in range(peak))
	falling=all(compact[i]>=compact[i+1] for i in range(peak,len(compact)-1))
	return rising and falling

def pt_valley_once(tm_ip):
	"""True when the count series falls to a single minimum and then only
	rises (one-valley shape). Plateaus are collapsed first. Raises
	IndexError on empty input."""
	sizes=[len(tm_ip[tm]) for tm in sorted(tm_ip)]
	compact=[sizes[0]]
	for v in sizes:
		if compact[-1]!=v:
			compact.append(v)
	valley=compact.index(min(compact))
	falling=all(compact[i]>=compact[i+1] for i in range(valley))
	rising=all(compact[i]<=compact[i+1] for i in range(valley,len(compact)-1))
	return falling and rising

def get_code(ip_tm_set):
	"""Fill the "code" field for every (ip, time) key found in the global
	``fcode`` CSV and return the updated mapping."""
	with open(fcode) as src:
		for line in src:
			ip,tm,code=line.strip("\n").split(",")
			key=(ip,tm)
			if key in ip_tm_set:
				ip_tm_set[key]["code"]=code
	return ip_tm_set

def init_server_version(cno_set):
	"""Replace each cluster number's value in *cno_set* with its server
	header (field 4 of the clus_2_no.db key) and return the mapping."""
	tdb=init_db(cno_db,init_bl())
	for key,cno in tdb.items():
		if cno in cno_set:
			cno_set[cno]=key[4]
	return cno_set

def count_server(cno_server_set):
	"""Return up to the 50 most common non-empty server strings as
	(server, count) pairs, most frequent first.

	Fix: ``dict.iteritems()`` is Python-2-only; ``items()`` yields the
	same pairs on both interpreters.
	"""
	main=[v for v in cno_server_set.values() if v]
	r=sorted(Counter(main).items(), key=lambda d:d[1], reverse=True)
	return r[0:50]

def count_server_minor(cno_server_set):
	"""Per server family, tally minor-version tokens; keep the top 50 each.

	The minor version is the token after the first "/" in the first
	space-separated word, commas stripped (e.g. "Apache/2.4 (Unix)" ->
	"2.4"). Only the first matching family counts per server string.
	Fix: ``dict.iteritems()`` is Python-2-only; ``items()`` is equivalent.
	"""
	target=["Microsoft-IIS","Apache","nginx"]
	out=dict((t,[]) for t in target)
	for server in cno_server_set.values():
		if not server:
			continue
		for t in target:
			if server.find(t)==-1:
				continue
			parts=[p.strip(",") for p in server.split(" ")[0].split("/")]
			# no "/version" part -> nothing to record for this server
			if len(parts)>=2:
				out[t].append(parts[1])
			break
	for t in out:
		out[t]=sorted(Counter(out[t]).items(), key=lambda d:d[1], reverse=True)[0:50]
	return out

def init_tmpl_version(cno_set):
	"""Replace each cluster number's value in *cno_set* with its template
	string (field 2 of the clus_2_no.db key) and return the mapping."""
	tdb=init_db(cno_db,init_bl())
	for key,cno in tdb.items():
		if cno in cno_set:
			cno_set[cno]=key[2]
	return cno_set

def count_tmpl(cno_tmpl_set):
	"""Return up to the 50 most common non-empty template strings as
	(template, count) pairs, most frequent first.

	Quotes are stripped so e.g. '"WordPress 4.2"' merges with the bare
	form. Fix: ``dict.iteritems()`` is Python-2-only; ``items()`` is
	equivalent here.
	"""
	main=[]
	for tmpl in cno_tmpl_set.values():
		if tmpl:
			main.append(tmpl.replace("'","").replace('"',''))
	return sorted(Counter(main).items(), key=lambda d:d[1], reverse=True)[0:50]
	
def count_tmpl_minor(cno_tmpl_set):
	"""Per CMS family, tally version tokens; keep the top 50 each.

	The version is the second space-separated word, commas stripped
	(e.g. "WordPress 4.2.3" -> "4.2.3"); single-word strings record
	nothing. Only the first matching family counts per template string.
	Fix: ``dict.iteritems()`` is Python-2-only; ``items()`` is equivalent.
	"""
	target=["WordPress","Joomla!","Drupal","Ghost"]
	out=dict((t,[]) for t in target)
	for tmpl in cno_tmpl_set.values():
		if not tmpl:
			continue
		tmpl=tmpl.replace("'","").replace('"','')
		for t in target:
			if tmpl.find(t)==-1:
				continue
			words=[w.strip(",") for w in tmpl.split(" ")]
			if len(words)>=2:
				out[t].append(words[1])
			break
	for t in out:
		out[t]=sorted(Counter(out[t]).items(), key=lambda d:d[1], reverse=True)[0:50]
	return out

def server_usage():
	"""Write per-group server usage CSVs (major and minor versions) under
	data/. Note: overwrites the values in all_cno_set's group dicts with
	server strings via init_server_version."""
	for group in all_cno_set:
		cno_set=init_server_version(all_cno_set[group])
		major=count_server(cno_set)
		minor=count_server_minor(cno_set)
		with open("data/server_usage_%s.csv"%(group),"w") as f:
			for name,no in major:
				f.write("%s,%s\n"%(name,no))
		with open("data/server_minor_usage_%s.csv"%(group),"w") as f:
			for fam in minor:
				f.write("%s,%s,%s\n"%(fam,"minor","no"))
				for ver,no in minor[fam]:
					f.write("%s,%s,%s\n"%(fam,ver,no))

def tmpl_usage():
	"""Write per-group template usage CSVs (major and minor versions) under
	data/. Note: overwrites the values in all_cno_set's group dicts with
	template strings via init_tmpl_version."""
	for group in all_cno_set:
		cno_set=init_tmpl_version(all_cno_set[group])
		major=count_tmpl(cno_set)
		minor=count_tmpl_minor(cno_set)
		with open("data/tmpl_usage_%s.csv"%(group),"w") as f:
			for name,no in major:
				f.write("%s,%s\n"%(name,no))
		with open("data/tmpl_minor_usage_%s.csv"%(group),"w") as f:
			for fam in minor:
				f.write("%s,%s,%s\n"%(fam,"minor","no"))
				for ver,no in minor[fam]:
					f.write("%s,%s,%s\n"%(fam,ver,no))

def web_down_time(tm_ip):
	"""Number of time slots inside the site's lifetime (first to last
	observation on the global tm_index) with no observation."""
	observed=sorted(tm_ip)
	first_id=tm_index.index(observed[0])
	last_id=tm_index.index(observed[-1])
	expected=tm_index[first_id:last_id+1]
	return len(set(expected)-set(observed))

def web_all_down_time():
	"""Emit a CDF of per-site downtime duration, one output per user group."""
	for group in all_cno_set:
		durations=[web_down_time(matrix[cno]) for cno in all_cno_set[group]]
		CDF("data/%s_%s"%("web_down_time",group),durations).go()

def web_down_reason(tm_ip,cno):
	# Find, for each downtime gap of one site, an IP present on BOTH sides
	# of the gap. Returns (had_downtime, ip_tm_set) where ip_tm_set maps
	# (ip, time) -> {"code": None, "cno": cno}; (True, False) when no gap
	# had a surviving IP; (False, False) when the site was never down.
	st=sorted(tm_ip)[0]
	ed=sorted(tm_ip)[-1]
	st_id=tm_index.index(st)
	ed_id=tm_index.index(ed)
	etime=tm_index[st_id:ed_id+1]
	rtime=sorted(tm_ip)
	dtime=list(set(etime)-set(rtime))
	down_dur=len(dtime)
	if down_dur==0:
		return False,False
	# render the lifetime as a sequence where "#" marks a missing slot
	tmp_tm=[]
	for tm in tm_index[st_id:ed_id+1]:
		if tm in dtime:
			tmp_tm.append("#")
		else:
			tmp_tm.append(tm)
	bip=[]
	aip=[]
	pos=-1
	ip_tm_set={}
	for tm_id in xrange(len(tmp_tm)-1):
		ctm=tmp_tm[tm_id]
		ntm=tmp_tm[tm_id+1]
		pos+=1
		if ctm!="#" and ntm=="#":
			# last observed slot before a gap: remember its IPs
			bip=tm_ip[ctm]
			continue
		if ctm=="#" and ntm!="#":
			# gap ends at the next slot: keep one IP alive on both sides
			aip=tm_ip[ntm]
			cip=set(aip)&set(bip)
			if len(cip)!=0:
				ip=list(cip)[0]
				# NOTE(review): pos counts from the start of the site's
				# lifetime, but tm_index is the global timeline — this looks
				# like it should be tm_index[st_id+pos]; confirm intent.
				tm=tm_index[pos]
				k=(ip,tm)
				ip_tm_set[k]={"code":None,"cno":cno}
			continue
	if len(ip_tm_set)!=0:
		return True,ip_tm_set
	return True,False

	
def web_all_down_reason():
	# Aggregate downtime-cause HTTP codes per user group and write them to
	# data/web_down_reason_<group>.csv; downs with no surviving IP are
	# counted under None ("unknown cause").
	for i in all_cno_set:
		tmp=[]
		all_ip_tm_set={}
		cno_set=all_cno_set[i]
		none_no=0
		for cno in cno_set:
			tm_ip=matrix[cno]
			down,tmp_ip_tm=web_down_reason(tm_ip,cno)
			if down:
				if tmp_ip_tm:
					# merge this site's (ip, time) records into the group total
					all_ip_tm_set=dict(all_ip_tm_set,**tmp_ip_tm)
				else:
					none_no+=1
		res=get_code(all_ip_tm_set)
		res=res.values()
		out={}
		# group the observed codes per cluster number
		for d in res:
			if d["cno"] not in out:
				out[d["cno"]]=[]
			out[d["cno"]].append(d["code"])
		# collapse multiple codes per cluster with a fixed priority:
		# '0' > 'None' > '' > '500' > last seen
		for k in out:
			if len(out[k])>1:
				if '0' in out[k]:
					out[k]='0'
					continue
				if 'None' in out[k]:
					out[k]=None
					continue
				if '' in out[k]:
					out[k]=''
					continue
				if '500' in out[k]:
					out[k]='500'
					continue
				out[k]=out[k][-1]
			else:
				out[k]=out[k][0]
		out=out.values()
		out=Counter(out)
		# fold '200' (site actually answered) into the "unknown" bucket.
		# NOTE(review): Counter.pop('200') raises KeyError when no '200'
		# was ever observed in this group — confirm that cannot happen.
		out[None]+=none_no
		out[None]+=out['200']
		out.pop('200')
		f=open("data/web_down_reason_%s.csv"%i,"w")
		for k in sorted(out):
			f.write("%s,%s\n"%(k,out[k]))
		f.close()

def life_time(tm_ip):
	"""Return (observed slot count, lifetime span in slots).

	Span is measured on the global tm_index between the first and the last
	observed time, inclusive; the two differ when the site had gaps.
	"""
	observed=sorted(tm_ip)
	span=tm_index.index(observed[-1])-tm_index.index(observed[0])+1
	return len(tm_ip),span

def maintain_time(tm_ip):
	# Heuristic: did the site's IP count change across a maintenance gap?
	# Encodes the lifetime as a string of per-slot counts with "#" marking
	# gap runs, then compares the count just before each "#" with the one
	# just after. Returns True when some gap is bordered by different counts.
	st=sorted(tm_ip)[0]
	ed=sorted(tm_ip)[-1]
	st_id=tm_index.index(st)
	ed_id=tm_index.index(ed)
	etime=tm_index[st_id:ed_id+1]
	rtime=sorted(tm_ip)
	mtime=list(set(etime)-set(rtime))

	tmp=[]
	for tm in tm_index[st_id:ed_id+1]:
		if tm in mtime:
			tmp.append("#")
		else:
			tmp.append(len(tm_ip[tm]))
	# collapse consecutive "#" markers; numeric entries are all kept.
	# NOTE(review): out starts as [tmp[0]] and the loop re-visits tmp[0],
	# so the first count appears twice in the encoding — confirm intended.
	out=[tmp[0]]
	for t in tmp:
		if t=="#" and t!=out[-1]:
			out.append(t)
		if t!="#":
			out.append(t)
	# NOTE(review): joining counts digit-wise is ambiguous for counts >= 10
	# and the [pos+1]/[pos+2] indexing assumes single-character entries —
	# confirm per-slot IP counts stay below 10.
	out="".join([str(v) for v in out])
	while out.find("#")!=-1:
		pos=out.find("#")
		p=0
		if pos==len(out)-2:
			p=out[pos+1]
		else:
			p=out[pos+2]
		if out[pos-1]!=p:
			return True
		out=out[pos+1:]
	return False

def category_user():
	# Classify every site in each group by the shape of its IP-count series:
	# 0 stable, 1 monotonically increasing, 2 monotonically decreasing,
	# 3 single peak, 4 single valley, 5 other; write per-group CSVs.
	print len(matrix)
	for i in all_cno_set:
		count={0:0,1:0,2:0,3:0,4:0,5:0}
		all_ip_tm_set={}
		cno_set=all_cno_set[i]
		none_no=0
		out1=[]
		out2=[]
		for cno in cno_set:
			tm_ip=matrix[cno]
			if pt_stable(tm_ip):
				count[0]+=1
				continue
			if pt_ab_increase(tm_ip):
				count[1]+=1
				r,e=life_time(tm_ip)
				# site had observation gaps: record whether the IP count
				# changed across a gap (bool from maintain_time)
				if r!=e:
					out1.append(maintain_time(tm_ip))
				continue
			if pt_ab_decrease(tm_ip):
				count[2]+=1
				r,e=life_time(tm_ip)
				if r!=e:
					out2.append(maintain_time(tm_ip))
				continue
			if pt_peak_once(tm_ip):
				count[3]+=1
				continue
			if pt_valley_once(tm_ip):
				count[4]+=1
				continue
			count[5]+=1
		f=open("data/user_cate_%s.csv"%i,"w")
		for k in sorted(count):
			f.write("%s,%s\n"%(k,count[k]))
		f.close()
		f=open("data/change_after_maintain_%s.csv"%i,"w")
		f.write("%s,%s,%s\n"%("time","increase","decrease"))
		# NOTE(review): out1/out2 hold booleans, so sorted(out1) yields
		# bools and out1[k]/out2[k] index positions 0/1 only; out2 may also
		# be shorter than out1 (IndexError risk). Looks like this meant to
		# count True/False occurrences — confirm intent.
		for k in sorted(out1):
			f.write("%s,%s,%s\n"%(k,out1[k],out2[k]))
		f.close()

def web_churn_2first():
	"""Per group, compare every day's live-site set against the FIRST day's
	set and write churn columns to data/wb_churn.<group>.csv.

	Fixes: ``xrange`` is Python-2-only (replaced by iterating the sorted
	days directly) and the output file is now closed via ``with``. Output
	bytes are unchanged.
	"""
	for ino in all_cno_set:
		cno_set=all_cno_set[ino]
		# time -> [cluster numbers alive at that time]
		tm_web={}
		for cno in matrix:
			if cno not in cno_set:
				continue
			for tm in matrix[cno]:
				tm_web.setdefault(tm,[]).append(cno)
		out=["time,no_of_web,web_stable,web_in,web_out,web_inc\n"]
		txindex=sorted(xindex.keys())
		st_wb=set(tm_web[txindex[0]])	# fixed baseline: first day's sites
		for tm in txindex:
			cur_wb=set(tm_web[tm])
			stable_wb=cur_wb&st_wb
			new_wb=cur_wb-st_wb
			out_wb=st_wb-cur_wb
			out.append("%s,%s,%s,%s,%s,%s\n"%(tm,len(cur_wb),len(stable_wb),len(new_wb),len(out_wb),len(new_wb)-len(out_wb)))
		with open("data/wb_churn.%s.csv"%(ino),"w") as f:
			f.writelines(out)

def web_churn_day():
	"""Per group, compare each day's live-site set against the PREVIOUS
	day's set and write churn columns to data/wb_churn_day.<group>.csv.

	Fixes: ``xrange`` is Python-2-only (replaced by iterating the sorted
	days directly) and the output file is now closed via ``with``. Output
	bytes are unchanged.
	"""
	for ino in all_cno_set:
		cno_set=all_cno_set[ino]
		# time -> [cluster numbers alive at that time]
		tm_web={}
		for cno in matrix:
			if cno not in cno_set:
				continue
			for tm in matrix[cno]:
				tm_web.setdefault(tm,[]).append(cno)
		out=["time,no_of_web,web_stable,web_in,web_out,web_inc\n"]
		txindex=sorted(xindex.keys())
		st_wb=set(tm_web[txindex[0]])
		for tm in txindex:
			cur_wb=set(tm_web[tm])
			stable_wb=cur_wb&st_wb
			new_wb=cur_wb-st_wb
			out_wb=st_wb-cur_wb
			out.append("%s,%s,%s,%s,%s,%s\n"%(tm,len(cur_wb),len(stable_wb),len(new_wb),len(out_wb),len(new_wb)-len(out_wb)))
			st_wb=cur_wb	# rolling baseline: yesterday's sites
		with open("data/wb_churn_day.%s.csv"%(ino),"w") as f:
			f.writelines(out)

def ip_update(tm_ip):
	"""Total IP churn: for each slot in time order, count how many of its
	IPs were not already present in the previous slot, and sum."""
	tms=sorted(tm_ip)
	prev=set(tm_ip[tms[0]])
	changes=0
	for tm in tms:
		cur=tm_ip[tm]
		changes+=abs(len(cur)-len(set(cur)&prev))
		prev=set(cur)
	return changes

def ip_increase(tm_ip):
	"""Spread of per-slot IP counts (max - min); False when constant.

	(The original's commented-out average-step variant was dead code and
	has been removed; live behavior is unchanged.)
	"""
	sizes=[len(tm_ip[tm]) for tm in sorted(tm_ip)]
	spread=max(sizes)-min(sizes)
	if spread==0:
		return False
	return spread

def ip_no_diff(tm_ip):
	"""Difference between the largest and smallest per-slot IP count, or
	False when the count never varies."""
	sizes=[len(ips) for ips in tm_ip.values()]
	diff=max(sizes)-min(sizes)
	return diff if diff else False

def stable_web_ip_update():
	"""Emit a CDF of IP-churn counts among sites whose IP count is
	constant, one output per user group."""
	for group in all_cno_set:
		updates=[]
		for cno in all_cno_set[group]:
			tm_ip=matrix[cno]
			if pt_stable(tm_ip):
				updates.append(ip_update(tm_ip))
		CDF("data/%s_%s"%("stable_web_ip_update",group),updates).go()


def increase_web_speed():
	"""Emit a CDF of count spread for sites whose IP count only grows."""
	for group in all_cno_set:
		spreads=[]
		for cno in all_cno_set[group]:
			tm_ip=matrix[cno]
			if not pt_ab_increase(tm_ip):
				continue
			diff=ip_no_diff(tm_ip)
			if diff:
				spreads.append(diff)
		CDF("data/%s_%s"%("increase_web_speed",group),spreads).go()

def decrease_web_speed():
	"""Emit a CDF of count spread for sites whose IP count only shrinks."""
	for group in all_cno_set:
		spreads=[]
		for cno in all_cno_set[group]:
			tm_ip=matrix[cno]
			if not pt_ab_decrease(tm_ip):
				continue
			diff=ip_no_diff(tm_ip)
			if diff:
				spreads.append(diff)
		CDF("data/%s_%s"%("decrease_web_speed",group),spreads).go()

def peak_web_speed():
	"""Emit a CDF of count spread for single-peak sites, per user group."""
	for group in all_cno_set:
		spreads=[]
		for cno in all_cno_set[group]:
			tm_ip=matrix[cno]
			if not pt_peak_once(tm_ip):
				continue
			diff=ip_no_diff(tm_ip)
			if diff:
				spreads.append(diff)
		CDF("data/%s_%s"%("peak_web_speed",group),spreads).go()

def total_web_speed():
	"""Emit a CDF of count spread across all non-stable sites, per group.

	(The original's commented-out extra shape filters were dead code and
	have been removed; live behavior is unchanged.)
	"""
	for group in all_cno_set:
		spreads=[]
		for cno in all_cno_set[group]:
			tm_ip=matrix[cno]
			if pt_stable(tm_ip):
				continue
			diff=ip_no_diff(tm_ip)
			if diff:
				spreads.append(diff)
		CDF("data/%s_%s"%("total_web_speed",group),spreads).go()
	
def peak_web_time():
	"""Histogram over time of when single-peak sites reach their maximum
	IP count; written as data/peak_web_time_<group>.csv."""
	for group in all_cno_set:
		peaks=[]
		for cno in all_cno_set[group]:
			tm_ip=matrix[cno]
			# only sites that are exclusively single-peak shaped
			if pt_stable(tm_ip) or pt_ab_increase(tm_ip) or pt_ab_decrease(tm_ip):
				continue
			if not pt_peak_once(tm_ip):
				continue
			counts=dict((tm,len(ips)) for tm,ips in tm_ip.items())
			# NOTE(review): the original tests len(set(t))!=1, which over a
			# dict means "more than one time slot" — preserved as-is.
			if len(counts)!=1:
				peaks.append(sorted(counts.items(),key=itemgetter(1),reverse=True)[0][0])
		hist=Counter(peaks)
		with open("data/peak_web_time_%s.csv"%group,"w") as f:
			for tm in sorted(xindex):
				f.write("%s,%s\n"%(tm,hist.get(tm,0)))

def valley_web_time():
	"""Histogram over time of when single-valley sites reach their minimum
	IP count; written as data/valley_web_time_<group>.csv."""
	for group in all_cno_set:
		valleys=[]
		for cno in all_cno_set[group]:
			tm_ip=matrix[cno]
			# only sites that are exclusively single-valley shaped
			if pt_stable(tm_ip) or pt_ab_increase(tm_ip) or pt_ab_decrease(tm_ip) or pt_peak_once(tm_ip):
				continue
			if not pt_valley_once(tm_ip):
				continue
			counts=dict((tm,len(ips)) for tm,ips in tm_ip.items())
			# NOTE(review): the original tests len(set(t))!=1, which over a
			# dict means "more than one time slot" — preserved as-is.
			if len(counts)!=1:
				valleys.append(sorted(counts.items(),key=itemgetter(1),reverse=False)[0][0])
		hist=Counter(valleys)
		with open("data/valley_web_time_%s.csv"%group,"w") as f:
			for tm in sorted(xindex):
				f.write("%s,%s\n"%(tm,hist.get(tm,0)))

if __name__ == '__main__':
	# Run every analysis stage. Order matters: server_usage()/tmpl_usage()
	# overwrite the values stored in all_cno_set's group dicts (see
	# init_server_version / init_tmpl_version).
	server_usage()
	tmpl_usage()
	web_all_down_time()
	web_all_down_reason()
	category_user()
	web_churn_2first()
	web_churn_day()
	stable_web_ip_update()
	increase_web_speed()
	decrease_web_speed()
	peak_web_speed()
	total_web_speed()
	peak_web_time()
	valley_web_time()

