#!/bin/bash

source /etc/profile

if [ $# -eq 1 ]; then
	targetHour=$1
else
	targetHour=`date -d "-1 hours" +"%Y%m%d%H"`
fi
path=$(cd $(dirname $0);pwd)
parentPath=$(dirname ${path})
cd ${parentPath}
targetDate=${targetHour:0:8}
if [ ! -d "${path}/${targetDate}" ];then
	mkdir -p ${path}/${targetDate}
fi
day=$(date -d"${targetDate}" "+%Y-%m-%d")
# Load MySQL connection settings (expected to define: host, port, user,
# password, db) and dump the event configuration consumed by the Hive
# transform script (get_customize_keyword4.py) below.
source "${parentPath}/configs/mysql_config"
# Quote every argument: the password in particular may contain characters
# that word-split or glob. (The former trailing `wait` was a no-op — no
# background jobs are running — so it has been removed.)
python "${path}/get_event_config.py" "${host}" "${port}" "${user}" "${password}" "${db}" \
	> "${path}/event_config.txt"
# Keyword filtering: aggregate hourly per-(cid, sid, search-engine, query)
# counts (pv, distinct visitors, paid flag) from the formatted tracking log
# into a local directory for later loading. The commented-out hive commands
# below are earlier iterations of this query, kept for reference.
#hive -e "set mapred.job.name=[NAS][tools][filter_keyword][${targetDate}];use formatlog;insert overwrite local directory '${path}/${targetDate}/${targetHour}_keyword' select cid,sid,${targetDate},0,se,query from nas_tracking_format_hour where day=${targetDate} and se is not null and length(se)>0 and query is not null and length(query)>0 group by cid,sid,se,query"
#hive -e "add files ${parentPath}/page.txt ${parentPath}/event.txt ${parentPath}/time.txt ${parentPath}/pagenum.txt;add jar ${parentPath}/lib/NASUDF-0.0.1.jar;create temporary function getconversion as 'com.nsw.udf.GetConversion';set mapred.job.name=[NAS][tools][filter_keyword][${targetDate}];use formatlog;insert overwrite local directory '${path}/${targetDate}/${targetHour}_keyword' select a.cid,a.sid,${targetDate},0,a.info.col2,a.info.col3,sum(a.pv),count(distinct a.nasid),sum(if(a.pv=1,1,0)) as bounce,sum(a.visit_time)/sum(a.pv) as avg_visittime,count(distinct a.session),sum(a.conversions) from (select cid,sid,session,min(struct(time,se,query)) as info,count(1) as pv,max(nasid) as nasid,((max(time)-min(time))/1000+60) as visit_time,getconversion(cid,sid,concat_ws(',',collect_set(ep)),concat_ws(',',collect_list(cast(eventid as string))),(max(time)-min(time))/1000+60) as conversions from nas_tracking_format_hour where day=${targetDate} group by cid,sid,session) a where length(a.info.col2)>0 and length(a.info.col3)>0 group by a.cid,a.sid,a.info.col2,a.info.col3"
#hive -e "add files ${parentPath}/page.txt ${parentPath}/event.txt ${parentPath}/time.txt ${parentPath}/pagenum.txt;add jar ${parentPath}/lib/NASUDF-0.0.1.jar;create temporary function getconversion as 'com.nsw.udf.GetConversion';set mapred.job.name=[NAS][tools][filter_keyword][${targetDate}];use formatlog;insert overwrite local directory '${path}/${targetDate}/${targetHour}_keyword' select a.cid,a.sid,${targetDate},0,a.info.col2,a.info.col3,sum(a.pv),count(distinct a.nasid),sum(if(a.pv=1,1,0)) as bounce,sum(a.visit_time)/sum(a.pv) as avg_visittime,count(distinct a.session),sum(a.conversions),if(max(a.info.col4)="True",1,0) as ispay from (select cid,sid,session,min(struct(time,se,query,ispay)) as info,count(1) as pv,max(nasid) as nasid,((max(time)-min(time))/1000+60) as visit_time,getconversion(cid,sid,concat_ws(',',collect_set(ep)),concat_ws(',',collect_list(cast(eventid as string))),(max(time)-min(time))/1000+60) as conversions from nas_tracking_format_hour where day=${targetDate} group by cid,sid,session) a where length(a.info.col2)>0 and length(a.info.col3)>0 group by a.cid,a.sid,a.info.col2,a.info.col3"
# Active query: writes rows of
#   cid, sid, date, 0, se, query, pv, distinct nasid, ispay(0/1)
# into ${path}/${targetDate}/${targetHour}_keyword, keeping only rows with a
# non-blank search engine (se) and query.
hive -e "set mapred.job.name=[NAS][tools][filter_keyword][${targetDate}];use formatlog;insert overwrite local directory '${path}/${targetDate}/${targetHour}_keyword' select cid,sid,${targetDate},0,se,query,count(1),count(distinct nasid),if(ispay='True',1,0) from nas_tracking_format_hour where day=${targetDate} and length(trim(se))>0 and length(trim(query))>0 group by cid,sid,se,query,ispay"
# Inquiry (customer-service) keyword filtering: per-session event sequences
# are streamed through get_customize_keyword4.py, which attributes inquiry
# events to the triggering search keyword. The commented-out hive commands
# below are earlier iterations of this query, kept for reference.
#hive -e "set mapred.job.name=[NAS][tools][filter_keyword_kefu][${targetDate}];use formatlog;add file ${path}/get_customize_keyword.py;insert overwrite local directory '${path}/${targetDate}/${targetHour}_keyword2' select b.cid,b.sid,b.day,b.type,b.customize,b.query from (select transform(a.cid,a.sid,${targetDate},a.actions) using 'python get_customize_keyword.py' as cid,sid,day,type,customize,query from (select cid,sid,concat_ws('\001',collect_list(concat_ws(',',cast(time as string),cast(customize as string),query))) as actions from nas_tracking_format_hour where day=${targetDate} and ((query is not null and length(query)>0) or (customize>=1 and customize<=3))) group by cid,sid,session) a) b group by b.cid,b.sid,b.day,b.type,b.customize,b.query"
#hive -e "add files ${parentPath}/page.txt ${parentPath}/event.txt ${parentPath}/time.txt ${parentPath}/pagenum.txt;add jar ${parentPath}/lib/NASUDF-0.0.1.jar;create temporary function getconversion as 'com.nsw.udf.GetConversion';set mapred.job.name=[NAS][tools][filter_keyword_kefu][${targetDate}];use formatlog;add file ${path}/get_customize_keyword2.py;select b.cid,b.sid,b.day,b.type,b.customize,b.query,sum(b.pv),count(distinct b.nasid),sum(if(b.pv=1,1,0)) as bounce,sum(b.visit_time)/sum(b.pv) as avg_visittime,count(distinct b.session),sum(b.conversions) from (select transform(a,cid,a.sid,${targetDate},a.actions,a.session,a.pv,a.nasid,a.visit_time,a.conversion) using 'python get_customize_keyword2.py' as cid,sid,day,type,customize,query,session,pv,nasid,visit_time,conversion from (select cid,sid,session,concat_ws('\001',collect_list(concat_ws(',',cast(time as string),cast(customize as string),query))) as actions,count(1) as pv,max(nasid) as nasid,((max(time)-min(time))/1000+60) as visit_time,getconversion(cid,sid,concat_ws(',',collect_set(ep)),concat_ws(',',collect_list(cast(eventid as string))),(max(time)-min(time))/1000+60) as conversions from nas_tracking_format_hour where day=${targetDate} group by cid,sid,session) a) b group by b.cid,b.sid,b.day,b.type,b.customize,b.query"
#hive -e "add files ${parentPath}/page.txt ${parentPath}/event.txt ${parentPath}/time.txt ${parentPath}/pagenum.txt;add jar ${parentPath}/lib/NASUDF-0.0.1.jar;create temporary function getconversion as 'com.nsw.udf.GetConversion';set mapred.job.name=[NAS][tools][filter_keyword_kefu][${targetDate}];use formatlog;add file ${path}/get_customize_keyword2.py;insert overwrite local directory '${path}/${targetDate}/${targetHour}_keyword2' select e.cid,e.sid,e.day,e.type,e.customize,e.query,e.sumpv,e.sumnasid,e.bounce,e.avg_visittime,e.sumsession,e.sumconversions,if(f.ispay="True",1,0) from (select b.cid,b.sid,b.day,b.type,b.customize,b.query,sum(b.pv) as sumpv,count(distinct b.nasid) as sumnasid,sum(if(b.pv=1,1,0)) as bounce,sum(b.visit_time)/sum(b.pv) as avg_visittime,count(distinct b.session) as sumsession,sum(b.conversions) as sumconversions from (select transform(a,cid,a.sid,${targetDate},a.actions,a.session,a.pv,a.nasid,a.visit_time,a.conversion) using 'python get_customize_keyword2.py' as cid,sid,day,type,customize,query,session,pv,nasid,visit_time,conversion from (select cid,sid,session,concat_ws('\001',collect_list(concat_ws(',',cast(time as string),cast(customize as string),query))) as actions,count(1) as pv,max(nasid) as nasid,((max(time)-min(time))/1000+60) as visit_time,getconversion(cid,sid,concat_ws(',',collect_set(ep)),concat_ws(',',collect_list(cast(eventid as string))),(max(time)-min(time))/1000+60) as conversions from nas_tracking_format_hour where day=${targetDate} group by cid,sid,session) a) b group by b.cid,b.sid,b.day,b.type,b.customize,b.query) e join (select cid,sid,query,ispay from nas_tracking_format_hour where day=${targetDate} and cid is not null and sid is not null) f on e.cid = f.cid and e.sid=f.sid and e.query = f.query;"
#hive -e "set mapred.job.name=[NAS][tools][filter_keyword_kefu][${targetDate}];use formatlog;add file ${path}/get_customize_keyword3.py;insert overwrite local directory '${path}/${targetDate}/${targetHour}_keyword2' select b.cid,b.sid,b.day,b.type,b.customize,b.query,sum(b.pv),count(distinct b.nasid),if(b.ispay='True',1,0) from (select transform(a.cid,a.sid,${targetDate},a.actions,a.nasid,a.pv,a.ispay) using 'python get_customize_keyword3.py' as cid,sid,day,type,customize,query,nasid,pv,ispay from (select cid,sid,concat_ws('\001',collect_list(concat_ws(',',cast(time as string),cast(customize as string),query))) as actions,max(nasid) as nasid,count(1) as pv,max(ispay) as ispay from nas_tracking_format_hour where day=${targetDate} and (length(trim(query))>0 or (customize>=1 and customize<=3)) group by cid,sid,session) a) b group by b.cid,b.sid,b.day,b.type,b.customize,b.query,b.ispay"
# Active query: ships event_config.txt (produced above) plus the transform
# script to the cluster, builds one comma-joined "time,eventid,query" action
# string per session, and writes the transformed/aggregated rows into
# ${path}/${targetDate}/${targetHour}_keyword2.
hive -e "set mapred.job.name=[NAS][tools][filter_keyword_kefu][${targetDate}];use formatlog;add file ${path}/event_config.txt;add file ${path}/get_customize_keyword4.py;insert overwrite local directory '${path}/${targetDate}/${targetHour}_keyword2' select b.cid,b.sid,b.day,b.type,b.customize,b.query,sum(b.pv),count(distinct b.nasid),if(b.ispay='True',1,0) from (select transform(a.cid,a.sid,${targetDate},a.actions,a.nasid,a.pv,a.ispay) using 'python get_customize_keyword4.py' as cid,sid,day,type,customize,query,nasid,pv,ispay from (select cid,sid,concat_ws('\001',collect_list(concat_ws(',',cast(time as string),cast(eventid as string),query))) as actions,max(nasid) as nasid,count(1) as pv,max(ispay) as ispay from nas_tracking_format_hour where day=${targetDate} group by cid,sid,session) a) b group by b.cid,b.sid,b.day,b.type,b.customize,b.query,b.ispay"
# Merge both Hive output directories into a single file and load it into
# MySQL. (The former `wait` calls were no-ops — hive and cat run in the
# foreground, there are no background jobs — so they have been removed.)
cat "${path}/${targetDate}/${targetHour}_keyword"/* \
	"${path}/${targetDate}/${targetHour}_keyword2"/* \
	> "${path}/${targetDate}/${targetHour}_keywords"
# host/port/user/password/db come from configs/mysql_config sourced earlier;
# quote them so passwords with spaces/metacharacters survive intact.
python "${path}/load_keyword_to_mysql.py" "${path}/${targetDate}/${targetHour}_keywords" \
	"${host}" "${port}" "${user}" "${password}" "${db}" "${day}"