#!/bin/bash

source /etc/profile

if [ $# -eq 1 ]; then
	targetHour=$1
else
	targetHour=`date -d "-1 hours" +"%Y%m%d%H"`
fi
path=$(cd $(dirname $0);pwd)
parentPath=$(dirname ${path})
cd ${parentPath}
targetDate=${targetHour:0:8}
if [ ! -d "${path}/${targetDate}" ];then
	mkdir -p ${path}/${targetDate}
fi
day=$(date -d"${targetDate}" "+%Y-%m-%d")
# All sources (全部来源), broken down by referrer type (按照来源类型).
# Session-level rollup per (cid, sid, refererType) over the day=${targetDate}
# partition: total pv, distinct visitors (nasid), distinct ips, bounces
# (sessions with exactly one pv), average visit time, session count, and
# conversions via the GetConversion UDF. A session's refererType is taken
# from its earliest hit: min(struct(time,refererType)).col2.
# NOTE(review): visit_time = (max(time)-min(time))/1000+60 — the +60 looks
# like a fixed per-session dwell allowance in seconds; confirm.
# Output: local dir ${path}/${targetDate}/${targetHour}_anyway_refererType.
hive -e "add files ${parentPath}/page.txt ${parentPath}/event.txt ${parentPath}/time.txt ${parentPath}/pagenum.txt;add jar ${parentPath}/lib/NASUDF-0.0.1.jar;create temporary function getconversion as 'com.nsw.udf.GetConversion';set mapred.job.name=[NAS][referer][referer_type][${targetDate}];use formatlog;insert overwrite local directory '${path}/${targetDate}/${targetHour}_anyway_refererType' select a.cid,a.sid,${targetDate},'all','refererType',a.refererType,sum(a.pv),count(distinct a.nasid),count(distinct ip),sum(if(a.pv=1,1,0)) as bounce,sum(a.visit_time)/sum(a.pv) as avg_visittime,count(distinct a.session),sum(a.conversions),'0','' from (select cid,sid,session,min(struct(time,refererType)).col2 as refererType,max(nasid) as nasid,max(ip) as ip,((max(time)-min(time))/1000+60) as visit_time,count(1) as pv,getconversion(cid,sid,concat_ws(',',collect_set(ep)),concat_ws(',',collect_list(cast(eventid as string))),(max(time)-min(time))/1000+60) as conversions from nas_tracking_format_hour where day=${targetDate} group by cid,sid,session) a group by a.cid,a.sid,a.refererType"
# By referring site (按照来源网站): session-level rollup per (cid, sid,
# referrer host), where the host is extracted with the GetUrlHost UDF from
# the referer URL of the session's earliest hit. Rows whose host resolves
# to 'unknown' are excluded. Metrics mirror the other referrer queries in
# this script (pv, distinct nasid/ip, bounces, avg visit time, sessions,
# conversions).
# Output: local dir ${path}/${targetDate}/${targetHour}_anyway_refererUrl.
hive -e "add files ${parentPath}/page.txt ${parentPath}/event.txt ${parentPath}/time.txt ${parentPath}/pagenum.txt;add jar ${parentPath}/lib/NASUDF-0.0.1.jar;create temporary function geturlhost as 'com.nsw.udf.GetUrlHost';create temporary function getconversion as 'com.nsw.udf.GetConversion';set mapred.job.name=[NAS][referer][refererUrl][${targetDate}];use formatlog;insert overwrite local directory '${path}/${targetDate}/${targetHour}_anyway_refererUrl' select a.cid,a.sid,${targetDate},'all','refererUrl',geturlhost(a.refererUrl),sum(a.pv),count(distinct a.nasid),count(distinct ip),sum(if(a.pv=1,1,0)) as bounce,sum(a.visit_time)/sum(a.pv) as avg_visittime,count(distinct a.session),sum(a.conversions),'0','' from (select cid,sid,session,min(struct(time,referer)).col2 as refererUrl,max(nasid) as nasid,max(ip) as ip,((max(time)-min(time))/1000+60) as visit_time,count(1) as pv,getconversion(cid,sid,concat_ws(',',collect_set(ep)),concat_ws(',',collect_list(cast(eventid as string))),(max(time)-min(time))/1000+60) as conversions from nas_tracking_format_hour where day=${targetDate} group by cid,sid,session) a where geturlhost(a.refererUrl)<>'unknown' group by a.cid,a.sid,geturlhost(a.refererUrl)"
# Search engines (搜索引擎), per engine: sessions whose earliest hit has
# refererType=2 (search-engine traffic, per the job name), grouped by the
# search-engine field (se) of that earliest hit — refererInfo.col3 from
# min(struct(time,refererType,se)). Same session-level metric set as the
# other queries in this script.
# Output: local dir ${path}/${targetDate}/${targetHour}_search.
hive -e "add files ${parentPath}/page.txt ${parentPath}/event.txt ${parentPath}/time.txt ${parentPath}/pagenum.txt;add jar ${parentPath}/lib/NASUDF-0.0.1.jar;create temporary function getconversion as 'com.nsw.udf.GetConversion';set mapred.job.name=[NAS][referer][SearchEngines][each][${targetDate}];use formatlog;insert overwrite local directory '${path}/${targetDate}/${targetHour}_search' select a.cid,a.sid,${targetDate},'searchEngines','each',a.refererInfo.col3,sum(a.pv),count(distinct a.nasid),count(distinct ip),sum(if(a.pv=1,1,0)) as bounce,sum(a.visit_time)/sum(a.pv) as avg_visittime,count(distinct a.session),sum(a.conversions),'0','' from (select cid,sid,session,min(struct(time,refererType,se)) as refererInfo,max(nasid) as nasid,max(ip) as ip,((max(time)-min(time))/1000+60) as visit_time,count(1) as pv,getconversion(cid,sid,concat_ws(',',collect_set(ep)),concat_ws(',',collect_list(cast(eventid as string))),(max(time)-min(time))/1000+60) as conversions from nas_tracking_format_hour where day=${targetDate} group by cid,sid,session) a where a.refererInfo.col2=2 group by a.cid,a.sid,a.refererInfo.col3"
# Search-engine overview (搜索引擎概览): a single 'all' row per (cid, sid)
# aggregating every session whose earliest hit has refererType=2
# (search-engine traffic, per the job name).
# Output: local dir ${path}/${targetDate}/${targetHour}_seoverview.
hive -e "add files ${parentPath}/page.txt ${parentPath}/event.txt ${parentPath}/time.txt ${parentPath}/pagenum.txt;add jar ${parentPath}/lib/NASUDF-0.0.1.jar;create temporary function getconversion as 'com.nsw.udf.GetConversion';set mapred.job.name=[NAS][referer][SearchEngines][overview][${targetDate}];use formatlog;insert overwrite local directory '${path}/${targetDate}/${targetHour}_seoverview' select a.cid,a.sid,${targetDate},'searchEngines','overview','all',sum(a.pv),count(distinct a.nasid),count(distinct ip),sum(if(a.pv=1,1,0)) as bounce,sum(a.visit_time)/sum(a.pv) as avg_visittime,count(distinct a.session),sum(a.conversions),'0','' from (select cid,sid,session,min(struct(time,refererType)).col2 as refererType,min(struct(time,se)).col2 as se,max(nasid) as nasid,max(ip) as ip,((max(time)-min(time))/1000+60) as visit_time,count(1) as pv,getconversion(cid,sid,concat_ws(',',collect_set(ep)),concat_ws(',',collect_list(cast(eventid as string))),(max(time)-min(time))/1000+60) as conversions from nas_tracking_format_hour where day=${targetDate} group by cid,sid,session) a where a.refererType=2 group by a.cid,a.sid"
# Search keywords (搜索词), per keyword: session-level rollup grouped by
# (cid, sid, query, se), where query/ispay/se are all taken from the
# session's earliest hit. Sessions with a null or empty query are skipped.
# Unlike the other queries here, the last two output columns carry real
# values: max(ispay) and the search engine.
# Output: local dir ${path}/${targetDate}/${targetHour}_queryeach.
hive -e "add files ${parentPath}/page.txt ${parentPath}/event.txt ${parentPath}/time.txt ${parentPath}/pagenum.txt;add jar ${parentPath}/lib/NASUDF-0.0.1.jar;create temporary function getconversion as 'com.nsw.udf.GetConversion';set mapred.job.name=[NAS][referer][query][each][${targetDate}];use formatlog;insert overwrite local directory '${path}/${targetDate}/${targetHour}_queryeach' select a.cid,a.sid,${targetDate},'query','each',a.query,sum(a.pv),count(distinct a.nasid),count(distinct ip),sum(if(a.pv=1,1,0)) as bounce,sum(a.visit_time)/sum(a.pv) as avg_visittime,count(distinct a.session),sum(a.conversions),max(a.ispay) as ispay,a.se from (select cid,sid,session,min(struct(time,query)).col2 as query,min(struct(time,query,ispay)).col3 as ispay,min(struct(time,query,se)).col3 as se,max(nasid) as nasid,max(ip) as ip,((max(time)-min(time))/1000+60) as visit_time,count(1) as pv,getconversion(cid,sid,concat_ws(',',collect_set(ep)),concat_ws(',',collect_list(cast(eventid as string))),(max(time)-min(time))/1000+60) as conversions from nas_tracking_format_hour where day=${targetDate} group by cid,sid,session) a where a.query is not null and length(a.query)>0 group by a.cid,a.sid,a.query,a.se"
# Search-keyword overview (搜索词概览): a single 'all' row per (cid, sid)
# aggregating every session whose earliest hit carried a non-empty query.
# Output: local dir ${path}/${targetDate}/${targetHour}_queryoverview.
hive -e "add files ${parentPath}/page.txt ${parentPath}/event.txt ${parentPath}/time.txt ${parentPath}/pagenum.txt;add jar ${parentPath}/lib/NASUDF-0.0.1.jar;create temporary function getconversion as 'com.nsw.udf.GetConversion';set mapred.job.name=[NAS][referer][query][overview][${targetDate}];use formatlog;insert overwrite local directory '${path}/${targetDate}/${targetHour}_queryoverview' select a.cid,a.sid,${targetDate},'query','overview','all',sum(a.pv),count(distinct a.nasid),count(distinct ip),sum(if(a.pv=1,1,0)) as bounce,sum(a.visit_time)/sum(a.pv) as avg_visittime,count(distinct a.session),sum(a.conversions),'0','' from (select cid,sid,session,min(struct(time,query)).col2 as query,max(nasid) as nasid,max(ip) as ip,((max(time)-min(time))/1000+60) as visit_time,count(1) as pv,getconversion(cid,sid,concat_ws(',',collect_set(ep)),concat_ws(',',collect_list(cast(eventid as string))),(max(time)-min(time))/1000+60) as conversions from nas_tracking_format_hour where day=${targetDate} group by cid,sid,session) a where a.query is not null and length(a.query)>0 group by a.cid,a.sid"
# Keyword overview per search engine (各个搜索引擎搜索词概览): for sessions
# whose earliest hit has both a non-empty query (col2) and engine (col3),
# counts sessions (pv=1 per session) grouped by engine, plus max(ispay).
# Deliberately lighter than the other queries: no aux files or UDF jar are
# added, and the remaining metric columns are emitted as literal zeros.
# Output: local dir ${path}/${targetDate}/${targetHour}_queryoverviewse.
hive -e "set mapred.job.name=[NAS][referer][query][overview_se][${targetDate}];use formatlog;insert overwrite local directory '${path}/${targetDate}/${targetHour}_queryoverviewse' select a.cid,a.sid,${targetDate},'query','overview',a.query.col3,sum(a.pv),0,0,0,0,0,0,max(a.query.col4),'' from (select cid,sid,session,min(struct(time,query,se,ispay)) as query,1 as pv from nas_tracking_format_hour where day=${targetDate} group by cid,sid,session) a where a.query.col2 is not null and length(a.query.col2)>0 and a.query.col3 is not null and length(a.query.col3)>0 group by a.cid,a.sid,a.query.col3"
# External links (外部链接), per referring URL: sessions whose earliest hit
# has refererType=3 (external-source traffic, per the job name), grouped by
# the full referer URL of that hit (refererInfo.col2); referers of 1024+
# characters are dropped. Same session-level metric set as the other
# queries in this script.
# Output: local dir ${path}/${targetDate}/${targetHour}_outeach.
hive -e "add files ${parentPath}/page.txt ${parentPath}/event.txt ${parentPath}/time.txt ${parentPath}/pagenum.txt;add jar ${parentPath}/lib/NASUDF-0.0.1.jar;create temporary function getconversion as 'com.nsw.udf.GetConversion';set mapred.job.name=[NAS][referer][out][each][${targetDate}];use formatlog;insert overwrite local directory '${path}/${targetDate}/${targetHour}_outeach' select a.cid,a.sid,${targetDate},'out','each',a.refererInfo.col2,sum(a.pv),count(distinct a.nasid),count(distinct ip),sum(if(a.pv=1,1,0)) as bounce,sum(a.visit_time)/sum(a.pv) as avg_visittime,count(distinct a.session),sum(a.conversions) ,'0','' from (select cid,sid,session,min(struct(time,referer,refererType)) as refererInfo,max(nasid) as nasid,max(ip) as ip,((max(time)-min(time))/1000+60) as visit_time,count(1) as pv,getconversion(cid,sid,concat_ws(',',collect_set(ep)),concat_ws(',',collect_list(cast(eventid as string))),(max(time)-min(time))/1000+60) as conversions from nas_tracking_format_hour where day=${targetDate} group by cid,sid,session) a where a.refererInfo.col3=3 and length(a.refererInfo.col2)<1024 group by a.cid,a.sid,a.refererInfo.col2"
# External-links overview (外部链接概览): a single 'all' row per (cid, sid)
# aggregating every session whose earliest hit has refererType=3
# (external-source traffic, per the job name).
# Output: local dir ${path}/${targetDate}/${targetHour}_overview.
hive -e "add files ${parentPath}/page.txt ${parentPath}/event.txt ${parentPath}/time.txt ${parentPath}/pagenum.txt;add jar ${parentPath}/lib/NASUDF-0.0.1.jar;create temporary function getconversion as 'com.nsw.udf.GetConversion';set mapred.job.name=[NAS][referer][out][overview][${targetDate}];use formatlog;insert overwrite local directory '${path}/${targetDate}/${targetHour}_overview' select a.cid,a.sid,${targetDate},'out','overview','all',sum(a.pv),count(distinct a.nasid),count(distinct ip),sum(if(a.pv=1,1,0)) as bounce,sum(a.visit_time)/sum(a.pv) as avg_visittime,count(distinct a.session),sum(a.conversions),'0','' from (select cid,sid,session,min(struct(time,refererType)).col2 as refererType,max(nasid) as nasid,max(ip) as ip,((max(time)-min(time))/1000+60) as visit_time,count(1) as pv,getconversion(cid,sid,concat_ws(',',collect_set(ep)),concat_ws(',',collect_list(cast(eventid as string))),(max(time)-min(time))/1000+60) as conversions from nas_tracking_format_hour where day=${targetDate} group by cid,sid,session) a where a.refererType=3 group by a.cid,a.sid"
# Merge each Hive query's local output shards into a single flat file for
# the MySQL loader. (The `wait` statements that used to bracket this
# section were no-ops: none of the preceding commands run in the
# background, so there was never anything to wait for.) All expansions are
# quoted so paths containing unexpected characters stay intact; the
# trailing /* glob is intentionally left outside the quotes.
merge_shards() {
	# $1: Hive output directory suffix   $2: merged output file suffix
	cat "${path}/${targetDate}/${targetHour}_$1"/* > "${path}/${targetDate}/${targetHour}_$2"
}
merge_shards anyway_refererType nas_referer_referertype
merge_shards anyway_refererUrl nas_referer_refererurl
merge_shards search nas_referer_searchengines
merge_shards seoverview nas_referer_searchengines_overview
merge_shards queryeach nas_referer_searchkeyword
merge_shards queryoverview nas_referer_searchkeyword_overview
merge_shards queryoverviewse nas_referer_searchkeyword_searchengines_overview
merge_shards outeach nas_referer_externalsources
merge_shards overview nas_referer_externalsources_overview
# Load the merged files into MySQL via the loader script. Connection
# parameters (host, port, user, password, db) come from the sourced
# config file. Every expansion is quoted so values containing whitespace
# or glob characters (notably the password) reach the loader as single,
# intact arguments.
source "${parentPath}/configs/mysql_config"

load_table() {
	# $1: MySQL table name, which is also the merged file's name suffix.
	python "${path}/load_referer_to_mysql_v2.py" \
		"${path}/${targetDate}/${targetHour}_$1" \
		"${host}" "${port}" "${user}" "${password}" "${db}" "${day}" "$1"
}
load_table nas_referer_referertype
load_table nas_referer_refererurl
load_table nas_referer_searchengines
load_table nas_referer_searchengines_overview
load_table nas_referer_searchkeyword
load_table nas_referer_searchkeyword_overview
load_table nas_referer_searchkeyword_searchengines_overview
load_table nas_referer_externalsources
load_table nas_referer_externalsources_overview