#!/bin/bash

# Created by cheng 2016-06-16
# Project Apollo V1 SdsIpTi
#
# Nightly job: for yesterday's partition of idc_access_log, find source IPs
# that hit an unusually large number of distinct destination IPs between
# 02:00 and 06:00, break the top offenders down by sensitive destination
# port, then hand the resulting Hive table to a Spark job for TI output.
#
# Usage: <script> <ZK>
#   ZK - ZooKeeper connection string, passed through to the Spark job.

set -u  # treat unset variables as an error

if [ $# -lt 1 ]; then
    echo "$(date +"%Y-%m-%d %H:%M:%S")  Usage: $0 <ZK>" >&2
    exit 1
fi

ZK=$1
WORKPATH=/usr/local/storm10
JAVALIB=${WORKPATH}/topolib/
DATE=$(date -d "1 day ago" +%Y%m%d)                       # yesterday's partition key
# NOTE: LOGFILE must be defined BEFORE any logging (the original referenced
# it while still unset, so the banner lines never reached the log file).
LOGFILE=/home/spark/logs/apollo_SdsIpTi_${DATE}.log

# log MESSAGE... - timestamped line to stdout and appended to $LOGFILE.
log() {
    echo "$(date +"%Y-%m-%d %H:%M:%S")  $*" | tee -a "$LOGFILE"
}

# run_hive SQL - execute one Hive statement, APPENDING its output to the log
# (the original used '>' and truncated the log on every step). Aborts the
# job on failure instead of running later steps against missing tables.
run_hive() {
    if ! hive -e "$1" >> "$LOGFILE" 2>&1; then
        log "Hive step failed: $1"
        exit 1
    fi
}

log "------------------------------------"
log "-----------Project Apollo V1 SdsIpTi--------"
log "Shell Version 1.0 update(2016-06-16)"
log "------------------------------------"

# Rebuild the two scratch tables from a clean slate.
run_hive "drop table if exists tmp_sds_ip_ti_1;"
run_hive "drop table if exists tmp_sds_ip_ti_2;"

run_hive "create table if not exists tmp_sds_ip_ti_1(sourceip string, num int) row format delimited fields terminated by ',';"

# Top 50 source IPs contacting > 1000 distinct destination IPs in the
# 02:00-06:00 window of yesterday's data.
run_hive "insert into table tmp_sds_ip_ti_1 select sourceip ,count(distinct destip) n from idc_access_log_his where dt='$DATE' and hour between 2 and 6 group by sourceip having count(distinct destip) > 1000 order by n desc limit 50 ;"

run_hive "create table if not exists tmp_sds_ip_ti_2(sourceip string, destport string, dpnum int , dipnum int) row format delimited fields terminated by ',';"

# For each flagged source IP, count hits and distinct destinations on
# sensitive service ports (SSH, RDP, MSSQL, DNS, SMTP, FTP, RPC, SMB).
run_hive "insert into table tmp_sds_ip_ti_2 select a.sourceip , a.destport,count(a.destport) n ,count(distinct a.destip) m from idc_access_log a left outer join tmp_sds_ip_ti_1 b on a.sourceip = b.sourceip  where  b.sourceip is not null and a.dt='$DATE' and a.hour between 2 and 6 and (a.destport = '22' or a.destport = '3389' or a.destport = '1433' or a.destport = '53' or a.destport = '25' or a.destport = '21' or a.destport = '135' or a.destport = '445') group by a.sourceip, a.destport ;"

# Ship the port-breakdown table to the Spark TI job (appends to the log).
if ! spark-submit --class com.aotain.project.apollo.spark.SdsIpTiSpark \
        --deploy-mode cluster --master yarn \
        --num-executors 4 --driver-memory 4g --executor-memory 4g --executor-cores 1 \
        --conf "spark.executor.extraClassPath=/opt/cloudera/parcels/CDH/lib/hbase/lib/htrace-core-3.1.0-incubating.jar" \
        --driver-class-path "/opt/cloudera/parcels/CDH/lib/hbase/lib/htrace-core-3.1.0-incubating.jar" \
        --queue thequeue \
        /usr/local/storm10/topolib/hades-project-0.0.1-SNAPSHOT.jar \
        /user/hive/warehouse/tmp_sds_ip_ti_2 "$ZK" >> "$LOGFILE" 2>&1; then
    log "spark-submit failed [$DATE]!"
    exit 1
fi

log "Exec Success [$DATE]!"

exit 0