#!/usr/bin/python

#Created by turk 2015-12-08
#Modified by turk 2016-03-01
#Port Scan [PYTHON]
#Version 1.0 

import os
import commands
import sys
import logging
import time
import datetime

# Today's date as 'YYYYMMDD'; embedded in the per-day log file name below.
TODAY = datetime.datetime.now()
TODAY = datetime.datetime.strftime(TODAY, '%Y%m%d')

# Working-directory layout (hard-coded deployment path on the analysis host).
WORKPATH='/home/bsmp/work'
SPARKLIB=WORKPATH + '/lib'      # jars for the (currently disabled) Spark step
LOGPATH=WORKPATH + '/log'       # must exist, or basicConfig below will fail
LOGFILE=LOGPATH + '/portScanAly_' + TODAY +'.log'
#ZOOSERVER='shanghai-cm-7'

#HDFS
# Warehouse directory of the Hive 'sds' database; output tables land under it.
SDSDBPATH="/user/hive/warehouse/sds.db"

#Logger
# Root logger: DEBUG and above go to the dated log file (filemode='w'
# truncates any earlier log from the same day).
logging.basicConfig(level=logging.DEBUG,  
                    format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',  
                    datefmt='%Y-%m-%d %H:%M:%S',  
                    filename=LOGFILE,  
                    filemode='w')  
# Secondary handler: mirror INFO and above to the console.
console = logging.StreamHandler()  
console.setLevel(logging.INFO)  
# set print format
formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s')  
console.setFormatter(formatter)  
# Attach the console handler to the root logger so every logger inherits it.
logging.getLogger('').addHandler(console)  
# Named logger used throughout this script.
logger1 = logging.getLogger('sds.portscan')                     
                    




def HiveSql(ZOOSERVER, PORT, IDC, CURDATE, DBSERVER):
    """Run the two-stage Hive analysis for one port / IDC / day.

    Stage 1: from the access log, pick the top-50 source IPs that hit
    >= 1000 distinct destination IPs on PORT between hour 0 and 4, and
    store them in a temp table.
    Stage 2: join those IPs back against the full access log and keep
    each IP whose single most-contacted destination port is PORT,
    written to a comma-delimited output table.

    Parameters (all strings, substituted verbatim into the Hive SQL):
      ZOOSERVER, DBSERVER -- currently unused; kept for the disabled
                             Spark submission step (see note at bottom).
      PORT    -- destination port under analysis.
      IDC     -- IDC partition value.
      CURDATE -- 'dt' partition value (YYYYMMDD).

    Returns nothing; results live in the Hive tables. Each hive
    invocation's (status, output) pair is logged via logger1.

    SECURITY NOTE: the arguments are interpolated straight into a shell
    command and into SQL -- callers must pass trusted values only.
    """
    OUTPUTTABLE = "sds.temp_" + CURDATE + "_" + PORT + "_sourceip"
    TEMPTABLE1 = "sds.temp_" + CURDATE + "_" + PORT + "_sourceip_1"
    #ALTABLE="cu_v2.idc_access_log_"+IDC+"_"+CURDATE
    ALTABLE = "idc_access_log_his"

    # Stage 1: top-50 scanners on this port (>= 1000 distinct dest IPs).
    SQL = """hive -e \"drop table %(t1)s;
        create table %(t1)s as
        select r3.sourceip from (
        select sourceip,row_Number() over(order by n desc) rank from (
        select sourceip, count(*) n from (
        select sourceip,destip from %(al)s 
        where destport = %(port)s and IDC='%(idc)s' and dt = '%(dt)s' and hour between 0 and 4
        group by sourceip,destip) r1 
        group by sourceip) r2 where n >= 1000) r3  where rank <= 50;\"""" % {
        "t1": TEMPTABLE1, "al": ALTABLE, "port": PORT, "idc": IDC, "dt": CURDATE}
    logger1.info(SQL)
    RET = commands.getstatusoutput(SQL)
    logger1.info(RET)

    # Stage 2: keep stage-1 IPs whose most-hit destination port is PORT.
    SQL = """hive -e \"drop table %(out)s;
        create table %(out)s ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' as 
        select * from (
        Select sourceip,destport,n,row_number() over(partition by sourceip order by n desc) rank from (
        Select a.sourceip,destport,count(*) n from %(al)s a 
        join %(t1)s b on a.sourceip = b.sourceip
        where IDC = '%(idc)s' 
        and dt = '%(dt)s'
        and hour between 0 and 4
        Group by a.sourceip,destport  ) p1) p2 where destport = %(port)s and rank = 1;\"""" % {
        "out": OUTPUTTABLE, "t1": TEMPTABLE1, "al": ALTABLE,
        "port": PORT, "idc": IDC, "dt": CURDATE}
    logger1.info(SQL)
    RET = commands.getstatusoutput(SQL)
    logger1.info(RET)

    # Inputs for the follow-up Spark job; computed but unused while that
    # step is disabled.
    SOURCEPATH = SDSDBPATH + "/" + "temp_" + CURDATE + "_" + PORT + "_sourceip"
    TAG = "PORT" + PORT

    # NOTE(review): the original spark-submit of
    # com.aotain.project.apollo.spark.SourceIPBlackListAly
    # (hades-project-0.0.1-SNAPSHOT.jar from SPARKLIB, taking
    # <SOURCEPATH> <ZOOSERVER> <TAG> <DBSERVER>) was commented out
    # upstream. Re-add it here if the black-list export is re-enabled.
    

if __name__ == '__main__':
    print(sys.argv)
    # Five positional arguments are required (argv[1]..argv[5]).
    # The original check was `< 4`, so a 4-argument invocation skipped the
    # usage message and crashed below on sys.argv[5] with IndexError.
    if len(sys.argv) < 6:
        print("Usage:<ZSERVER> <PORT> <IDC> <CURTIME> <DBSERVER>")
        sys.exit(1)  # non-zero: bad invocation is an error, not success

    ZOOSERVER = sys.argv[1]
    PORT = sys.argv[2]
    IDC = sys.argv[3]
    CURTIME = sys.argv[4]
    DBSERVER = sys.argv[5]

    HiveSql(ZOOSERVER, PORT, IDC, CURTIME, DBSERVER)

    print('COMPLETE')
