#!/usr/bin/python

#Created by turk 2016-03-10
#Domain Record [PYTHON]
#Version 1.0 

import os
import commands
import sys
import logging
import time
import datetime

# Today's date as YYYYMMDD, used only to build the per-day log file name.
LOGDATE = datetime.datetime.now()
LOGDATE = datetime.datetime.strftime(LOGDATE, '%Y%m%d')

# Base working directories and the daily log file location.
WORKPATH='/home/bsmp/work'
SPARKLIB=WORKPATH + '/lib'
LIB=WORKPATH + '/lib'
LOGPATH=WORKPATH + '/log'
LOGFILE=LOGPATH + '/domainrecord_'+LOGDATE+'.log'
# Oracle connection settings consumed by the sqoop export step below.
# NOTE(review): credentials are hard-coded in plain text -- consider moving
# them to a protected config file or environment variables.
ORACLE_IP="dg-u2-db-1"
ORACLE_USERNAME="sds"
ORACLE_PW="h16aug8v3w"
ORACLE_TNS="bsmp"

#HDFS
# Warehouse directory of the "sds" hive database; input/output paths for the
# spark job and the sqoop export are built under this prefix.
SDSDBPATH="/user/hive/warehouse/sds.db"

#LOCAL
# Local data directory; not referenced in this file -- presumably used by
# sibling scripts (TODO confirm before removing).
DATAPATH="/home/bsmp/work/data/delay"


#Logger
# Root logger: DEBUG and above go to the daily log file; filemode='w' means
# each run overwrites that day's previous log.
logging.basicConfig(level=logging.DEBUG,  
                    format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',  
                    datefmt='%Y-%m-%d %H:%M:%S',  
                    filename=LOGFILE,  
                    filemode='w')  
# Mirror INFO and above to the console with a shorter format.
console = logging.StreamHandler()  
console.setLevel(logging.INFO)  
# set print format
formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s')  
console.setFormatter(formatter)  
#
logging.getLogger('').addHandler(console)  
# Named logger used by ExportData for all command/result logging.
logger1 = logging.getLogger('test.delay')                     
                    


def _run_logged(CMD):
    """Log a shell command, run it, log the result, and return (status, output).

    Centralizes the log / commands.getstatusoutput / log pattern that was
    repeated for every pipeline step.  A non-zero exit status is additionally
    logged at ERROR level so failed hive/spark/sqoop steps stand out in the
    log file instead of being buried at INFO.
    """
    logger1.info(CMD)
    RET = commands.getstatusoutput(CMD)
    logger1.info(RET)
    if RET[0] != 0:
        logger1.error('command failed with exit status %s', RET[0])
    return RET


def ExportData(PROVINCEID):
    """Run the daily domain-record pipeline for one province.

    Steps (each executed as a shell command; results are logged, and the
    pipeline continues even if a step fails -- matching the original
    best-effort behavior):
      1. hive  : rebuild sds.mod_domainrecord_hive from yesterday's
                 idc_access_log (11:00-12:00 window, rate > 20%%).
      2. spark : DomainIPAly job producing mod_domainrecord_oracle.
      3. hive  : left-join with sds_http_stat_h into mod_domainrecord_oracle_ii.
      4. sqoop : export the joined table into Oracle SDS_DOMAINIP_RECORD.

    PROVINCEID -- province identifier string, passed verbatim to the spark job.
    """
    YESTERDAY = datetime.datetime.now() - datetime.timedelta(days=1)
    YESTERDAY = datetime.datetime.strftime(YESTERDAY, '%Y%m%d')

    ##HIVE SQL
    SQL="""hive -e \"drop table sds.mod_domainrecord_hive;
            create table sds.mod_domainrecord_hive ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' as 
            select * from (
            select IDC,domainname,destip,num/sum(num) over(partition by IDC,domainname)*100 rate from
            (select IDC,domainname,destip,count(*) num 
            from idc_access_log where dt = '"""+YESTERDAY+"""' and hour between 11 and 12 and domainname <> ''
            
            group by domainname,destip,IDC) r1) r2 where rate > 20; \""""
    _run_logged(SQL)

    ##SPARK
    INPUTPATH = SDSDBPATH + "/mod_domainrecord_hive"
    OUTPUTPATH = SDSDBPATH + "/mod_domainrecord_oracle"
    OUTPUTPATHII = SDSDBPATH + "/mod_domainrecord_oracle_ii"

    SPARKCMD="""spark-submit --class com.aotain.project.apollo.spark.DomainIPAly --deploy-mode cluster --master yarn --num-executors 8 --driver-memory 2g --executor-memory 2g --executor-cores 1 --queue thequeue """+LIB+"""/hades-project-0.0.1-SNAPSHOT.jar """+INPUTPATH+""" """+ OUTPUTPATH + """ """+ORACLE_IP + """ """ + PROVINCEID
    _run_logged(SPARKCMD)

    #Modified by turk 2015-05-27 Add union table sds_http_stat_h + mod_domainrecord_oracle import oracle
    SQL="""hive -e \"drop table sds.mod_domainrecord_oracle_ii;
            create table sds.mod_domainrecord_oracle_ii ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' as
            select a.reportdate,a.idc,a.domain,a.ip,a.webrecordnum,a.websitename,a.company,a.companytype,a.isiprecord,a.idcname,
            a.cityname,a.iparea,a.recordnum,a.ipcompany,a.ipsegcompany
            ,case when b.httpnum is null then 0 else b.httpnum end httpnum from sds.mod_domainrecord_oracle  a left outer join 
            (select date,domain,ip,sum(httpnum) httpnum from 
                            sds.sds_http_stat_h where date = '"""+YESTERDAY+"""' group by date,domain,ip) b
                        on b.ip = a.ip and b.domain=a.domain;\""""
    _run_logged(SQL)

    ##SQOOP
    # sqoop needs the Oracle JDBC driver on the hadoop classpath.
    os.environ['HADOOP_CLASSPATH']=str(LIB + "/ojdbc14.jar")
    SQOOPCMD = """sqoop export --connect jdbc:oracle:thin:@"""+ORACLE_IP+""":1521:"""+ORACLE_TNS+""" --username """+ORACLE_USERNAME+""" --password """+ORACLE_PW+""" --table SDS_DOMAINIP_RECORD --columns REPORTDATE,IDC,DOMAINNAME,ip,WEBRECORDNUM,SITENAME,COMPANY,COMPANYTYPE,IPISRECORD,IDCNAME,ACCESSAREA,IPAREA,RECORDNUM,IPCOMPANY,HTTPNUM,IPSEGCOMPANY --export-dir """+OUTPUTPATHII+""" --fields-terminated-by '|' -m 2"""
    _run_logged(SQOOPCMD)
    

if __name__ == '__main__':
    #print sys.argv  
    #if len(sys.argv) <5:
    #    print "Usage:<DESTIP> <SOURCEIP> <DATE> <STARTHOUR> <ENDHOUR>"
    #    sys.exit()
    
    PROVINCEID=sys.argv[1]
    #SOURCEIP=sys.argv[2]
    #DATE=sys.argv[3]
    #STARTHOUR=sys.argv[4]
    #ENDHOUR=sys.argv[5]
        
    ExportData(PROVINCEID)
     
    print 'COMPLATE'
