#!/usr/bin/env python
#-*-encoding:utf-8-*-
'''
Created on 2015年8月6日

@author: chenyongbing
'''
import sys,os,datetime,re,commands
import logging,urllib2,urllib,json
#current_dir = os.path.dirname(__file__)
current_dir = os.path.split(os.path.realpath(__file__))[0]

from encrypt_customer_upload_file import myEncryptUploadFile


sys.path.append(os.path.join(current_dir,'../'))
from base.time_method import myTimeMethod

from base.zhimind_hive_datas import myZhimindHiveDatas
from base.zhimind_datas import myZhimind
from base.retail_datas import myRetailDatas

from base.PyMySQL import ConnectMySQL
from base.localConfig import baseLocalConfig

# Connection settings for the 'zhimind_analysis' MySQL database, read from the
# shared local configuration.  ``myZhimindAnalysis`` is the module-wide
# connection used below to read and update demand rows in the 'analysis' table.
zhimind_host = baseLocalConfig.mysqlConfig['zhimind_analysis']['host']
zhimind_port = baseLocalConfig.mysqlConfig['zhimind_analysis']['port']
zhimind_user = baseLocalConfig.mysqlConfig['zhimind_analysis']['user']
zhimind_passwd = baseLocalConfig.mysqlConfig['zhimind_analysis']['passwd']
zhimind_db = baseLocalConfig.mysqlConfig['zhimind_analysis']['db']
myZhimindAnalysis = ConnectMySQL(host=zhimind_host, user=zhimind_user, password=zhimind_passwd, db=zhimind_db,port=zhimind_port)


class analyzeCustomerDemand():
    def __init__(self,testing=False,hiveTab = 'newhivehistoryrole',remoteHost=None,retryTimes=5,token=10002000,wsUrl='http://localhost:8080/zm-analysis-1.0/api/v1/python/dataAnalysisFinished'):
        self.testing = testing
        self.wsUrl = wsUrl
        self.token = token
        self.retryTimes = retryTimes
        self.hiveTab = hiveTab
        self.remoteHost = remoteHost
        self.uuid = ''
        
    def get_request(self,detailMap=None):
        """POST ``detailMap`` (form-encoded) to ``self.wsUrl``.

        Returns the response body, or False when no wsUrl is configured.
        Network errors propagate to the caller, as before.
        """
        if self.wsUrl==None:return False
        # FIX: ``detailMap={}`` was a shared mutable default; use None sentinel.
        if detailMap is None:
            detailMap = {}
        data=urllib.urlencode(detailMap)
        url = self.wsUrl

        req=urllib2.Request(url,data)
        response=urllib2.urlopen(req)
        try:
            page = response.read()
        finally:
            # FIX: the original leaked the response handle
            response.close()
        return page
        
        
    def get_dayRange_from_dayTimeRange(self,dayTimeRange):
        """Collapse a list of dayTime dicts into [[startDay, endDay], ...] pairs."""
        return [[entry['startDay'], entry['endDay']] for entry in dayTimeRange]


    def get_demand_json_data_by_uuid(self,uuid,tab='analysis'):
        """Fetch the job_dispatch_json column for ``uuid`` from table ``tab``.

        Returns whatever myZhimind.SelectOne returns for the row (None when
        no row matches).  NOTE(review): like the rest of this module the SQL
        is built by string interpolation -- uuid/tab must be trusted values.
        """
        query = 'select job_dispatch_json from %s where uuid="%s"'%(tab,uuid)
        # BUG FIX: the original called myZhimind.SelectOne() without the query
        # and discarded the result, so this method always returned None.
        return myZhimind.SelectOne(query)

    def get_demand_from_tab_by_status(self,tab='analysis',status=8):
        """Return (uuid, job_dispatch_json) of one demand row in the given
        status, or None when no such row exists."""
        query = 'select uuid,job_dispatch_json from %s where status=%s'%(tab,status)
        row = myZhimindAnalysis.SelectOne(query)
        if row is None:
            return None
        return row[0], row[1]


    def update_demand_info(self,tab='analysis',uuid='',export_file='',data_type='mac'):
        """Record size, line count and path of the finished export file on the
        demand row, mark it done (status 1), then notify the web service.

        On repeated update failure the demand is marked failed (status 2).
        NOTE(review): ``tab`` is accepted but the update always targets the
        'analysis' table, matching the original behaviour.
        """
        data_size = os.path.getsize(export_file)
        # FIX: the original leaked the handle via len(open(...).readlines())
        # and materialised the whole file; count lines streaming and close it.
        fh = open(export_file,'rU')
        try:
            line_cnt = sum(1 for _line in fh)
        finally:
            fh.close()
        status = 1
        # encrypt flag: 1 for mobile numbers, 2 for everything else (macs)
        if data_type == 'mobile':
            encrypt = 1
        else:
            encrypt = 2

        query = "update analysis set data_size='%s',line_cnt='%s',file_path='%s',status='%s',encrypt='%s' where uuid='%s'"%(data_size,line_cnt,export_file,status,encrypt,uuid)
        logging.info(query)
        ret = False
        for i in range(self.retryTimes):
            ret = myZhimindAnalysis.execute(query)
            if ret==True:
                logging.info('set uuid(%s) status(%s) success.'%(uuid,status))
                break
            else:
                logging.error('retry times %s ,set uuid(%s) status(%s) faild.'%(i,uuid,status))
        if ret == False:
            self.update_demand_status(uuid, status=2)
        detailMap={'Uuid':uuid,'Token':self.token}
        logging.info('uuid:%s Start Post To ws status %s,datas %s'%(uuid,status,str(detailMap)))
        self.get_request(detailMap=detailMap)
    
            
            
        
    def scp_file(self,host='zm12',export_file=None,import_file=None):
        """Copy a file to ``host`` (export_file) or from it (import_file) via
        scp, using the same path on both ends.

        Exactly one of export_file / import_file must be given.
        Returns True on success, False otherwise.
        """
        if export_file!=None :
            cmd = 'scp %s %s:%s'%(export_file,host,export_file)
        elif import_file!=None:
            cmd = 'scp %s:%s %s'%(host,import_file,import_file)
        else:
            logging.error('You Must Input One From Export_file and Import_file.')
            return False
        logging.info(cmd)
        status,out = commands.getstatusoutput(cmd)
        succeeded = (status == 0)
        if succeeded:
            logging.info(out)
        else:
            logging.error(out)
        return succeeded
        
            
            
    def update_demand_status(self,uuid,tab='analysis',status=9):
        """Set ``status`` on the demand row identified by ``uuid``, retrying up
        to self.retryTimes times.

        When the demand is being marked failed (status 2) and the update
        succeeded, the web service is notified as well.
        """
        query = 'update %s set status="%s" where uuid="%s"'%(tab,status,uuid)
        logging.info(query)
        ret = False
        for attempt in range(self.retryTimes):
            ret = myZhimindAnalysis.execute(query)
            if ret==True:
                logging.info('set uuid(%s) status(%s) success.'%(uuid,status))
                break
            logging.error('retry times %s ,set uuid(%s) status(%s) faild.'%(attempt,uuid,status))

        if status==2 and ret==True:
            detailMap={'Uuid':uuid,'Token':self.token}
            logging.info('uuid:%s Start Post To ws status %s,datas %s'%(uuid,status,str(detailMap)))
            self.get_request(detailMap=detailMap)
    
    
    def export_macs_from_hive(self,json_datas={},export_file='',import_file=''):
        """Run the hive query built from ``json_datas`` and write the matching
        macs, one per line, to ``import_file``.

        In testing mode only the query is built and logged.  Returns True.
        ``export_file`` is unused here; it is kept for call-site symmetry.
        """
        # these lookups double as presence checks on the demand json
        uuid = json_datas['uuid']
        taskType = json_datas['taskType']
        dataType = json_datas['dataType']
        scale = json_datas['scale']
        companyGroup = json_datas['companyGroup']
        shopGroupList = json_datas['shopGroupList']
        dayTimeRange = json_datas['dayTimeRange']
        customerType = json_datas['customerType']
        role = json_datas['role']
        frequencyRange = json_datas['frequencyRange']
        durationRange = json_datas['durationRange']

        full_query = self.get_hive_query_2(shopGroupList=shopGroupList,dayTimeRange=dayTimeRange,frequencyRange=frequencyRange,durationRange=durationRange,role=role)
        logging.info(full_query)

        if self.testing == True:
            return True

        dayRange = self.get_dayRange_from_dayTimeRange(dayTimeRange)

        # macs whose first visit day falls in range; used to split role==3
        # results by customerType below.
        # FIX: a set makes the per-mac membership tests O(1) instead of O(m).
        new_macs = set(self.get_new_customer_from_mysql(shopGroupList=shopGroupList,dayRange=dayRange))

        rets = myZhimindHiveDatas.myHiveClient.SelectAll(full_query)
        logging.info('Found %s Macs.'%len(rets))

        datas = []
        logging.info('Start Write macs to file %s.'%import_file)
        # FIX: 'with' guarantees the file is closed even if a row raises.
        with open(import_file,'w') as fr:
            for mac,sum_freq,avg_dur in rets:
                if role == 3 and customerType == 3:
                    # customerType 3: keep only first-day ("new") customers
                    if mac not in new_macs:continue
                elif role==3 and customerType == 10:
                    # customerType 10: keep only returning customers
                    if mac in new_macs:continue
                datas.append(mac)
                fr.write('%s\n'%mac)
        logging.info('Write End,macs %s.'%len(datas))
        return True
            
    def check_json(self,uuid,json_data_str='',feilds=None):
        """Parse and validate a demand json string.

        feilds lists the keys that must be present.  Returns the parsed dict
        with optional keys defaulted on success, False on any failure.
        """
        if feilds is None:  # FIX: avoid the mutable default argument
            feilds = []
        try:
            json_data  = json.loads(json_data_str)
        except (ValueError, TypeError):  # FIX: narrowed from a bare except
            logging.error('uuid %s json format error.'%uuid)
            return False

        miss_keys = list(set(feilds) - set(json_data.keys()))
        if len(miss_keys)!=0:
            logging.error('uuid %s json miss keys : %s'%(uuid,str(miss_keys)))
            return False
        logging.info('uuid %s json check OK.'%uuid)

        companyGroup = json_data['companyGroup']
        # FIX: reject a null companyGroup *before* using it for the shop-group
        # lookup (the original did the lookup first and checked afterwards).
        if companyGroup == None or companyGroup=='':
            logging.error('uuid(%s) companyGroup is null.'%uuid)
            return False
        if 'shopGroupList' not in json_data or json_data['shopGroupList']==None or len(json_data['shopGroupList'])==0 :
            # missing/empty shop group list defaults to every group of the company
            json_data['shopGroupList'] = myRetailDatas.get_groups_by_comp( comp_group=companyGroup)

        taskType = json_data['taskType']
        if len(json_data['shopGroupList']) == 0 and taskType=='system':
            logging.error('uuid(%s) shopGroupList is null.'%uuid)
            return False

        # fill optional keys with defaults ('in' replaces py2-only has_key)
        optional_defaults = [('role',0),('customerType',0),('frequencyRange',[]),
                             ('durationRange',[]),('dayTimeRange',[]),
                             ('dataType','mac'),('dataFileUrl','')]
        for key,default in optional_defaults:
            if key not in json_data:
                json_data[key] = default
        return json_data
    
    def analyze_json_datas_to_query(self,export_path='',scaleRange={},json_required_keys=[]):
        """Pull one pending demand (status 8), validate its json, export the
        requested data, encrypt it and report the result on the demand row.

        export_path        -- directory the export/import files are written to
        scaleRange         -- scale code -> [min, max] allowed line count (None = unlimited)
        json_required_keys -- keys that must exist in the demand json
        """
        if self.testing:
            # FIX: ``uuid`` was never assigned on this path, so the
            # ``self.uuid = uuid`` below raised NameError; the sample json
            # also had a trailing comma in dayTimeRange that json.loads
            # rejects.  Both are fixed here.
            uuid = 'XQ20150818001'
            json_datas = '''{
                "uuid": "XQ20150818001",
                "taskType": "system",
                "dataType": "mac",
                "dataFileUrl":"",
                "dataList": [],
                "companyGroup": 11001,
                "shopGroupList": [32011380,32011560],
                "dayRange": [["2015-07-01", "2015-07-30"],["2015-08-01"]],
                "timeRange": [[0,9],[16,22],[23]],
                "dayTimeRange": [
                        {    
                        "startDay": "2015-07-01", 
                        "endDay": "2015-07-30", 
                        "weeks": [0,1,2,3,4,5,6], 
                        "startTime": "00:00:00", 
                        "endTime": "23:59:59"
                        },
                        {    
                        "startDay": "2015-07-01", 
                        "endDay": "2015-07-30", 
                        "weeks": [0,1,2,3,4,5], 
                        "startTime": "16:00:00", 
                        "endTime": "23:59:59"
                        },
                        {    
                        "startDay": "2015-08-01", 
                        "endDay": "2015-08-01", 
                        "weeks": [0,1,2,3,4,5], 
                        "startTime": "00:00:00", 
                        "endTime": "09:59:59"
                        }
                    ],
                "role": 3,
                "customerType": 0,
                "frequencyRange": [[1,5],[7,10]],
                "durationRange": [[0,300000]],
                "tags": [],
                "scale":0
            }'''
        else:
            ret = self.get_demand_from_tab_by_status(status=8)
            if ret == None:
                logging.warn('Can Not Found Any Demand.')
                return
            uuid,json_datas = ret
            # mark the demand "in progress" so it is not picked up twice
            self.update_demand_status(uuid, status=9)

        self.uuid = uuid

        json_datas = self.check_json(uuid, json_data_str=json_datas, feilds=json_required_keys)
        if json_datas == False:
            self.update_demand_status(uuid, status=2)
            return

        taskType = json_datas['taskType']
        dataType = json_datas['dataType']
        scale = json_datas['scale']
        dataFileUrl = json_datas['dataFileUrl']
        companyGroup = json_datas['companyGroup']

        if scale not in scaleRange.keys():
            logging.error('scale %s not in scaleRange %s'%(scale,scaleRange)) 
            return
        elif scale == 0:
            # scale 0: no size limits on the exported file
            min_line_cnt = max_line_cnt = None
        else:
            min_line_cnt , max_line_cnt = scaleRange[scale]

        export_file = export_path + '%s.%s.%s.%s.txt'%(uuid,taskType,dataType,companyGroup)
        import_file = export_file+'.tmp'
        if taskType == 'system' and dataType == 'mac':
            # system/mac demands are answered straight from hive
            self.export_macs_from_hive(json_datas=json_datas,export_file=export_file,import_file=import_file)
        else:
            # every other demand must supply its raw data via dataFileUrl
            logging.error('You Must Get %s From dataFileUrl %s Or Create it by yourself.'%(import_file,dataFileUrl))
            if dataFileUrl != '':
                wgetCmd = 'wget -T 5 -t 5 -c  "%s"  -O %s'%(dataFileUrl,import_file)
                logging.info(wgetCmd)
                wget_status , stdout = commands.getstatusoutput(wgetCmd)
                logging.info(stdout)
                if wget_status != 0 :
                    logging.error('Wget %s error.'%dataFileUrl)
                    self.update_demand_status(uuid, status=2)
                    return

        if os.path.exists(import_file):
            logging.info('Import File %s already Exists.'%import_file)
        else:
            logging.error('Can Not Found Import File %s.'%import_file)
            self.update_demand_status(uuid, status=2)
            return

        encrypt_status = myEncryptUploadFile.encrypt_and_update_tab(import_file=import_file,
                                                   export_file=export_file,
                                                   data_type=dataType, 
                                                   max_line_cnt=max_line_cnt,
                                                   min_line_cnt=min_line_cnt,
                                                   demand_id=uuid)
        if encrypt_status == False:
            self.update_demand_status(uuid, status=2)
            return

        scp_status = True
        if self.remoteHost:
            scp_status = self.scp_file(host=self.remoteHost, export_file=export_file)

        if scp_status == False:
            self.update_demand_status(uuid, status=2)
            return
        else:
            self.update_demand_info(uuid=uuid, export_file=export_file, data_type=dataType)
    
    
    def get_new_customer_from_mysql(self,shopGroupList=[],dayRange=[]):
        """Return the macs whose first visit (day == first_day) falls inside
        ``dayRange`` for the given shop groups."""
        group_query = 'dgroup in (%s)'%(','.join(str(group) for group in shopGroupList))
        day_query =  self.get_query_by_range(dataRange = dayRange,feild='day')

        query = 'select mac from mac_customer_stats where day=first_day'
        if group_query!=None:
            query += ' and %s'%group_query
        if day_query != None:
            query += ' and %s '%day_query
        logging.info(query)

        rows = myZhimind.SelectAll(query)
        macs = [row[0] for row in rows]
        logging.info('Found %s New Customer.'%len(macs))
        return macs
        
        
        
    
    def get_mysql_query(self,shopGroupList=[],dayRange=[],frequencyRange=[],durationRange=[],role=0):
        groups =  map(lambda group :str(group),shopGroupList)
        group_query = 'grp in (%s)'%(','.join(groups))
        day_query =  self.get_query_by_range(dataRange = dayRange,feild='day')
        freq_query =  self.get_query_by_range(dataRange = frequencyRange,feild='sum(freq)')
        dur_query =  self.get_query_by_range(dataRange = durationRange,feild='sum(sum_dur)/sum(freq)')
        
        query_top = 'select mac,sum(visit_freq) as sum_freq,sum(sum_dur)/sum(visit_freq) as avg_dur from %s'%('mac_customer_stats')
        query = ''
        if group_query != None and len(group_query)>0:
            query += group_query
        if day_query != None:
            query += ' and %s '%day_query
    
        query = re.sub('^and','',query.strip())
    
        having_query = ''
        if freq_query != None:
            having_query += ' %s'%freq_query
        if dur_query != None:
            having_query+= ' and %s'%dur_query
        having_query = re.sub('^and','',having_query.strip())
        if having_query!='':
            query += '  group by mac  having %s'%having_query
        else:
            query += '  group by mac  '
        full_query = query_top+' where '+ query
        logging.info(full_query)
        return full_query
        
        
        
      
    def get_hive_query_2(self,shopGroupList=[],dayTimeRange=[],frequencyRange=[],durationRange=[],role=0):
        """Build the hive query selecting per-mac visit count (sum_freq) and
        average duration in seconds (avg_dur) from ``self.hiveTab``.

        Each dayTimeRange entry becomes one OR-ed predicate combining its
        day span, time-of-day span and weekday list.  The frequency and
        duration filters are applied as a HAVING clause only when role==3.
        Returns the full query string.
        """
        groups =  map(lambda group :str(group),shopGroupList)
        group_query = 'grp in (%s)'%(','.join(groups))
    #     day_query =  get_query_by_range(dataRange = dayRange,feild='day')
    #     hour_query =  get_query_by_range(dataRange = timeRange,feild = 'hour(st)')
        freq_query =  self.get_query_by_range(dataRange = frequencyRange,feild='sum_freq')
        dur_query =  self.get_query_by_range(dataRange = durationRange,feild='avg_dur')
        # map the requested role onto the stored role codes
        # (presumably 3/10 = customer-like, -1/1 = non-customer -- TODO confirm)
        if role == 3:
            role_query = 'role in (3,10)'
        elif role == 1:
            role_query = 'role in (-1,1)'
        else:
            role_query = 'role in (-1,1,3,10)'
        query_top = 'select mac ,count(*) as sum_freq,avg(dur/1000) as avg_dur from %s '%(self.hiveTab)
        query = ''
        if group_query != None and len(group_query)>0:
            query += group_query
            
            
        # build one bracketed predicate per dayTimeRange entry
        day_time_range_query_list = []
        for dayTime in dayTimeRange:
            startDay = dayTime['startDay']
            endDay = dayTime['endDay']
            weeks = dayTime['weeks']
            startTime = dayTime['startTime']
            endTime = dayTime['endTime']
            day_time_query = ''
            
            day_query = self.get_query_by_range(dataRange = [[startDay,endDay]],feild='day')
            if day_query !=None:
                day_time_query = day_query
            
            # a full-day window needs no time-of-day filter
            if startTime == '00:00:00' and endTime == '23:59:59':
                hour_query = None
            else:
                hour_query = self.get_query_by_range(dataRange = [[startTime,endTime]],feild='SUBSTRING(st,12,8)')
            
            if hour_query !=None:
                day_time_query +=' and %s '%hour_query
            
            
            # all seven weekdays selected means no weekday filter is needed
            if weeks == [0,1,2,3,4,5,6]:
                week_query = None
            else:
                # 2015-08-16 is a Sunday (the anchor date for the modulo below)
                # datediff: hive function giving the difference in days
                # pmod: hive positive-remainder function
                week_query = self.get_query_by_list(dataList = weeks,feild= 'pmod(datediff(day,"2015-08-16"),7)')
            
            if week_query !=None:
                day_time_query +=' and %s '%week_query
            
            # strip a leading 'and' left over when day_query was None
            day_time_query = re.sub('^and','',day_time_query.strip())
            
            if day_time_query!='':
                day_time_range_query_list.append('(%s)'%day_time_query)
            
            
        if len(day_time_range_query_list)!=0:
            day_time_range_query = '(%s)'%(' or '.join(day_time_range_query_list))
            query += ' and %s'%day_time_range_query
        
        if role_query !=None:
            query += ' and %s'%role_query
        query = re.sub('^and','',query.strip())
        having_query = ''
        if role==3:
            # frequency/duration filters are only applied for role 3 demands
            if freq_query != None:
                having_query += ' %s'%freq_query
            if dur_query != None:
                having_query+= ' and %s'%dur_query
            having_query = re.sub('^and','',having_query.strip())
        if having_query!='':
            query += ' group by mac having %s'%having_query
        else:
            query+= ' group by mac'
        full_query = query_top+' where '+ query
        logging.info(full_query)
        return full_query
      
        
    def get_hive_query(self,shopGroupList=[],dayRange=[],timeRange=[],frequencyRange=[],durationRange=[],role=0):
        groups =  map(lambda group :str(group),shopGroupList)
        group_query = 'grp in (%s)'%(','.join(groups))
        day_query =  self.get_query_by_range(dataRange = dayRange,feild='day')
        hour_query =  self.get_query_by_range(dataRange = timeRange,feild = 'hour(st)')
        freq_query =  self.get_query_by_range(dataRange = frequencyRange,feild='sum_freq')
        dur_query =  self.get_query_by_range(dataRange = durationRange,feild='avg_dur')
        if role == 3:
            role_query = 'role in (3,10)'
        elif role == 1:
            role_query = 'role in (-1,1)'
        else:
            role_query = 'role in (-1,1,3,10)'
        query_top = 'select mac ,count(*) as sum_freq,avg(dur/1000) as avg_dur from %s '%(self.hiveTab)
        query = ''
        if group_query != None and len(group_query)>0:
            query += group_query
        if day_query != None:
            query += ' and %s '%day_query
        if hour_query != None:
            query += ' and %s '%hour_query
        if role_query !=None:
            query += ' and %s'%role_query
        query = re.sub('^and','',query.strip())
        having_query = ''
        if role==3:
            
            if freq_query != None:
                having_query += ' %s'%freq_query
            if dur_query != None:
                having_query+= ' and %s'%dur_query
            having_query = re.sub('^and','',having_query.strip())
        if having_query!='':
            query += ' having %s'%having_query
        else:
            query+= '  group by mac  '
        full_query = query_top+' where '+ query
        logging.info(full_query)
        return full_query
        
        
        
    def get_query_by_list(self,dataList=[],feild='day'):
        """Build an equality (single value) or IN (many values) predicate on
        ``feild``; returns None for an empty list."""
        count = len(dataList)
        if count == 0:
            return None
        if count == 1:
            return '%s="%s"'%(feild,dataList[0])
        quoted = ','.join('"%s"'%item for item in dataList)
        return "%s in (%s)"%(feild,quoted)
    
        
    def get_query_by_range(self,dataRange=[],feild='day'):
        """Build an OR-joined predicate over [low, high] pairs for ``feild``:
        equality when low == high (or the pair has one element), BETWEEN
        otherwise.  Multiple pairs are bracketed; empty input returns None."""
        parts = []
        for bounds in dataRange:
            if len(bounds)==1:
                low = high = bounds[0]
            else:
                low,high = bounds
            if low==high:
                parts.append(' %s = "%s" '%(feild,low))
            else:
                parts.append(' %s between "%s" and "%s" '%(feild,low,high))
        if not parts:
            return None
        if len(parts)==1:
            return parts[0]
        return "(%s)"%(' or '.join(parts))
    
    
    
    
    
    
    
    
    def main(self,args):
        """Entry point: copy the parsed CLI args onto the instance and process
        one pending demand.

        Any exception marks the current demand (self.uuid) as failed (status 2).
        """
        # scale code -> allowed [min, max] exported line count (None = unlimited)
        scaleRange = {0:None,1:[300,10000],2:[10001,100000],3:[100001,2000000]}
        json_required_keys = ['uuid','taskType','companyGroup','scale']

        self.testing = args.testing
        self.token = args.token
        self.hiveTab = args.hiveTab
        export_path = args.exportPath
        remoteHost = args.remoteHost
        self.remoteHost = remoteHost
        # the "finished" callback is only posted when a remote host serves it
        if remoteHost == None:
            wsUrl = None
        else:
            wsUrl = 'http://%s:8080/zm-analysis-1.0/api/v1/python/dataAnalysisFinished'%remoteHost
        self.wsUrl = wsUrl
        logging.info('Main Start.')
        try:
            self.analyze_json_datas_to_query(export_path=export_path,scaleRange=scaleRange,json_required_keys=json_required_keys)
        except Exception as error:  # FIX: 'as' form is valid on python 2.6+ and 3.x
            logging.error(error)
            # self.uuid is '' until a demand is picked up; updating '' matches no row
            self.update_demand_status(self.uuid, status=2)
        logging.info('Main End.')

# Module-level singleton; importers use myAnalyzeCustomerDemand directly.
myAnalyzeCustomerDemand = analyzeCustomerDemand()
if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser(description='args')
    parser.add_argument('--exportPath',metavar='export path',default='/data/tmp/')
    parser.add_argument('--testing',action='store_true',help='testing')
    # NOTE(review): this default is the string '10002000' while __init__
    # defaults token to the int 10002000 -- confirm the ws accepts both.
    parser.add_argument('--token',metavar='token',default='10002000')
    parser.add_argument('--remoteHost',metavar='scp to host',default=None)
    parser.add_argument('--hiveTab',metavar='hive tab',default='newhivehistoryrole')
    args = parser.parse_args()
    #logging.basicConfig(level = logging.INFO)
    # Rebind the name ``logging`` (the stdlib module imported at the top of
    # the file) to the project logger writing to stdout and a rotating file.
    from base.mylogging import logging_stdout_and_logfile
    logging = logging_stdout_and_logfile( logfile='/data/log/analysis_customer_demand.testing.log',
                                          maxBytes=100 * 1024 * 1024)
    
    
    myAnalyzeCustomerDemand.main(args)
