#!/usr/bin/env python
#-*-encoding:utf-8-*-
'''
Created on 2014年11月4日

@author: chenyongbing
'''
import argparse
import datetime
import logging
import os
import re
import sys
from localConfig import LocalConfig

# Package-local configuration (provides rawdata_path, used by __main__).
myLocalConfig = LocalConfig()


from runIdentifyByGroup import RunIdentifyByGroup

#myRunIdentifyByGroup = RunIdentifyByGroup()
# Make the parent directory importable so the shared `base` package resolves.
# NOTE: this sys.path tweak must run BEFORE the `base.*` imports below.
current_dir = os.path.split(os.path.realpath(__file__))[0]
sys.path.append(os.path.join(current_dir,'../'))

from base.PyMySQL import ConnectMySQL
from base.localConfig import baseLocalConfig
from base.retail_datas import myRetailDatas
# Connection settings for the 'datatuning' MySQL database.
tuning_host = baseLocalConfig.mysqlConfig['datatuning']['host']
tuning_port = baseLocalConfig.mysqlConfig['datatuning']['port']
tuning_user = baseLocalConfig.mysqlConfig['datatuning']['user']
tuning_passwd = baseLocalConfig.mysqlConfig['datatuning']['passwd']
tuning_db = baseLocalConfig.mysqlConfig['datatuning']['db']

# Shared module-level connection used by RunIdentifyEveryDay.export_to_database().
# NOTE(review): tuning_port is read above but never passed to ConnectMySQL —
# presumably the connector's default port applies; confirm this is intentional.
myConnectMySQL = ConnectMySQL(host=tuning_host,user=tuning_user,
                              password=tuning_passwd,
                              db = tuning_db
                              )





class RunIdentifyEveryDay(RunIdentifyByGroup):
    '''
    Daily statistics runner: aggregates per-group customer counts per hour
    and per day, then persists them into the `group_count_by_rssi` MySQL
    table via the module-level `myConnectMySQL` connection.

    Inherits `stat_customer_by_hour()` and `get_slist_between_st_et()`
    from RunIdentifyByGroup (project module; not visible here).
    '''

    def export_to_database_by_group(self, st='', et='', min_rssi=-90, max_rssi=0,
                                    walkby_list=None, comp='', groups=None,
                                    show='customer', type='stat', order='time',
                                    showTypeList=None, opening=None):
        '''
        Run the hourly customer statistics for each device group between
        `st` and `et` (inclusive 'YYYY-MM-DD' day strings) and write the
        results to MySQL, one group at a time.

        :param min_rssi/max_rssi: RSSI window passed through to the stat step
        :param walkby_list: walk-by thresholds (strings, minutes); defaults to ['60','120']
        :param groups: mapping of group id -> device list; defaults to {}
        :param type: kept as-is for caller compatibility (shadows the builtin)
        :param opening: passed through to stat_customer_by_hour unchanged
        '''
        # The original defaults were mutable ([], ['60','120'], [...]) —
        # replaced with None sentinels; the [] default for `groups` would
        # also have crashed on dict iteration.
        if walkby_list is None:
            walkby_list = ['60', '120']
        if groups is None:
            groups = {}
        if showTypeList is None:
            showTypeList = ['day', 'hour']
        # .items() instead of .iteritems() keeps Python 2/3 compatibility.
        for group, devices in groups.items():
            groupsInfo = [{'group': str(group), 'devices': devices}]
            rssiMsgList = self.stat_customer_by_hour(
                st=st, et=et, min_rssi=min_rssi, max_rssi=max_rssi,
                walkbyList=walkby_list, comp=comp, groupsInfo=groupsInfo,
                show=show, type=type, order=order,
                showTypeList=showTypeList, opening=opening)
            self.export_to_database(rssiMsgList)

    def get_customer_count_with_hour(self, hourMsgs=None):
        '''
        Count customer sightings (role == 3) for each hour.

        :param hourMsgs: dict mapping hour -> list of {'role': .., 'mac': ..};
                         defaults to {} (the original [] default would have
                         crashed on .items())
        :return: dict hour -> {'customer_count', 'customer_dist_count'},
                 plus a synthetic 'day' key holding whole-day totals.
        '''
        if hourMsgs is None:
            hourMsgs = {}
        datas = {}
        day_customer_count = 0
        day_customer_macs = set()   # set instead of list: O(1) dedup
        for hour, hmsgs in hourMsgs.items():
            macs = [hmsg['mac'] for hmsg in hmsgs if hmsg['role'] == 3]
            datas[hour] = {'customer_count': len(macs),
                           'customer_dist_count': len(set(macs))}
            day_customer_count += len(macs)
            day_customer_macs.update(macs)
        datas['day'] = {'customer_count': day_customer_count,
                        'customer_dist_count': len(day_customer_macs)}
        return datas

    def get_flow_and_customer_count(self, hourMsgs=None):
        '''
        Count flow (all roles) and customers (role == 3) in a flat message list.

        :param hourMsgs: list of {'role': .., 'mac': ..}; defaults to []
        :return: (flow_count, customer_count, flow_dist_count, customer_dist_count)
        '''
        if hourMsgs is None:
            hourMsgs = []
        flow_count = 0
        customer_count = 0
        # Sets replace the original O(n) `mac not in list` membership tests.
        flow_macs = set()
        customer_macs = set()
        for hmsg in hourMsgs:
            mac = hmsg['mac']
            flow_macs.add(mac)
            flow_count += 1
            if hmsg['role'] == 3:
                customer_count += 1
                customer_macs.add(mac)
        return flow_count, customer_count, len(flow_macs), len(customer_macs)

    def get_flow_and_customer_count_on_day(self, hourMsgs):
        '''
        Same as get_flow_and_customer_count() but over a whole day:
        `hourMsgs` maps hour -> list of {'role': .., 'mac': ..} and the
        counts are aggregated across all hours.

        :return: (flow_count, customer_count, flow_dist_count, customer_dist_count)
        '''
        flow_count = 0
        customer_count = 0
        flow_macs = set()
        customer_macs = set()
        for hour, datas in hourMsgs.items():
            for data in datas:
                mac = data['mac']
                flow_macs.add(mac)
                flow_count += 1
                if data['role'] == 3:
                    customer_count += 1
                    customer_macs.add(mac)
        return flow_count, customer_count, len(flow_macs), len(customer_macs)

    def export_to_database(self, rssiMsgList, tab='group_count_by_rssi', type='all'):
        '''
        Persist per-hour customer counts into MySQL (one executemany per group).

        :param rssiMsgList: list of {'group': .., 'data': {day: {rssi: {walkby:
                            hourMsgs}}}} as produced by stat_customer_by_hour
        :param tab: target table name. NOTE(review): concatenated into the SQL
                    text, not parameterized — only call with trusted names.
        :param type: unused here; kept for caller compatibility (shadows builtin)
        '''
        logging.info('start to insert data to database')
        query = 'insert into '+tab+' set day=%s,hour=%s,dgroup=%s,rssi=%s,walkby=%s,role_name=%s,visit_cnt=%s,visit_dis_cnt=%s\
                ON DUPLICATE KEY UPDATE visit_cnt=%s,visit_dis_cnt=%s'
        logging.info('Insert Query : %s'%query)

        for groupMsgList in rssiMsgList:
            group = groupMsgList['group']
            datas = []
            for day, msgList in groupMsgList['data'].items():
                for rssi, walkMsgs in msgList.items():
                    for walkby, hourMsgs in walkMsgs.items():
                        customer_count_datas = self.get_customer_count_with_hour(hourMsgs)
                        for hour, count_data in customer_count_datas.items():
                            # The synthetic 'day' bucket is stored as hour 0
                            # under the 'day_customer' role name.
                            if hour == 'day':
                                role_name = 'day_customer'
                                hour = 0
                            else:
                                role_name = 'all_customer'
                            cnt = str(count_data['customer_count'])
                            dist_cnt = str(count_data['customer_dist_count'])
                            # counts appear twice: INSERT values + ON DUPLICATE KEY UPDATE
                            datas.append((day, hour, str(group), str(rssi), str(walkby),
                                          role_name, cnt, dist_cnt, cnt, dist_cnt))
                logging.info('group %s day %s insert query OK.'%(group,day))
            myConnectMySQL.executemany(query, datas)
                
    
if __name__ == '__main__':
    import logging
    from logging.handlers import RotatingFileHandler

    # Log at INFO to both a rotating file and the console.
    level = logging.INFO
    logging.basicConfig(level=level)

    Rthandler = RotatingFileHandler('/var/log/pyidentify.log',
                                    maxBytes=10*1024*1024, backupCount=0)
    Rthandler.setLevel(level)
    formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    Rthandler.setFormatter(formatter)
    logging.getLogger('').addHandler(Rthandler)

    console = logging.StreamHandler()
    console.setLevel(level)
    console.setFormatter(formatter)
    logging.getLogger('').addHandler(console)

    parser = argparse.ArgumentParser(description='args')
    parser.add_argument('--rawpath', metavar=u'rawdata file path ', default=None)
    parser.add_argument('--groups', metavar=u'groups ', default='')
    parser.add_argument('--rssi_min', metavar=u'rssi min ', default=-90,type=int)
    parser.add_argument('--rssi_max', metavar=u'rssi max ', default=-40,type=int)
    parser.add_argument('--walkby_list', metavar=u'walkby list ', default='0,15,30,45,60,120')
    parser.add_argument('--startTime',metavar=u'startTime',default=None)
    parser.add_argument('--endTime',metavar=u'endTime',default=None)
    parser.add_argument('--nosysemployee',action='store_true',help='no system employee')
    args = parser.parse_args()

    rawpath = args.rawpath
    groups = args.groups
    rssi_min = args.rssi_min
    rssi_max = args.rssi_max
    walkby_list = args.walkby_list.split(',')
    nosysemployee = args.nosysemployee

    show = 'all'
    startTime = args.startTime
    endTime = args.endTime
    # Defaults: configured rawdata directory, and yesterday for both dates.
    # (`is None` instead of `== None` — identity check for the sentinel.)
    if rawpath is None:
        rawpath = myLocalConfig.rawdata_path + '/rawdata_group'
    yesterday = (datetime.date.today() - datetime.timedelta(1)).strftime('%Y-%m-%d')
    if startTime is None:
        startTime = yesterday
    if endTime is None:
        endTime = yesterday

    myRunIdentifyEveryDay = RunIdentifyEveryDay(nosysemployee=nosysemployee)
    days = myRunIdentifyEveryDay.get_slist_between_st_et(startTime, endTime)
    for day in days:
        logging.info('Start identify role %s' % day)
        if groups == '':
            # Derive group ids from the rawdata file names for this day
            # (each archive is named <group>.txt.tar.gz). The regex is now
            # escaped and anchored; the original pattern's unescaped dots
            # matched any character, and joining/splitting on ',' yielded
            # [''] (a bogus empty group) for an empty directory.
            day_dir = rawpath + '/' + day
            if os.path.exists(day_dir):
                groupList = [re.sub(r'\.txt\.tar\.gz$', '', f) for f in os.listdir(day_dir)]
            else:
                groupList = []
        else:
            groupList = groups.split(',')

        gList = myRetailDatas.get_devices_by_group(groups=groupList)

        myRunIdentifyEveryDay.export_to_database_by_group(
            st=day, et=day, min_rssi=rssi_min, max_rssi=rssi_max,
            walkby_list=walkby_list, comp=None, groups=gList,
            show=show, type='stat', order='time')