#!/usr/bin/env python
#-*-encoding:utf-8-*-
'''
Created on 2015-07-24

@author: chenyongbing
'''
import sys,os,datetime,json
#current_dir = os.path.dirname(__file__)
current_dir = os.path.split(os.path.realpath(__file__))[0]

basePath = os.path.join(current_dir,'../../')
sys.path.append(basePath)

from twisted.python import threadable
threadable.init(1)  # initialize Twisted threadable support in thread-safe mode
from twisted.internet import threads, reactor,defer

from base.retail_datas import myRetailDatas
from base.time_method import myTimeMethod
from base.file_method import openTarFile,analyze_rawdata
import urllib2,time

import logging

import Queue


class MakingPusherData():
    """Build pusher JSON payloads from per-group raw tar files and deliver
    them to the remote push service.

    Delivery modes:
      * synchronous HTTP POST (``making_pusher_data`` with ``if_thread=False``)
      * producer/consumer via an internal queue, drained by
        ``send_pusher_data_by_thread`` worker threads (``if_thread=True``)
      * export to a local report file (``export_to_file=True``)
    """

    def __init__(self, rawTarPath='/Udisk/rawdata_group/', max_post=100, export_to_file=False, testing=False):
        # Directory holding <day>/<group>.txt.tar.gz raw-data archives.
        self.rawTarPath = rawTarPath
        # Maximum number of records bundled into a single POST payload.
        self.max_post = max_post
        # When True, payloads are written to a report file instead of posted.
        self.export_to_file = export_to_file
        # Work queue shared between the producer and the sender threads.
        self.rq = Queue.Queue()
        # Set True by the producer once all payloads have been queued,
        # so consumer threads know when an empty queue means "done".
        self.end = False
        # When True, send_pusher_data only logs the URL and skips the HTTP call.
        self.testing = testing

    def listCallback(self, result=None):
        """DeferredList callback: all worker threads finished, stop the reactor.

        ``result`` was a mutable default (``[]``) in the original; it is unused,
        so ``None`` is the safe default.
        """
        logging.info('all threads done.')
        reactor.stop()

    def send_pusher_data_by_thread(self, url=''):
        """Worker loop: consume payloads from ``self.rq`` and POST each one to
        *url*; return once the producer has finished and the queue is drained.
        """
        while 1:
            if self.rq.empty() and self.end == True:
                logging.info('queue is empty.')
                return
            elif self.rq.empty():
                # BUGFIX: the original tested ``self.end == False`` here, which
                # made workers sleep in 5s cycles while the queue HAD data and
                # the producer was still running -- no payload was consumed
                # until the producer finished. Only wait when the queue is
                # genuinely empty but more data may still arrive.
                logging.info('queue is empty , please wait 5 sec.')
                time.sleep(5)
                continue
            else:
                try:
                    data = self.rq.get(timeout=5)
                except Queue.Empty:
                    # Another worker may have drained the queue between the
                    # empty() check and get(); just re-evaluate the loop.
                    continue

            self.send_pusher_data(url, data)

    def send_pusher_data(self, url, data):
        """POST *data* to *url* and log the response body.

        In testing mode only the URL is logged and no request is made.
        Network failures are logged (best-effort delivery), not raised.
        """
        if self.testing:
            logging.info(url)
            return
        try:
            req = urllib2.Request(url, data)
            response = urllib2.urlopen(req)
            msg = response.read()

            logging.info(msg)

        except Exception as err:
            # Include the actual error so failed pushes can be diagnosed
            # (the original message carried no detail at all).
            logging.error('Send pusher data Error. %s' % err)

    def making_wp_pusher_data(self, raw_file='', group='', group_name=''):
        """Yield UTF-8 JSON payload strings built from *raw_file*, batching at
        most ``self.max_post`` records per payload; a final partial batch is
        also yielded.
        """
        datas = []
        for day, device, mac, stime, rssi in analyze_rawdata(raw_file):
            data = {"Device": device, "MAC": mac, "RSSI": rssi, "Time": "%s %s" % (day, stime)}
            datas.append(data)
            if len(datas) >= self.max_post:
                logging.info('datas length %s' % len(datas))
                msg = json.dumps({"MacList": [{"GroupId": group, "GroupName": group_name, "List": datas}], "Type": 1}).encode("utf-8")
                datas = []
                yield msg
        if len(datas) != 0:
            logging.info('datas length %s' % len(datas))
            msg = json.dumps({"MacList": [{"GroupId": group, "GroupName": group_name, "List": datas}], "Type": 1}).encode("utf-8")
            yield msg

    def making_pusher_data(self, comp_id='', startTime=None, endTime=None, if_thread=False, shop_group=None):
        """Producer: walk every group/day raw tar file for *comp_id* between
        *startTime* and *endTime* (both default to yesterday, 'YYYY-MM-DD')
        and either export, enqueue (``if_thread=True``) or directly send each
        payload. ``self.end`` is always set on exit so consumer threads can
        terminate, and the export file is always closed.
        """
        logging.info('start making pusher datas')
        yesterday = (datetime.date.today() - datetime.timedelta(1)).strftime('%Y-%m-%d')
        if startTime is None:
            startTime = yesterday
        if endTime is None:
            endTime = yesterday
        fr = None
        if self.export_to_file == True:
            export_path = os.path.join(current_dir, '../report/pusher_datas')
            if not os.path.exists(export_path):
                # makedirs: the parent '../report' directory may not exist
                # either (os.mkdir would fail in that case).
                os.makedirs(export_path)
            if startTime == endTime:
                export_file = export_path + '/' + 'pusherdatas.%s(%s).txt' % (comp_id, startTime)
            else:
                export_file = export_path + '/' + 'pusherdatas.%s(%s-%s).txt' % (comp_id, startTime, endTime)
            fr = open(export_file, 'w')

        try:
            days = myTimeMethod.get_slist_between_st_et(startTime, endTime)
            groupsInfo = myRetailDatas.get_groups_info_by_comp(comp_id=comp_id)

            for group, groupInfo in groupsInfo.items():
                if shop_group is not None and int(group) != int(shop_group):
                    logging.warn('group %s != %s' % (shop_group, group))
                    continue
                group_name = groupInfo['name']
                print('%s %s' % (group_name, group))
                logging.info('%s %s' % (group_name, group))
                for day in days:
                    group_tar_file = self.rawTarPath + '/' + day + '/' + str(group) + '.txt.tar.gz'
                    logging.info('start send day %s group %s' % (day, group))
                    if not os.path.exists(group_tar_file):
                        logging.warn('group tar file %s not exists.' % group_tar_file)
                        continue
                    # NOTE(review): only company '65' has a push endpoint here;
                    # other comp_ids silently produce nothing -- confirm intended.
                    if comp_id == '65':
                        url = 'http://115.28.191.78/external/pushData'
                        for data in self.making_wp_pusher_data(raw_file=group_tar_file, group=group, group_name=group_name):
                            if self.export_to_file:
                                fr.write(data + '\n')
                            else:
                                if if_thread:
                                    self.rq.put(data)
                                else:
                                    self.send_pusher_data(url, data)
        finally:
            # BUGFIX: always signal completion and close the export file, even
            # when an exception occurs mid-run; otherwise consumer threads in
            # send_pusher_data_by_thread would wait forever and the file
            # handle would leak.
            self.end = True
            if fr is not None:
                fr.close()
        
if __name__ == '__main__':
    from logging.handlers import RotatingFileHandler

    logging.basicConfig(level=logging.INFO)

    # Log to a 100 MB rotating file (backupCount=0: truncate on rollover) ...
    Rthandler = RotatingFileHandler('/data/log/making_pusher_datas.log', maxBytes=100 * 1024 * 1024, backupCount=0)
    formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    Rthandler.setFormatter(formatter)
    logging.getLogger('').addHandler(Rthandler)

    # ... and to the console with the same format.
    console = logging.StreamHandler()
    console.setFormatter(formatter)
    logging.getLogger('').addHandler(console)

    import argparse
    parser = argparse.ArgumentParser(description='args')
    parser.add_argument('--comp_id', metavar=u'company id', required=True)
    parser.add_argument('--shop_group', metavar=u'shop group', default=None)
    parser.add_argument('--startTime', metavar=u'start time', default=None)
    parser.add_argument('--endTime', metavar=u'endtime ', default=None)
    parser.add_argument('--rawTarPath', metavar=u'rawdata tar path', default='/Udisk/rawdata_group/')
    parser.add_argument('--export_to_file', action='store_true', help='export to file')
    parser.add_argument('--threads', metavar=u'threads  ', default=1, type=int)
    parser.add_argument('--testing', action='store_true', help='testing')
    args = parser.parse_args()

    import signal

    def signal_handler(signal, frame):
        # Ctrl+C: stop the reactor cleanly before exiting.
        print('You pressed Ctrl+C!')
        reactor.stop()
        sys.exit(0)
    signal.signal(signal.SIGINT, signal_handler)

    comp_id = args.comp_id
    shop_group = args.shop_group
    startTime = args.startTime
    endTime = args.endTime
    rawTarPath = args.rawTarPath
    export_to_file = args.export_to_file

    thread_num = args.threads
    testing = args.testing
    # BUGFIX: removed the second basicConfig call, which passed
    # ``level=logging.debug`` -- that is the *function* logging.debug, not the
    # constant logging.DEBUG -- and was a no-op anyway because the root logger
    # already has handlers by this point.
    myMakingPusherData = MakingPusherData(rawTarPath=rawTarPath, export_to_file=export_to_file, testing=testing)

    if thread_num == 1:
        # Single-threaded: build payloads and send them synchronously inline.
        myMakingPusherData.making_pusher_data(if_thread=False, comp_id=comp_id, shop_group=shop_group, startTime=startTime, endTime=endTime)
    else:
        # +1: one producer thread plus thread_num sender threads.
        reactor.suggestThreadPoolSize(thread_num + 1)

        deferlist = []
        # Producer: fills the queue with payloads.
        d = threads.deferToThread(myMakingPusherData.making_pusher_data, if_thread=True, comp_id=comp_id, shop_group=shop_group, startTime=startTime, endTime=endTime)
        deferlist.append(d)
        # Consumers: drain the queue and POST to the push endpoint.
        for i in range(thread_num):
            d = threads.deferToThread(myMakingPusherData.send_pusher_data_by_thread, url='http://115.28.191.78/external/pushData')
            deferlist.append(d)

        # Build a DeferredList and stop the reactor once every thread is done.
        dl = defer.DeferredList(deferlist)
        dl.addBoth(myMakingPusherData.listCallback)
        try:
            reactor.run()
        except KeyboardInterrupt:
            print("Interrupted by keyboard. Exiting.")
            reactor.stop()