# -*- coding: utf-8 -*-
#Author:pako
#Email:zealzpc@gmail.com
from twisted.internet import threads, reactor,defer,protocol
from weibopy.auth import OAuthHandler
from weibopy.api import API
from twisted.application import internet, service
from twisted.protocols import basic
from twisted.web import resource, server, static, xmlrpc
import cgi
import json
import time
import trendmap
import mdb

debug=True
def dp(*data):
    """
    this function work the same as print()
    """
    if debug:
        if len(data)==1:
            print data
        else:
            for d in data:
                print d
    
         
def getDate():
    """Return today's local midnight (00:00:00) as a unix timestamp.

    The value has the same format as time.time() (seconds since the
    epoch, float).  The original tuple used (..., 0, 10, 10, 10, 0, 0),
    which actually produced 00:10:10 -- contradicting this docstring.
    """
    today = time.localtime()
    # hour=min=sec=0 for midnight; isdst=-1 lets mktime() decide whether
    # daylight saving time applies on that date.
    midnight_tuple = (today.tm_year, today.tm_mon, today.tm_mday,
                      0, 0, 0, 0, 0, -1)
    return time.mktime(midnight_tuple)

def transUinxtime2Strtime(utime, type=0):
    """Format a unix timestamp as a local-time date string.

    type 0 -> "YYYY-MM-DD", type 1 -> "MM.DD"; any other value
    returns None (matching the original implicit fall-through).
    """
    formats = {0: "%Y-%m-%d", 1: "%m.%d"}
    fmt = formats.get(type)
    if fmt is None:
        return None
    return time.strftime(fmt, time.localtime(utime))
    




  

class picProtocol(basic.LineReceiver):
    def lineReceived(self, cmd):
        print 'cmd:',cmd,type(cmd)
        if cmd.startswith('~') and cmd.endswith('~'):
            keyword,days=cmd[1:-1].split(':',1)
            days=int(days)
#            self.transport.write('sucess' + '\r\n')
#            self.transport.write('keyword=' +keyword+ '\r\n')
#            self.transport.write('days=' +str(days) + '\r\n')
        else:
            print 'wrong cmd'
            self.transport.write('wrong cmd' + '\r\n')
            self.transport.loseConnection()
            return 
        d=self.factory.getpic(days,keyword)
        def onError(err):
            self.transport.loseConnection()
            print err
            return 'Internal error in server'
        d.addErrback(onError)

        def writeResponse(message):
            message=message.encode('utf-8')
            self.transport.write(message)
            self.transport.loseConnection()
        d.addCallback(writeResponse)

class picService(service.Service):
    def __init__(self):
        self.users = {}
        
    def run(self,pass_day_count,q):  
        second_in_one_day=3600*24
        today_date=getDate()
        
#        original_count_tread=[0]*pass_day_count
#        repub_count_tread=[0]*pass_day_count
        data_list=[]
        
        interval=1 # the interval of two search date
        
        deferlist=[]
        
        def listCallback(result):  
            """
            """
            total_count_list=[r[1][0] for r in result]
            origenal_count_list=[r[1][1] for r in result]
            print "result:",result
            total_count_list.reverse()
            origenal_count_list.reverse()
            data_list.reverse()
            print "total_count_list result =", total_count_list
            print "origenal_count_list result =", origenal_count_list
            print 'data_list:',data_list
            
            image_data = [
                      {
                       'title':'原创微博',
    #                    'title':'orignal',
                       'xAxasString':data_list,
                       'yAxasData':[str(y) for y in origenal_count_list],
                       'pic_title':q+' 在新浪微博被提及的热门趋势'
    #                    'pic_title':kw
                       },
                      {
                       'title':'全部微博',
    #                    'title':'all',
                       'xAxasString':data_list,
                       'yAxasData':[str(y) for y in total_count_list]
                       }
                      ]
            url=trendmap.createMap(image_data)
            print url
            return url
        
        
        for i in range(0,pass_day_count,interval):
            dp("====================The result daily search data====================")
            end_time=int(today_date-i*second_in_one_day)
            i=i+interval
            start_time=int(today_date-(second_in_one_day*i))
    #        dp( "time+++:",i)
            dp( "end_time:",transUinxtime2Strtime(end_time),end_time)
            dp( "start_time:",transUinxtime2Strtime(start_time),start_time)
            if (pass_day_count/interval) >10:
                data_list.append(transUinxtime2Strtime(end_time,1))
            else:
                data_list.append(transUinxtime2Strtime(end_time))
            d = threads.deferToThread(self.search,q=q,filter_ori=0,starttime=start_time,endtime=end_time)
            deferlist.append(d)
        dl = defer.DeferredList(deferlist)
        dl.addBoth(listCallback)
        print "1st line after the addition of the callback"  
        return dl
        

    def search(self,q='',filter_ori=0,starttime=0,endtime=0,count=1,index=0):
        consumer_key= '1960886904'
        consumer_secret ='a1b611395e43bbec823a76b790984bb7'
        token = 'a43a6e918eff91077ea463d832594de3'
        tokenSecret = '07146f3aa9c5105b88448879f6d37d29'
        
        auth = OAuthHandler(consumer_key, consumer_secret)
        auth.setToken(token, tokenSecret)
        api = API(auth)
        result=[0,0]
        search_cache=mdb.daily_find(q,endtime,3600*24,filter_ori)
        if search_cache:
            print 'have original cache'
            result[0]=search_cache
        else:
            print "have not cache request now"
            thepage = api.zpc_search(q=q,
                                     needcount=True,
                                     count=count,
                                     starttime=starttime,
                                     endtime=endtime,
                                     filter_ori=filter_ori)
            res= json.read(thepage)
            
            total_count = res['total_count_maybe']
            mdb.daily_insert(q,total_count,endtime,3600*24,filter_ori)
            result[0]=total_count
        
        search_cache=mdb.daily_find(q,endtime,3600*24,5)
        if search_cache:
            print 'have original cache'
            result[1]=search_cache
        else:
            print "have not cache request now"
            thepage = api.zpc_search(q=q,
                                     needcount=True,
                                     count=count,
                                     starttime=starttime,
                                     endtime=endtime,
                                     filter_ori=5)
            res= json.read(thepage)
            
            total_count = res['total_count_maybe']
            mdb.daily_insert(q,total_count,endtime,3600*24,filter_ori)
            result[1]=total_count
        return result
    def getPicFactory(self):
        f = protocol.ServerFactory()
        f.protocol = picProtocol
        f.getpic=self.run
        return f

class picFactory(protocol.Factory):

    def run(self,pass_day_count,q):  
        second_in_one_day=3600*24
        today_date=getDate()
        
#        original_count_tread=[0]*pass_day_count
#        repub_count_tread=[0]*pass_day_count
        data_list=[]
        
        interval=1 # the interval of two search date
        
        deferlist=[]
        
        def listCallback(result):  
            """
            """
            total_count_list=[r[1][0] for r in result]
            origenal_count_list=[r[1][1] for r in result]
            print "result:",result
            total_count_list.reverse()
            origenal_count_list.reverse()
            data_list.reverse()
            print "total_count_list result =", total_count_list
            print "origenal_count_list result =", origenal_count_list
            print 'data_list:',data_list
            
            image_data = [
                      {
                       'title':'原创微博',
    #                    'title':'orignal',
                       'xAxasString':data_list,
                       'yAxasData':[str(y) for y in origenal_count_list],
                       'pic_title':q+' 在新浪微博被提及的热门趋势'
    #                    'pic_title':kw
                       },
                      {
                       'title':'全部微博',
    #                    'title':'all',
                       'xAxasString':data_list,
                       'yAxasData':[str(y) for y in total_count_list]
                       }
                      ]
            url=trendmap.createMap(image_data)
            print url
            return url
        
        
        for i in range(0,pass_day_count,interval):
            dp("====================The result daily search data====================")
            end_time=int(today_date-i*second_in_one_day)
            i=i+interval
            start_time=int(today_date-(second_in_one_day*i))
    #        dp( "time+++:",i)
            dp( "end_time:",transUinxtime2Strtime(end_time),end_time)
            dp( "start_time:",transUinxtime2Strtime(start_time),start_time)
            if (pass_day_count/interval) >10:
                data_list.append(transUinxtime2Strtime(end_time,1))
            else:
                data_list.append(transUinxtime2Strtime(end_time))
            d = threads.deferToThread(self.search,q=q,filter_ori=0,starttime=start_time,endtime=end_time)
            deferlist.append(d)
        dl = defer.DeferredList(deferlist)
        dl.addBoth(listCallback)
        print "1st line after the addition of the callback"  
        return dl
        

    def search(self,q='',filter_ori=0,starttime=0,endtime=0,count=1,index=0):
        consumer_key= '1960886904'
        consumer_secret ='a1b611395e43bbec823a76b790984bb7'
        token = 'a43a6e918eff91077ea463d832594de3'
        tokenSecret = '07146f3aa9c5105b88448879f6d37d29'
        
        auth = OAuthHandler(consumer_key, consumer_secret)
        auth.setToken(token, tokenSecret)
        api = API(auth)
        result=[0,0]
        search_cache=mdb.daily_find(q,endtime,3600*24,filter_ori)
        if search_cache:
            print 'have original cache'
            result[0]=search_cache
        else:
            print "have not cache request now"
            thepage = api.zpc_search(q=q,
                                     needcount=True,
                                     count=count,
                                     starttime=starttime,
                                     endtime=endtime,
                                     filter_ori=filter_ori)
            res= json.read(thepage)
            
            total_count = res['total_count_maybe']
            mdb.daily_insert(q,total_count,endtime,3600*24,filter_ori)
            result[0]=total_count
        
        search_cache=mdb.daily_find(q,endtime,3600*24,5)
        if search_cache:
            print 'have original cache'
            result[1]=search_cache
        else:
            print "have not cache request now"
            thepage = api.zpc_search(q=q,
                                     needcount=True,
                                     count=count,
                                     starttime=starttime,
                                     endtime=endtime,
                                     filter_ori=5)
            res= json.read(thepage)
            
            total_count = res['total_count_maybe']
            mdb.daily_insert(q,total_count,endtime,3600*24,filter_ori)
            result[1]=total_count
        return result
    
    protocol = picProtocol

    def __init__(self):
        self.quote = '' 
        self.getpic=self.run   
#if __name__ == '__main__':  
#    print search(q='iphone',filter_ori=5,starttime=time.time()-3600*24,endtime=time.time(),index=0)
#    reactor.callLater(1, run,5,'ipad')

if __name__ == '__main__':
    # Guarded so importing this module no longer starts the server as a
    # side effect; running the file as a script behaves exactly as before.
    reactor.listenTCP(8700, picFactory())
    reactor.run()

#application = service.Application('picserver', uid=1, gid=1)
#fs=picService()
#picpact=fs.getPicFactory()
#internet.TCPServer(79, picpact).setServiceParent(service.IServiceCollection(application))