'''
Created on Jan 23, 2016

@author: eyaomai
'''
import sys
sys.path.append(sys.argv[0][:sys.argv[0].rfind('com/stocklens')])
import os
import traceback
import json
from com.stocklens.stock.common.utils import Logging,  PIDUtils, CTPUtils
from com.stocklens.stock.data.crawlercommon import CrawlerConstants, CrawlerManager, CrawlerBase

class EmLhbJymxCrawlerManager(CrawlerManager):
    """Manages crawl tasks for East Money LHB broker-office transaction details.

    One task is generated per broker office id (yybid) found in the
    em_lhb_yyb table; each task is executed by an EmLhbJymxCrawler.
    """
    LOGGER_NAME_CRAWL = 'elj'

    def __init__(self, json_config_file):
        '''
        Constructor.

        :param json_config_file: path to the crawler JSON config file,
            forwarded to the CrawlerManager base (0.1 appears to be a
            sleep/interval factor -- confirm against CrawlerManager).
        '''
        super(EmLhbJymxCrawlerManager, self).__init__(json_config_file, 0.1, None)
        self.logger = Logging.getLogger(EmLhbJymxCrawlerManager.LOGGER_NAME_CRAWL)

    def _initTask(self):
        # Seed the task list with every broker office id in the database.
        sql = 'select yybid from em_lhb_yyb'
        if self.dbProxy.execute(sql) > 0:
            self._taskList.extend([row[0] for row in self.dbProxy.cur.fetchall()])

    def _generateTask(self, task, checkTaskList=True):
        """Pop the next yybid and populate `task` for an EmLhbJymxCrawler.

        Returns False when the base class rejects the request (e.g. no
        tasks remain), True once `task` has been filled in.
        """
        # BUG FIX: forward the caller's checkTaskList instead of the
        # hardcoded True, which silently ignored the parameter.
        if super(EmLhbJymxCrawlerManager, self)._generateTask(task, checkTaskList) is False:
            return False
        yybid = self._taskList.pop(0)
        task[EmLhbJymxCrawler.PARA_YYBID] = yybid
        task[CrawlerConstants.PARA_CLASS] = EmLhbJymxCrawler
        return True

class EmLhbJymxCrawler(CrawlerBase):
    """Crawls per-office LHB transaction details from East Money.

    For a single broker office id (yybid) it pages through the JSON feed,
    aggregates buy/sell amounts per (stock, date) pair, then rewrites the
    em_lhb_yybjymx rows for that office.
    """
    PARA_YYBID = 'YYBID'
    # %s = office id, %d = page number; %% escapes literal percent signs.
    # The endpoint wraps its JSON in a JS variable assignment (see __parse).
    JS_URL = 'http://datainterface.eastmoney.com/EM_DataCenter/JS.aspx?type=LHB&sty=YYHSIU&code=%s&p=%d&ps=5000&js=var%%20AfQyiJfl={%%22data%%22:[(x)],%%22pages%%22:%%22(pc)%%22,%%22update%%22:%%22(ud)%%22}&rt=48451102'

    def __init__(self, controller, dbProxy, request):
        super(EmLhbJymxCrawler, self).__init__(controller, dbProxy, request)
        # The office id this crawler instance is responsible for.
        self.__yybid = request[EmLhbJymxCrawler.PARA_YYBID]
        self.logger = controller.logger

    def run(self):
        """Delete existing rows for this office, re-crawl, and report status."""
        super(EmLhbJymxCrawler, self).run()
        if self.__cleanDB() is False:
            self._reportDone(CrawlerConstants.VAL_STATUS_FAILURE)
            return
        status = self.__parse()
        self._reportDone(status)
        self.logger.info('Finish Crawl')

    def __cleanDB(self):
        """Remove previous rows for this office; return True on success."""
        # NOTE(review): SQL built by string interpolation. yybid comes from
        # our own em_lhb_yyb table, but a parameterized query would be safer.
        dsql = 'DELETE from em_lhb_yybjymx where yybid="%s"' % (self.__yybid)
        self.logger.info('plan to delete for %s', self.__yybid)
        if self.dbProxy.execute(dsql) >= 0:
            self.dbProxy.commit()
            return True
        return False

    def __updateDB(self, values):
        """Bulk-insert a batch of pre-rendered VALUES tuples."""
        self.logger.info('plan to insert %d for %s', len(values), self.__yybid)
        isql = 'INSERT INTO em_lhb_yybjymx (yybid,stockid,transdate,buyamount,sellamount) values '
        if self.dbProxy.execute(isql + ','.join(values)) > 0:
            self.totalNum += len(values)
            self.dbProxy.commit()

    def __parse(self):
        """Fetch every page for this office, aggregate, and flush to the DB.

        Returns a CrawlerConstants status code.
        """
        valueDict = {}  # (stockid_transdate) -> {'buy': total, 'sell': total}
        pageNum = 1
        while True:
            url = EmLhbJymxCrawler.JS_URL % (self.__yybid, pageNum)
            content = self._fetchContent(url)
            # The feed is "var AfQyiJfl={...}"; slice out the {...} payload.
            lindex = content.find('{')
            rindex = content.rfind('}')
            js = content[lindex:rindex + 1]
            try:
                jo = json.loads(js)
                # Pull the fields inside the try so a well-formed JSON reply
                # that lacks 'pages'/'data' is also treated as "no data"
                # (previously these lines sat after a bare except and an
                # unexpected payload shape would crash the crawler).
                totalPage = int(jo['pages'])
                data = jo['data']
            except (ValueError, KeyError, TypeError):
                self.logger.warn('No data for %s', self.__yybid)
                return CrawlerConstants.VAL_STATUS_FINISH
            for item in data:
                # Each item is a comma-joined record; field meaning inferred
                # from usage: [1]=sell, [2]=stockid, [3]=buy, [5]=date.
                fields = item.split(',')
                stockid = fields[2]
                transdate = fields[5]
                currentkey = stockid + '_' + transdate
                buyamount = float(fields[3])
                sellamount = float(fields[1])
                # Sum amounts when the same stock/date appears on many pages.
                if currentkey not in valueDict:
                    valueDict[currentkey] = {'buy': buyamount, 'sell': sellamount}
                else:
                    valueDict[currentkey]['buy'] += buyamount
                    valueDict[currentkey]['sell'] += sellamount

            if pageNum < totalPage:
                pageNum += 1
            else:
                break
            self.controller.randomSleep()  # polite delay between page fetches

        DB_SIZE = 50  # rows per INSERT batch
        values = list()
        for key in valueDict:
            keyitem = key.split('_')
            stockid = keyitem[0]
            transdate = keyitem[1]
            buyamount = valueDict[key]['buy']
            sellamount = valueDict[key]['sell']
            valuestr = '("%s","%s","%s",%f,%f)' %\
                        (self.__yybid, stockid, transdate,
                         buyamount, sellamount)
            values.append(valuestr)
            if len(values) >= DB_SIZE:
                self.__updateDB(values)
                values = list()

        # Flush the final partial batch, if any.
        if len(values) != 0:
            self.__updateDB(values)
            values = list()

        return CrawlerConstants.VAL_STATUS_FINISH

if __name__ == '__main__':
    if PIDUtils.isPidFileExist('elj'):
        print 'Previous East Money Lhb Yyb Jymx process is on-going, please stop it firstly'
        sys.exit(1)
    import os
    pid = os.getpid()
    PIDUtils.writePid('elj', pid)
    Logging.initLogger('conf/crawler/crawler.logging.cfg')
    elj = EmLhbJymxCrawlerManager('conf/crawler/elj.cfg')
    elj.start()
    pidutils = PIDUtils('elj', elj.shutDown, 5, elj.logger)
    pidutils.start()
    sys.exit(0)                                    