# -*- coding: GBK -*-
'''
Created on Dec 22, 2015

@author: eyaomai
'''
import sys
sys.path.append(sys.argv[0][:sys.argv[0].rfind('com/stocklens')])
import traceback
import json
from com.stocklens.stock.common.utils import Logging,  PIDUtils, CTPUtils
from com.stocklens.stock.data.crawlercommon import CrawlerConstants, CrawlerManager, CrawlerBase
from com.stocklens.stock.data.sina.sinacommon import SinaConstants
import datetime, time

class EmRTZjlxCrawlerManager(CrawlerManager):
    """Crawler manager that schedules the EastMoney real-time zjlx
    (capital-flow) crawl as a single task with a fixed feed page size.
    """
    LOGGER_NAME_CRAWL = 'erz'

    def __init__(self, json_config_file):
        """
        :param json_config_file: path of the JSON crawler configuration file
        """
        super(EmRTZjlxCrawlerManager, self).__init__(json_config_file, 0.1, None)
        self.logger = Logging.getLogger(EmRTZjlxCrawlerManager.LOGGER_NAME_CRAWL)

    def _initTask(self):
        # Exactly one task; the queued value is the page size (rows per
        # request) used when fetching the paginated JS feed.
        self._taskList.append(3000)

    def _generateTask(self, task, checkTaskList=True):
        """Pop the next page size off the task list and fill in the task dict.

        :param task: task dict to populate in place
        :param checkTaskList: forwarded to the base-class implementation
        :returns: False when the base class rejects the task, True otherwise
        """
        # Bug fix: forward the caller's checkTaskList flag instead of the
        # previously hard-coded True, so callers passing False are honoured.
        if super(EmRTZjlxCrawlerManager, self)._generateTask(task, checkTaskList) is False:
            return False
        pageSize = self._taskList.pop(0)
        task[EmRTZjlxCrawler.PARA_PAGESIZE] = pageSize
        task[CrawlerConstants.PARA_CLASS] = EmRTZjlxCrawler
        return True
        
class EmRTZjlxCrawler(CrawlerBase):
    """Crawls EastMoney's real-time zjlx (capital-flow) JS feed during
    market hours, mirroring each snapshot into the em_rtzjlx table and,
    once the market has closed, copying the final snapshot into the
    daily em_zjlx table.
    """
    PARA_PAGESIZE = 'pageSize'
    # HTML page whose source embeds the 'token=' value the JS feed requires.
    MAIN_URL = 'http://data.eastmoney.com/zjlx/detail.html'
    # Paginated JS feed template.  '#' stands in for '%' (e.g. #22 -> %22,
    # the URL-escaped double quote) so those escapes survive the
    # %-formatting of (page, pageSize, token); __parse restores them.
    JS_URL = 'http://nufm.dfcfw.com/EM_Finance2014NumericApplication/JS.aspx/JS.aspx?type=ct&st=(BalFlowMain)&sr=-1&p=%d&ps=%d&js=var#20PfrNiTgA={pages:(pc),date:#222016-01-04#22,data:[(x)]}&token=%s&cmd=C._AB&sty=DCFFITA&rt=48402463'

    def __init__(self, controller, dbProxy, request):
        """
        :param controller: crawler controller (supplies logger and randomSleep)
        :param dbProxy: database proxy used for all SQL execution
        :param request: task dict; must carry PARA_PAGESIZE
        """
        super(EmRTZjlxCrawler, self).__init__(controller, dbProxy, request)
        self.__pageSize = request[EmRTZjlxCrawler.PARA_PAGESIZE]
        self.logger = controller.logger

    def __parseToken(self):
        """Fetch MAIN_URL and scrape the 'token=' value out of the page.

        :returns: the token string, or None when the page could not be
            fetched or the token marker is missing.
        """
        content = self._fetchContent(EmRTZjlxCrawler.MAIN_URL)
        try:
            # Guard the failure modes the original silently mangled:
            # a failed fetch, or a page without the expected markers.
            if not content:
                return None
            lindex = content.find('token=')
            if lindex < 0:
                return None
            rindex = content[lindex:].find('{')
            if rindex < 0:
                return None
            # 6 == len('token=')
            return content[lindex + 6:lindex + rindex]
        except Exception:
            # Narrowed from a bare except so KeyboardInterrupt/SystemExit
            # still propagate.
            traceInfo = traceback.format_exc()
            self.logger.error('Fail to crawl:%s', traceInfo)
            return None

    def run(self):
        """Main crawl loop: refresh the real-time table roughly every 20s
        during market-data hours, stop once the clock passes 15:01:00,
        then copy the snapshot into the daily table and report completion.
        """
        super(EmRTZjlxCrawler, self).run()
        self.__cleanDB()
        while True:
            now = datetime.datetime.now()
            # Lexicographic compare of zero-padded HH:MM:SS is time order.
            if now.strftime('%H:%M:%S') > '15:01:00':
                break
            # Outside market-data hours (pre-open, lunch break) just wait.
            if not CTPUtils.isMDTime(CTPUtils.Singleton, now):
                time.sleep(5)
                continue
            token = self.__parseToken()
            if token is not None:
                self.__parse(token)
            time.sleep(20)
            self.controller.randomSleep()
        self.__updateWhenClose()
        self._reportDone(CrawlerConstants.VAL_STATUS_FINISH)
        self.logger.info('Finish Crawl')

    def __cleanDB(self):
        """Empty the real-time table before a fresh trading session."""
        dsql = 'DELETE from em_rtzjlx'
        if self.dbProxy.execute(dsql) > 0:
            self.dbProxy.commit()

    def __updateWhenClose(self):
        """Copy the final real-time snapshot into the daily em_zjlx table,
        collapsing sdatetime to its date."""
        sql = 'INSERT INTO em_zjlx (stockid,market,date,main_amount,main_per,huge_amount,huge_per,big_amount,big_per,mid_amount,mid_per,small_amount,small_per,lastclose,pl) '+\
                'SELECT stockid,market,date(sdatetime),main_amount,main_per,huge_amount,huge_per,big_amount,big_per,mid_amount,mid_per,small_amount,small_per,lastclose,pl FROM em_rtzjlx'
        if self.dbProxy.execute(sql) > 0:
            self.dbProxy.commit()

    def __updateDB(self, values, keylist):
        """Replace the rows for the stocks in keylist with freshly crawled
        VALUES tuples (values and keylist are kept in lockstep by __parse).

        NOTE(review): both statements are assembled by string interpolation.
        The interpolated data comes from the EastMoney feed, not user input,
        but parameterized queries would still be safer -- confirm before
        tightening.
        """
        if len(values) > 0:
            dsql = 'DELETE from em_rtzjlx where stockid in (%s)' % ','.join(keylist)
            if self.dbProxy.execute(dsql) > 0:
                self.dbProxy.commit()
            isql = 'INSERT INTO em_rtzjlx (stockid,market,sdatetime,main_amount,main_per,huge_amount,huge_per,big_amount,big_per,mid_amount,mid_per,small_amount,small_per,lastclose,pl) values '
            if self.dbProxy.execute(isql + ','.join(values)) > 0:
                self.totalNum += len(values)
                self.dbProxy.commit()

    def __translateFloat(self, text):
        """EastMoney uses '-' for a missing numeric field; map it to 0,
        otherwise parse the text as a float."""
        if text == '-':
            return 0
        return float(text)

    def __parse(self, token):
        """Walk every page of the JS feed and upsert its rows into
        em_rtzjlx, flushing to the database in batches.

        :param token: access token scraped by __parseToken
        :returns: CrawlerConstants.VAL_STATUS_FINISH in all cases
        """
        DB_SIZE = 50  # batch-flush threshold
        pageNum = 1
        values = list()
        keylist = list()
        while True:
            url = EmRTZjlxCrawler.JS_URL % (pageNum, self.__pageSize, token)
            # Restore the URL-escape sequences ('#22' -> '%22', etc.).
            url = url.replace('#', '%')
            try:
                content = self._fetchContent(url)
                lindex = content.find('{')
                rindex = content.rfind('}')
                js = content[lindex:rindex + 1]
                # The feed is a JS object literal with unquoted keys;
                # quote them so json.loads accepts it.
                js = js.replace('pages', '"pages"').replace('data', '"data"').replace('date', '"date"')
                jo = json.loads(js)
            except Exception:
                # Narrowed from a bare except; keep the traceback so a
                # malformed feed can actually be diagnosed.
                self.logger.error('Fail to parse the json object for %s', url)
                self.logger.error(traceback.format_exc())
                return CrawlerConstants.VAL_STATUS_FINISH
            totalPage = jo['pages']
            data = jo['data']
            for item in data:
                # Comma-separated record layout (as consumed below):
                # [0] market flag (1 -> SH, else SZ), [1] stock id,
                # [3] last close, [4] price change pct, [5..14] amount/pct
                # pairs for main/huge/big/mid/small flows, [-1] timestamp.
                fields = item.split(',')
                if int(fields[0]) == 1:
                    market = 'SH'
                else:
                    market = 'SZ'
                stockid = fields[1]
                lastclose = self.__translateFloat(fields[3])
                # Percentages arrive as e.g. '1.23' meaning 1.23%; amounts
                # are apparently in units of 10,000 (hence *10000) -- the
                # scaling mirrors what the DB schema expects.
                pl = self.__translateFloat(fields[4]) / 100.0
                main_amount = self.__translateFloat(fields[5]) * 10000
                main_per = self.__translateFloat(fields[6]) / 100.0
                huge_amount = self.__translateFloat(fields[7]) * 10000
                huge_per = self.__translateFloat(fields[8]) / 100.0
                big_amount = self.__translateFloat(fields[9]) * 10000
                big_per = self.__translateFloat(fields[10]) / 100.0
                mid_amount = self.__translateFloat(fields[11]) * 10000
                mid_per = self.__translateFloat(fields[12]) / 100.0
                small_amount = self.__translateFloat(fields[13]) * 10000
                small_per = self.__translateFloat(fields[14]) / 100.0
                sdatetime = fields[-1]
                valuestr = '("%s", "%s", "%s", %f, %f, %f, %f, %f, %f, %f, %f, %f, %f,%f,%f)' % \
                          (stockid, market, sdatetime,
                           main_amount, main_per, huge_amount, huge_per, big_amount, big_per,
                           mid_amount, mid_per, small_amount, small_per, lastclose, pl)
                values.append(valuestr)
                keylist.append(stockid)
                if len(values) > DB_SIZE:
                    self.__updateDB(values, keylist)
                    values = list()
                    keylist = list()

            # Flush whatever this page left below the batch threshold.
            if len(values) != 0:
                self.__updateDB(values, keylist)
                values = list()
                keylist = list()

            if pageNum < totalPage:
                pageNum += 1
            else:
                break
            self.controller.randomSleep()

        # Every page flushed its own remainder above, so nothing is left
        # to write here (the former trailing flush was dead code).
        return CrawlerConstants.VAL_STATUS_FINISH


if __name__ == '__main__':
    if PIDUtils.isPidFileExist('erz'):
        print 'Previous East Money Zjlx process is on-going, please stop it firstly'
        sys.exit(1)
    import os
    pid = os.getpid()
    PIDUtils.writePid('erz', pid)
    Logging.initLogger('conf/crawler/crawler.logging.cfg')
    CTPUtils.Singleton = CTPUtils()
    CTPUtils.Singleton.start()
    erz = EmRTZjlxCrawlerManager('conf/crawler/erz.cfg')
    erz.start()
    pidutils = PIDUtils('erz', erz.shutDown, 5, erz.logger)
    pidutils.start()
    sys.exit(0)                    