'''
Created on Feb 4, 2015

@author: eyaomai
'''
import sys
sys.path.append(sys.argv[0][:sys.argv[0].rfind('com/stocklens')])

from com.stocklens.stock.common.utils import Logging,  PIDUtils 
from com.stocklens.stock.data.crawlercommon import CrawlerConstants, CrawlerManager, CrawlerBase
from com.stocklens.stock.data.sina.sinacommon import SinaConstants
import json
import datetime
import traceback
class SinaStockListManager(CrawlerManager):
    '''
    Manager that drives paged crawling of the Sina A-share stock lists for
    the SH and SZ markets.  Page numbers are handed out round-robin across
    the two markets; once both markets are marked finished, the accumulated
    stock sets are written back to the database, replacing the previous
    snapshot.
    '''
    # Logger name shared by the manager and its crawler workers.
    LOGGER_NAME_CRAWL_SSL = 'ssl'
    # Number of stocks requested per page from the Sina list API; a page
    # returning fewer entries than this signals the end of a market's list.
    PAGE_SIZE = 40
    # Keys of the per-market progress record kept in __taskBitMap.
    PARA_INDEX = 'index'
    PARA_FINISH = 'finish'
    PARA_FAILURE_COUNT = 'failureCount'
    PARA_SUCCESS_COUNT = 'successCount'
    def __init__(self, json_config_file):
        '''
        Load crawler configuration from json_config_file and set up logging.
        0.1 is the interval passed to the base manager; failed URLs of this
        job are matched via the crawler's FAILUREURL_PATTERN.
        '''
        super(SinaStockListManager, self).__init__(json_config_file, 0.1, SinaStockListCrawler.FAILUREURL_PATTERN)
        self.logger = Logging.getLogger(SinaStockListManager.LOGGER_NAME_CRAWL_SSL)

    def start(self):
        # Delegates entirely to the base manager's start loop.
        super(SinaStockListManager, self).start()
    
    def _generateTask(self, task, checkTaskList=False):
        '''
        Populate `task` with the crawler class, page size, page number and
        market for the next page to fetch.  Returns False when the base
        manager refuses the task or when no unfinished market remains.
        '''
        superVal = super(SinaStockListManager, self)._generateTask(task, False)
        if superVal is False:
            return False
        
        task[CrawlerConstants.PARA_CLASS] = SinaStockListCrawler
        # Refill the pending list from the round-robin scheduler if empty.
        if len(self._taskList)==0:
            if self.__getNextItem() is False:
                return False
        taskItem = self._taskList.pop(0)
        task[SinaStockListCrawler.PARA_PAGE_SIZE] = SinaStockListManager.PAGE_SIZE
        task[SinaStockListCrawler.PARA_PAGE_NUM] = taskItem[0]
        task[SinaStockListCrawler.PARA_MARKET] = taskItem[1]
        return True        
    
    def __getNextItem(self):
        '''
        Round-robin over the markets and append the next (page, market)
        tuple of the first unfinished market to self._taskList, advancing
        that market's page counter.  Returns False when every market is
        finished.
        '''
        for i in range(0, self.__taskBitMapSize):
            # NOTE(review): relies on Python 2's dict.keys() returning an
            # indexable list; key order is arbitrary but stable within a run.
            key = self.__taskBitMap.keys()[self.__index]
            if self.__taskBitMap[key][SinaStockListManager.PARA_FINISH] is False:                              
                self._taskList.append((self.__taskBitMap[key][SinaStockListManager.PARA_INDEX], key))
                self.__taskBitMap[key][SinaStockListManager.PARA_INDEX]+=1
                self.__index = (self.__index+1)%self.__taskBitMapSize
                return True
            else:
                self.__index = (self.__index+1)%self.__taskBitMapSize
        
        return False
    
    def __initTaskBitMap(self):
        # Per-market progress record: next page index (1-based), success and
        # failure counters, and a finish flag.  The result accumulators are
        # reset at the same time.
        self.__taskBitMap = {
                             SinaStockListCrawler.VAL_MARKET_SZA:{
                                                                  SinaStockListManager.PARA_INDEX:1,
                                                                  SinaStockListManager.PARA_SUCCESS_COUNT:0,
                                                                  SinaStockListManager.PARA_FAILURE_COUNT:0, 
                                                                  SinaStockListManager.PARA_FINISH:False},
                             SinaStockListCrawler.VAL_MARKET_SHA:{
                                                                  SinaStockListManager.PARA_INDEX:1,
                                                                  SinaStockListManager.PARA_SUCCESS_COUNT:0,
                                                                  SinaStockListManager.PARA_FAILURE_COUNT:0, 
                                                                  SinaStockListManager.PARA_FINISH:False}
                             }
        self.__taskBitMapSize = len(self.__taskBitMap)
        self.__index = 0
        # Accumulated (code, market, name) tuples across all crawled pages.
        self.__fullSetName = set()
        # Accumulated (code, market) tuples of stocks with zero turnover.
        self.__stopSet = set()
                 
    def _initTask(self):
        # Seed one pending page per worker thread before crawling starts.
        self.__initTaskBitMap()
        while(len(self._taskList)<self._numThread):
            self.__getNextItem()
    
    def _handleNotifyDone(self, request):
        '''
        Handle a worker's completion report and decide whether the market it
        crawled is finished.

        On failure, the market's failure count is incremented; once it
        reaches the worker count the market is marked finished.  On success,
        the page's stock tuples are merged into the accumulators and the
        failure count is reset; a short page (fewer than PAGE_SIZE entries)
        bumps the success count and, once that reaches the worker count,
        also marks the market finished.  When all markets are finished, the
        database snapshot is rewritten before delegating to the parent
        handler for generic bookkeeping.
        '''
        totalNum = request[CrawlerConstants.PARA_TOTAL_NUM]
        status = request[CrawlerConstants.PARA_STATUS]
        market = request[SinaStockListCrawler.PARA_MARKET]
        if self.__taskBitMap[market][SinaStockListManager.PARA_FINISH] is False:
            if status == CrawlerConstants.VAL_STATUS_FAILURE:
                self.__taskBitMap[market][SinaStockListManager.PARA_FAILURE_COUNT]+=1
                if self.__taskBitMap[market][SinaStockListManager.PARA_FAILURE_COUNT] >=self._numThread:
                    self.__taskBitMap[market][SinaStockListManager.PARA_FINISH] = True
            elif status == CrawlerConstants.VAL_STATUS_FINISH:
                self.__taskBitMap[market][SinaStockListManager.PARA_FAILURE_COUNT]=0
                fullSetName = request[SinaStockListCrawler.PARA_FULL_SET]
                stopSet = request[SinaStockListCrawler.PARA_STOP_SET]
                self.__fullSetName.update(fullSetName)
                self.__stopSet.update(stopSet)
                # A short page means this market's list is exhausted.
                if totalNum<SinaStockListManager.PAGE_SIZE:
                    self.__taskBitMap[market][SinaStockListManager.PARA_SUCCESS_COUNT]+=1
                    if self.__taskBitMap[market][SinaStockListManager.PARA_SUCCESS_COUNT] >=self._numThread:
                        self.__taskBitMap[market][SinaStockListManager.PARA_FINISH] = True
        
        allDone = True
        for value in self.__taskBitMap.values():
            if value[SinaStockListManager.PARA_FINISH] is False:
                allDone=False
                break
        if allDone:
            self.logger.debug('taskBitMap:%s', self.__taskBitMap)
            #self.__updateStockList()
            self.__overwriteStockList()
        super(SinaStockListManager, self)._handleNotifyDone(request)
    
    def __overwriteStockList(self):
        '''
        Replace the stockinfo and stockinfo_stop tables with the freshly
        crawled snapshot: delete all rows, then bulk-insert the accumulated
        full set and (dated with today) the stop set.

        NOTE(review): SQL is built by string interpolation from crawled
        data; consider parameterized queries to avoid injection/quoting
        issues.
        '''
        self.logger.info('Begin to overwrite stocklist, totally %d stock with %d stop', len(self.__fullSetName), len(self.__stopSet))
        sql = 'delete from %s' % (SinaConstants.TABLE_SINA_STOCKINFO)
        if self.dbProxy.execute(sql)>0:
            self.dbProxy.commit()
        
        sql = 'delete from %s' % (SinaConstants.TABLE_SINA_STOCKINFO_STOP)
        if self.dbProxy.execute(sql)>0:
            self.dbProxy.commit()
        
        #insert into stockinfo
        sql = 'INSERT INTO %s (stockid, market) values '% (SinaConstants.TABLE_SINA_STOCKINFO)
        values = list()
        for newStock in self.__fullSetName:
            values.append('("%s","%s")' % (newStock[0], newStock[1]))
        if len(values)>0:
            if self.dbProxy.execute(sql + ','.join(values))>0:
                self.dbProxy.commit()
        
        #insert into stockinfo_stop
        today = datetime.date.today().strftime('%Y-%m-%d')
        
        values = list()
        for newStock in self.__stopSet:
            values.append('("%s","%s","%s")' % (newStock[0], newStock[1], today))
        if len(values)>0:
            sql = 'INSERT INTO %s (stockid, market, date) values '% (SinaConstants.TABLE_SINA_STOCKINFO_STOP)
            if self.dbProxy.execute(sql + ','.join(values))>0:
                self.dbProxy.commit()        
        
    def __updateStockList(self):
        '''
        Incremental alternative to __overwriteStockList: insert only the
        stocks missing from stockinfo, delete reopened stocks from the stop
        table and insert newly stopped ones dated today.  Currently unused
        (its call site above is commented out).
        '''
        self.logger.info('Begin to update stocklist, totally %d stock with %d stop', len(self.__fullSetName), len(self.__stopSet))
        sql = 'select stockid, market from %s' % (SinaConstants.TABLE_SINA_STOCKINFO)
        self.dbProxy.execute(sql)
        resultSet = set([(str(x[0]),str(x[1])) for x in self.dbProxy.cur.fetchall()])
        
        # Stocks crawled this run but not yet in the table.
        differentSet = set(filter(lambda x: (x[0],x[1]) not in resultSet, self.__fullSetName))
        if len(differentSet)>0:
            values = list()
            for newStock in differentSet:
                values.append('("%s","%s")' % (newStock[0], newStock[1]))
            sql = 'INSERT INTO %s (stockid, market) values '% (SinaConstants.TABLE_SINA_STOCKINFO)
            if self.dbProxy.execute(sql + ','.join(values))>0:
                self.dbProxy.commit()
        
        #update stop table
        sql = 'select stockid, market from %s' % (SinaConstants.TABLE_SINA_STOCKINFO_STOP)
        self.dbProxy.execute(sql)
        stopresultSet = set([(str(x[0]),str(x[1])) for x in self.dbProxy.cur.fetchall()])
        
        # reopenSet: in DB but no longer stopped; newStopSet: newly stopped.
        reopenSet = set.difference(stopresultSet, self.__stopSet)
        newStopSet = set.difference(self.__stopSet, stopresultSet)
        if len(reopenSet)>0:
            sql = 'DELETE from %s WHERE ' % (SinaConstants.TABLE_SINA_STOCKINFO_STOP) 
            where_list = list()
            for item in reopenSet:
                where_list.append('(stockid="%s" and market="%s")' % (item[0],item[1]))
            if self.dbProxy.execute(sql + ' OR '.join(where_list))>0:
                self.dbProxy.commit()
        
        
        today = datetime.date.today().strftime('%Y-%m-%d')
        
        if len(newStopSet)>0:
            values = list()
            for newStock in newStopSet:
                values.append('("%s","%s","%s")' % (newStock[0], newStock[1], today))
            sql = 'INSERT INTO %s (stockid, market, date) values '% (SinaConstants.TABLE_SINA_STOCKINFO_STOP)
            if self.dbProxy.execute(sql + ','.join(values))>0:
                self.dbProxy.commit()        
    def _getFailureTasks(self):
        '''
        No failure task handling for this job.
        '''
        pass
class SinaStockListCrawler(CrawlerBase):
    '''
    Crawler that fetches one page of the Sina A-share stock list for a
    single market (SH or SZ) and parses it into stock tuples.

    The result reported back to the manager carries two sets:
    PARA_FULL_SET -- (code, market, utf-8 name) for every stock on the page
    PARA_STOP_SET -- (code, market) for stocks whose traded amount is zero,
                     i.e. currently suspended stocks.
    '''
    # Substring identifying this job's URLs in the failure store.
    FAILUREURL_PATTERN = 'Market_Center.getHQNodeData'
    VAL_MARKET_SHA = 'SH'
    VAL_MARKET_SZA = 'SZ'
    # Keys used in the request/result dictionaries.
    PARA_MARKET = 'market'
    PARA_PAGE_NUM = 'pageNum'
    PARA_PAGE_SIZE = 'pageSize'
    PARA_FULL_SET = 'fullSet'
    PARA_STOP_SET = 'stopSet'
    SINA_STOCK_LIST_SH_URI = 'http://vip.stock.finance.sina.com.cn/quotes_service/api/json_v2.php/Market_Center.getHQNodeData?page=%d&num=%d&sort=symbol&asc=1&node=sh_a&symbol=&_s_r_a=init'
    SINA_STOCK_LIST_SZ_URI = 'http://vip.stock.finance.sina.com.cn/quotes_service/api/json_v2.php/Market_Center.getHQNodeData?page=%d&num=%d&sort=symbol&asc=1&node=sz_a&symbol=&_s_r_a=init'
    # Bare (unquoted) object keys in Sina's JS-style payload; each one is
    # quoted in __parse() so the payload becomes valid JSON.
    KEYWORDS = ['symbol:','code:','name:','trade:','pricechange:','changepercent:','buy:','sell:','settlement:', 'open:','high:','low:','volume:','amount:','ticktime:','per:','pb:','mktcap:','nmc:','turnoverratio:']
    def __init__(self, controller, dbProxy, request):
        '''
        Build the page URL for the requested market and page.  The URL
        stays None (and a warning is logged) for an unexpected market.
        '''
        super(SinaStockListCrawler, self).__init__(controller, dbProxy, request)
        # BUGFIX: the logger must exist before the market check below may
        # use it; previously it was assigned only AFTER self.logger.warn()
        # had already been called on the unexpected-market path, which
        # would raise AttributeError unless the base class set a logger.
        self.logger = Logging.getLogger(SinaStockListManager.LOGGER_NAME_CRAWL_SSL)
        self.__market = request[SinaStockListCrawler.PARA_MARKET]
        self.__pageSize = request[SinaStockListCrawler.PARA_PAGE_SIZE]
        self.__pageNum = request[SinaStockListCrawler.PARA_PAGE_NUM]
        self.__url = None
        if self.__market == SinaStockListCrawler.VAL_MARKET_SHA:
            self.__url = SinaStockListCrawler.SINA_STOCK_LIST_SH_URI % (self.__pageNum, self.__pageSize)
        elif self.__market == SinaStockListCrawler.VAL_MARKET_SZA:
            self.__url = SinaStockListCrawler.SINA_STOCK_LIST_SZ_URI % (self.__pageNum, self.__pageSize)
        else:
            self.logger.warn('URL is none as market is not expected:%s', self.__market)
    def run(self):
        '''
        Fetch the page, parse it and report status plus parsed result back
        to the manager.  A missing URL or a failed fetch/parse reports
        VAL_STATUS_FAILURE; a successful parse clears any recorded failure
        for this URL.
        '''
        super(SinaStockListCrawler, self).run()
        status = CrawlerConstants.VAL_STATUS_FINISH
        result = None
        if self.__url is None:            
            status = CrawlerConstants.VAL_STATUS_FAILURE
        else:
            self.logger.info('Start Crawl %s', self.__url)
            content = self._fetchContent(self.__url)
            if content is None:
                status = CrawlerConstants.VAL_STATUS_FAILURE
            else:
                status, result = self.__parse(content)
                if status != CrawlerConstants.VAL_STATUS_FINISH:
                    self._recordFailure(self.__url, 'fail to parse')
        if status == CrawlerConstants.VAL_STATUS_FINISH:
            self._recoverFailure(self.__url)
            
        self._reportDone(status, result)
        self.logger.info('Finish Crawl')
    
    def __parse(self, content):
        '''
        Turn Sina's JS-style array into JSON and extract the stock sets.

        Returns (status, result) where result maps PARA_FULL_SET /
        PARA_STOP_SET to the parsed sets, or (VAL_STATUS_FAILURE, None)
        on error.  Also sets self.totalNum to the number of entries on the
        page so the manager can detect a short (final) page.
        '''
        try:
            # A near-empty body means the page is past the end of the list.
            if len(content)<10:
                self.totalNum=0
                return CrawlerConstants.VAL_STATUS_FINISH, {SinaStockListCrawler.PARA_FULL_SET:set(), 
                    SinaStockListCrawler.PARA_STOP_SET:set()}
            # Quote the bare keys so the payload becomes valid JSON.
            for keyword in SinaStockListCrawler.KEYWORDS:
                content = content.replace(keyword,'"'+keyword[:-1]+'":')
            json_str = '{"content":%s}'%content
            jo = json.loads(json_str)
            self.totalNum = len(jo['content'])
            stock_fullSetName = set([(str(y['code']), self.__market, y['name'].encode('utf-8')) for y in jo['content']])
            # Zero traded amount marks a suspended ("stop") stock.
            stock_stopSet = set([(str(y['code']), self.__market) for y in jo['content'] if float(y['amount'])==0])
            
            return CrawlerConstants.VAL_STATUS_FINISH, {SinaStockListCrawler.PARA_FULL_SET:stock_fullSetName, 
                    SinaStockListCrawler.PARA_STOP_SET:stock_stopSet}
        except Exception:
            traceInfo = traceback.format_exc()
            self.logger.warn('Fail to parse:%s:%s', self.__url, traceInfo)
            self._recordFailure(self.__url, 'Fail to parse')
            return CrawlerConstants.VAL_STATUS_FAILURE, None

if __name__ == '__main__':
    # Refuse to start while a previous run's PID file is still present.
    if PIDUtils.isPidFileExist('ssl'):
        print 'Previous SinaStockList Crawler process is on-going, please stop it firstly'
        sys.exit(1)
    import os
    pid = os.getpid()
    # Record our PID so later invocations (and stop tooling) can find us.
    PIDUtils.writePid('ssl', pid)
    Logging.initLogger('conf/crawler/crawler.logging.cfg')
    ssl = SinaStockListManager('conf/crawler/ssl.cfg')
    ssl.start()
    # Watches the PID file every 5s and invokes ssl.shutDown on request.
    pidutils = PIDUtils('ssl', ssl.shutDown, 5, ssl.logger)
    pidutils.start()
    sys.exit(0)            