'''
Created on Dec 8, 2015

@author: eyaomai
'''
import sys
sys.path.append(sys.argv[0][:sys.argv[0].rfind('com/stocklens')])
import traceback
from com.stocklens.stock.common.utils import Logging,  PIDUtils 
from com.stocklens.stock.data.crawlercommon import CrawlerConstants, CrawlerManager, CrawlerBase
from com.stocklens.stock.data.sina.sinacommon import SinaConstants
class SinaClosedDailyManager(CrawlerManager):
    """Crawler manager that schedules Sina closed-daily quote fetches.

    Splits the full stock list into batches of at most ``stockPerThread``
    symbols and hands each batch to a SinaClosedDailyCrawler worker.
    """

    # Logger channel shared by the manager and its crawler workers.
    LOGGER_NAME_CRAWL = 'scd'

    def __init__(self, json_config_file):
        """Load the common crawler config and set batch defaults."""
        super(SinaClosedDailyManager, self).__init__(json_config_file, 0.1, None)
        # Defaults; _getNonCommonConfig overrides them from the config file.
        self.__sinaUrl = 'http://111.161.68.235/list=%s'
        self.__stockPerThread = 100
        self.logger = Logging.getLogger(SinaClosedDailyManager.LOGGER_NAME_CRAWL)

    def _getNonCommonConfig(self, config):
        """Read the manager-specific settings on top of the common ones."""
        super(SinaClosedDailyManager, self)._getNonCommonConfig(config)
        self.__sinaUrl = config['sinaUrl']
        self.__stockPerThread = int(config['stockPerThread'])

    def _generateTask(self, task, checkTaskList=True):
        """Fill *task* with the next batch of symbols; False when exhausted."""
        if super(SinaClosedDailyManager, self)._generateTask(task, checkTaskList) is False:
            return False
        self.logger.debug("sinaUrl:%s", self.__sinaUrl)
        self.logger.debug("stockPerThread:%d", self.__stockPerThread)
        task[CrawlerConstants.PARA_CLASS] = SinaClosedDailyCrawler
        # Carve at most __stockPerThread symbols off the front of the queue.
        if len(self._taskList) > self.__stockPerThread:
            batch = self._taskList[:self.__stockPerThread]
            self._taskList = self._taskList[self.__stockPerThread:]
        else:
            batch = self._taskList
            self._taskList = list()
        task[SinaClosedDailyCrawler.PARA_URL] = self.__sinaUrl
        task[SinaClosedDailyCrawler.PARA_STOCK_LIST] = batch
        return True

    def _initTask(self):
        """Seed the task queue with every active stock plus the SH index."""
        sql = 'SELECT stockid, market FROM %s WHERE stockid NOT IN (SELECT stockid FROM %s)' % (SinaConstants.TABLE_SINA_STOCKINFO, SinaConstants.TABLE_SINA_STOCKINFO_STOP)
        if self.dbProxy.execute(sql) > 0:
            rows = self.dbProxy.cur.fetchall()
            # Sina symbols look like 'sh600000': lower-case market + stock id.
            self._taskList.extend([market.lower() + stockid for stockid, market in rows])
        # Always query the Shanghai composite index as well.
        self._taskList.append('sh000001')
class SinaClosedDailyCrawler(CrawlerBase):
    """Fetches one batch of Sina daily quotes and stores them in MySQL.

    The request carries a URL template and a list of stock symbols; the
    response contains ``var hq_str_shXXXXXX="f0,f1,...";`` records which
    are parsed and bulk-inserted into sina_stockclosedaily.
    """

    # Keys of the request dict built by SinaClosedDailyManager._generateTask.
    PARA_URL = 'url'
    PARA_STOCK_LIST = 'stockList'

    def __init__(self, controller, dbProxy, request):
        super(SinaClosedDailyCrawler, self).__init__(controller, dbProxy, request)
        # Expand the '%s' placeholder with the comma-joined symbol list.
        self.__url = request[SinaClosedDailyCrawler.PARA_URL] % ','.join(request[SinaClosedDailyCrawler.PARA_STOCK_LIST])
        self.logger = Logging.getLogger(SinaClosedDailyManager.LOGGER_NAME_CRAWL)
        self.logger.debug('Totally %d stock to query', len(request[SinaClosedDailyCrawler.PARA_STOCK_LIST]))

    def run(self):
        """Fetch, parse and persist the batch, then report the outcome."""
        super(SinaClosedDailyCrawler, self).run()
        status = self.__parse()
        self._reportDone(status)

    def __parse(self):
        """Parse the fetched quote page and insert one row per stock.

        Returns CrawlerConstants.VAL_STATUS_FINISH on a successful insert,
        otherwise VAL_STATUS_FAILURE (nothing parsed or the insert failed).
        """
        content = self._fetchContent(self.__url)
        mdRawList = content.split(';')
        values = list()
        for mdRaw in mdRawList:
            try:
                equalIndex = mdRaw.find('=')
                if equalIndex < 0:
                    continue
                # The 8 characters before '=' are the symbol, e.g. 'sh600000'.
                sinaStockId = mdRaw[equalIndex-8:equalIndex]
                stockid = sinaStockId[2:]
                market = sinaStockId[:2].upper()
                # The quote payload sits between the double quotes.
                mdStr = mdRaw[mdRaw.find('"')+1:mdRaw.rfind('"')]
                fields = mdStr.split(',')
                if len(fields) < 32:
                    continue
                OpenPrice = float(fields[1])
                if OpenPrice == 0:
                    # An open price of 0 means trading was suspended today.
                    continue
                tradingDay = fields[30].replace('-', '')
                HighestPrice = float(fields[4])
                LowestPrice = float(fields[5])
                ClosePrice = float(fields[3])
                Volume = int(fields[8])
                Amount = float(fields[9])
                LastClose = float(fields[2])
                valuestr = '("%s", "%s", "%s", %f, %f, %f, %f, %d, %f, %f)'%\
                          (stockid, market, tradingDay,
                           OpenPrice, HighestPrice, LowestPrice, ClosePrice,
                           Volume, Amount, LastClose)
                values.append(valuestr)
            except (ValueError, IndexError):
                # Best-effort parsing: skip records with missing or
                # non-numeric fields, but log instead of swallowing silently.
                self.logger.debug('Skip malformed quote record: %s', mdRaw)
        if len(values) > 0:
            # NOTE(review): field values are interpolated straight into the
            # SQL string and originate from a remote server; switch to
            # parameterized queries if dbProxy supports them.
            sql = 'INSERT INTO sina_stockclosedaily (stockid, market, date, open_bfq, high_bfq, low_bfq, close_bfq,  vol, amount, lastclose) values '
            sql = sql + ','.join(values)
            if self.dbProxy.execute(sql) > 0:
                self.totalNum += len(values)
                self.dbProxy.commit()
                return CrawlerConstants.VAL_STATUS_FINISH
        return CrawlerConstants.VAL_STATUS_FAILURE

if __name__ == '__main__':
    # Refuse to start while a previous crawler run still owns the pid file.
    if PIDUtils.isPidFileExist('scd'):
        print('Previous Sina Close Daily Crawler process is on-going, please stop it firstly')
        sys.exit(1)
    import os
    PIDUtils.writePid('scd', os.getpid())
    Logging.initLogger('conf/crawler/crawler.logging.cfg')
    manager = SinaClosedDailyManager('conf/crawler/scd.cfg')
    manager.start()
    # Watch the pid file and shut the manager down on request.
    watcher = PIDUtils('scd', manager.shutDown, 5, manager.logger)
    watcher.start()
    sys.exit(0)