'''
Created on Sep 7, 2015

@author: eyaomai
'''
import sys
sys.path.append(sys.argv[0][:sys.argv[0].rfind('com/stocklens')])

from com.stocklens.stock.common.utils import Logging,  PIDUtils 
from com.stocklens.stock.data.crawlercommon import CrawlerConstants, CrawlerManager, CrawlerBase
from com.stocklens.stock.data.sina.sinacommon import SinaConstants
from bs4 import BeautifulSoup
import traceback
import time
class SinaMarginTradeHisMgr(CrawlerManager):
    """Crawler manager that schedules per-stock Sina margin-trade history tasks.

    Builds its task list from the Sina stock-info table (optionally
    excluding suspended stocks) and hands one (stockid, market) pair per
    task to SinaMarginTradeHisCrawler together with the configured
    start/end date range.
    """

    LOGGER_NAME_CRAWL_SMH = 'smh'
    FILE_CONFIG_DATERANGE = 'dateRange'
    FILE_CONFIG_STARTDATE = 'startDate'
    FILE_CONFIG_ENDDATE = 'endDate'

    def __init__(self, json_config_file):
        """Load the JSON config file and attach the 'smh' logger."""
        super(SinaMarginTradeHisMgr, self).__init__(json_config_file, 0.1, None)
        self.logger = Logging.getLogger(SinaMarginTradeHisMgr.LOGGER_NAME_CRAWL_SMH)

    def start(self):
        super(SinaMarginTradeHisMgr, self).start()

    def _generateTask(self, task, checkTaskList=True):
        """Fill *task* with the next stock to crawl; False when none is produced."""
        # Let the base class veto the task first (presumably it checks the
        # task list when checkTaskList is True -- verify in CrawlerManager).
        if super(SinaMarginTradeHisMgr, self)._generateTask(task, checkTaskList) is False:
            return False

        stockid, market = self._taskList.pop(0)
        task[CrawlerConstants.PARA_CLASS] = SinaMarginTradeHisCrawler
        task[SinaMarginTradeHisCrawler.PARA_STOCKID] = stockid
        task[SinaMarginTradeHisCrawler.PARA_MARKET] = market
        task[SinaMarginTradeHisCrawler.PARA_START_DATE] = self.__startDate
        task[SinaMarginTradeHisCrawler.PARA_END_DATE] = self.__endDate
        return True

    def _getNonCommonConfig(self, config):
        """Read this crawler's own config keys: ignoreStop and the date range."""
        # 'ignoreStop' is a string flag; anything other than
        # (case-insensitive) "true" disables it.
        self.__ignoreStop = config['ignoreStop'].strip().lower() == 'true'

        dateRange = config[SinaMarginTradeHisMgr.FILE_CONFIG_DATERANGE]
        self.__startDate = dateRange[SinaMarginTradeHisMgr.FILE_CONFIG_STARTDATE]
        self.__endDate = dateRange[SinaMarginTradeHisMgr.FILE_CONFIG_ENDDATE]

    def _initTask(self):
        """Populate the task list with (stockid, market) pairs from the DB."""
        # Optionally exclude suspended stocks (those in the *_STOP table).
        if self.__ignoreStop:
            sql = ('SELECT stockid, market FROM %s WHERE stockid NOT IN '
                   '(SELECT stockid FROM %s)') % (SinaConstants.TABLE_SINA_STOCKINFO,
                                                  SinaConstants.TABLE_SINA_STOCKINFO_STOP)
        else:
            sql = 'SELECT stockid, market FROM %s' % (SinaConstants.TABLE_SINA_STOCKINFO,)
        if self.dbProxy.execute(sql) > 0:
            for row in self.dbProxy.cur.fetchall():
                self._taskList.append((row[0], row[1]))

class SinaMarginTradeHisCrawler(CrawlerBase):
    """Crawls Sina margin-trading (rzrq) history for one stock over a date range.

    Fetches the Sina "vInvestConsult/kind/rzrq" page, parses the HTML data
    table into HisData records, replaces any overlapping rows in
    sina_margin_trading_detail, and reports a finish/failure status back to
    the controller.
    """

    URL = 'http://vip.stock.finance.sina.com.cn/q/go.php/vInvestConsult/kind/rzrq/index.phtml?symbol=%s&bdate=%s&edate=%s'
    PARA_MARKET = 'market'
    PARA_STOCKID = 'stockid'
    PARA_START_DATE = 'startDate'
    PARA_END_DATE = 'endDate'

    def __init__(self, controller, dbProxy, request):
        super(SinaMarginTradeHisCrawler, self).__init__(controller, dbProxy, request)
        self.__url = None
        self.__parsePara(request)

    def run(self):
        """Remove stale rows for this stock/date range, then crawl and store."""
        super(SinaMarginTradeHisCrawler, self).run()
        self.__removeOldData()
        status = self.__crawlHisData()
        self._reportDone(status)

    def __parsePara(self, request):
        """Extract the task parameters from *request* and build the target URL."""
        self.__market = request[SinaMarginTradeHisCrawler.PARA_MARKET]
        self.__stockid = request[SinaMarginTradeHisCrawler.PARA_STOCKID]
        self.__startDate = request[SinaMarginTradeHisCrawler.PARA_START_DATE]
        self.__endDate = request[SinaMarginTradeHisCrawler.PARA_END_DATE]
        # Sina expects e.g. symbol=sz000001 (lower-case market + stock id).
        self.__url = SinaMarginTradeHisCrawler.URL % (
            self.__market.lower() + self.__stockid, self.__startDate, self.__endDate)

    def __parseHisData(self, content):
        """Parse the Sina data table out of *content*.

        Returns a list of HisData (possibly empty), or None when the page
        carries no data table at all.
        """
        lindex = content.find('<table class="list_table" id="dataTable">')
        if lindex < 0:
            self.logger.warn('No dataTable for %s', self.__url)
            return None
        soup = BeautifulSoup(content[lindex:])
        tables = soup.findAll('table', {'id': 'dataTable'})
        if len(tables) == 0:
            # Consistency fix: use the instance logger (was Logging.LOGGER).
            self.logger.error('No dataTable found for url:%s', self.__url)
            return None
        trs = tables[0].findAll('tr')
        # The first three rows are header/legend rows, not data.
        if len(trs) < 3:
            return None

        hisDataList = list()
        for tr in trs[3:]:
            tds = tr.findAll('td')
            if len(tds) < 8:
                # Robustness: a short/malformed row used to raise IndexError
                # and fail the whole crawl; skip just this row instead.
                self.logger.warn('Skipping malformed row for %s', self.__url)
                continue
            hisDataList.append(HisData(
                tds[1].text.strip(),                       # trade date
                self.__convertNumericData(tds[2], float),  # financing remaining amount
                self.__convertNumericData(tds[3], float),  # financing purchase amount
                self.__convertNumericData(tds[5], float),  # loan remaining amount
                self.__convertNumericData(tds[6], int),    # loan remaining volume
                self.__convertNumericData(tds[7], int),    # loan sell volume
                ))

        return hisDataList

    def __convertNumericData(self, td, ctype):
        """Convert a table cell's text to *ctype*; '--' (no data) maps to -1.

        For int targets the fractional part is truncated, since the page may
        render integral volumes with a decimal point.
        """
        text = td.text.strip()
        if text == '--':
            return -1
        if ctype == int:
            index = text.find('.')
            if index >= 0:
                text = text[:index]
        return ctype(text)

    def __removeOldData(self):
        """Delete previously stored rows for this stock within the date range."""
        # NOTE(review): values come from our own config/DB rather than user
        # input, but this is still string-built SQL; parameterized queries
        # would be safer if the dbProxy supports them.
        sql = 'DELETE FROM sina_margin_trading_detail WHERE market="%s" AND stockid="%s" AND date>="%s" AND date<="%s"' %(
                            self.__market,
                            self.__stockid,
                            self.__startDate,
                            self.__endDate)
        if self.dbProxy.execute(sql)>0:
            self.dbProxy.commit()

    def __writeDb(self, hisDataList):
        """Insert all parsed rows in one multi-row INSERT; commit on success.

        Returns True when the insert succeeded, False otherwise.
        """
        sql = 'INSERT INTO sina_margin_trading_detail (stockid, market, date, financing_remaining_amount,financing_purchase_amount,loan_remaining_amount,loan_remaining_vol,loan_sell_vol) values '
        values = list()
        for hisData in hisDataList:
            valuestr = '("%s","%s", "%s", %f, %f, %f, %d, %d)' %\
                            (
                             self.__stockid,
                             self.__market,
                             hisData.trans_date,
                             hisData.financing_remaining_amount,
                             hisData.financing_purchase_amount,
                             hisData.loan_remaining_amount,
                             hisData.loan_remaining_vol,
                             hisData.loan_sell_vol
                             )
            values.append(valuestr)
        sql = sql + ','.join(values)
        if self.dbProxy.execute(sql) > 0:
            self.totalNum += len(values)
            self.dbProxy.commit()
            return True
        return False

    def __crawlHisData(self):
        """Fetch and parse the page, store results; return a crawl status."""
        self.logger.info('Begin to Crawl %s', self.__url)
        status = CrawlerConstants.VAL_STATUS_FINISH
        try:
            content = self._fetchContent(self.__url)
            if content is None:
                status = CrawlerConstants.VAL_STATUS_FAILURE
            else:
                hisDataList = self.__parseHisData(content)
                if hisDataList is not None and len(hisDataList) > 0:
                    # Bug fix: a failed DB write used to be silently reported
                    # as FINISH; propagate it as a failure instead.
                    if not self.__writeDb(hisDataList):
                        status = CrawlerConstants.VAL_STATUS_FAILURE
        except Exception:
            traceInfo = traceback.format_exc()
            self.logger.warn('Fail to Crawl %s:%s', self.__url, traceInfo)
            status = CrawlerConstants.VAL_STATUS_FAILURE
        self.logger.info('Finish to Crawl %s', self.__url)
        return status
            

class HisData(object):
    """One day's margin-trading record for a single stock.

    Amount fields are floats (currency amounts); vol fields are integer
    share volumes. A value of -1 means Sina reported no data ('--').
    """

    def __init__(self,
                 trans_date,
                 financing_remaining_amount,
                 financing_purchase_amount,
                 loan_remaining_amount,
                 loan_remaining_vol,
                 loan_sell_vol):
        self.trans_date = trans_date  # transaction date string as scraped
        self.financing_remaining_amount = financing_remaining_amount
        self.financing_purchase_amount = financing_purchase_amount
        self.loan_remaining_amount = loan_remaining_amount
        self.loan_remaining_vol = loan_remaining_vol
        self.loan_sell_vol = loan_sell_vol

    def __repr__(self):
        # Debug-friendly representation; added for log readability.
        return ('HisData(trans_date=%r, financing_remaining_amount=%r, '
                'financing_purchase_amount=%r, loan_remaining_amount=%r, '
                'loan_remaining_vol=%r, loan_sell_vol=%r)' % (
                    self.trans_date,
                    self.financing_remaining_amount,
                    self.financing_purchase_amount,
                    self.loan_remaining_amount,
                    self.loan_remaining_vol,
                    self.loan_sell_vol))

if __name__ == '__main__':
    # Refuse to start if a previous 'smh' crawler left its PID file behind.
    # (Typo fix: message said "Magin"; print written in the parenthesized
    # single-argument form, identical output under Python 2.)
    if PIDUtils.isPidFileExist('smh'):
        print('Previous Sina Margin Trading History Crawler process is on-going, please stop it firstly')
        sys.exit(1)
    import os
    pid = os.getpid()
    PIDUtils.writePid('smh', pid)
    Logging.initLogger('conf/crawler/crawler.logging.cfg')
    smh = SinaMarginTradeHisMgr('conf/crawler/smh.cfg')
    smh.start()
    # NOTE(review): presumably PIDUtils monitors the pid file and invokes
    # smh.shutDown on removal -- confirm against PIDUtils implementation.
    pidutils = PIDUtils('smh', smh.shutDown, 5, smh.logger)
    pidutils.start()
    sys.exit(0)
                            