'''
Created on Sep 8, 2015

@author: eyaomai
'''
import sys
sys.path.append(sys.argv[0][:sys.argv[0].rfind('com/stocklens')])

from com.stocklens.stock.common.utils import Logging,  PIDUtils , OneTimeThread,\
    CTPUtils
from com.stocklens.stock.data.crawlercommon import CrawlerConstants, CrawlerManager, CrawlerBase, CrawlerCommon
from com.stocklens.stock.data.sina.sinacommon import SinaConstants
from com.stocklens.stock.data.sina.sinadaily import StockDailyData, SinaDailyCrawler
from bs4 import BeautifulSoup
import datetime
import traceback

class SinaDailyFixManager(CrawlerManager):
    """Crawler manager that schedules back-fill ("fix") tasks for missing
    rows in the Sina daily stock data table.

    Each task item is a (stockid, market, min date, max date) tuple; the
    matching SinaDailyFixCrawler re-crawls the gaps inside that range.
    """

    LOGGER_NAME_CRAWL = 'sdf'

    def __init__(self, json_config_file):
        """Initialise the base manager (0.1s interval, no extra para) and
        attach the 'sdf' logger."""
        super(SinaDailyFixManager, self).__init__(json_config_file, 0.1, None)
        self.logger = Logging.getLogger(SinaDailyFixManager.LOGGER_NAME_CRAWL)

    def start(self):
        """Start-up is fully delegated to the base manager."""
        super(SinaDailyFixManager, self).start()

    def _initTask(self):
        """Build the initial task list: one item per (market, stockid)
        pair with its recorded date range."""
        sql = 'SELECT stockid, market, min(date), max(date) from sina_stockdaily WHERE date>="2010-01-01" AND stockid IN ("000001") group by market, stockid'
        if self.dbProxy.execute(sql) > 0:
            rows = self.dbProxy.cur.fetchall()
            self._taskList.extend((row[0], row[1], row[2], row[3]) for row in rows)

        self.logger.debug('Initially %d task items were generated', len(self._taskList))

    def _generateTask(self, task, checkTaskList=True):
        """Pop the next task item and populate *task* for the crawler.

        Returns False when the base class refuses the task, True otherwise.
        """
        if super(SinaDailyFixManager, self)._generateTask(task, checkTaskList) is False:
            return False
        task[CrawlerConstants.PARA_CLASS] = SinaDailyFixCrawler
        stockid, market, startDate, endDate = self._taskList.pop(0)
        task[SinaDailyFixCrawler.PARA_STOCKID] = stockid
        task[SinaDailyFixCrawler.PARA_MARKET] = market
        task[SinaDailyFixCrawler.PARA_STARTDATE] = startDate
        task[SinaDailyFixCrawler.PARA_ENDDATE] = endDate
        self.logger.debug('taskItem:%s,%s,%s,%s',
                          stockid, market, startDate, endDate)

        return True

class SinaDailyFixCrawler(CrawlerBase):
    """Back-fills missing daily quote rows for a single stock.

    Computes which dates in [startDate, endDate] are absent from
    sina_stockdaily (skipping dates CTPUtils flags as holidays), fetches
    the Sina year/quarter pages covering those dates, parses the
    non-adjusted (BFQ) and adjusted (HFQ) price tables, and merges them
    into StockDailyData records keyed by 'YYYY-MM-DD' strings.
    """
    PARA_MARKET = SinaConstants.PARA_MARKET
    PARA_URL = 'url'
    PARA_STOCKID = 'stockid'
    PARA_STARTDATE = 'startDate'
    PARA_ENDDATE = 'endDate'

    def __init__(self, controller, dbProxy, request):
        super(SinaDailyFixCrawler, self).__init__(controller, dbProxy, request)
        self.__parsePara(request)

    def __parsePara(self, request):
        # Cache the task parameters filled in by SinaDailyFixManager.
        self.__stockid = request[SinaDailyFixCrawler.PARA_STOCKID]
        self.__market = request[SinaDailyFixCrawler.PARA_MARKET]
        self.__startDate = request[SinaDailyFixCrawler.PARA_STARTDATE]
        self.__endDate = request[SinaDailyFixCrawler.PARA_ENDDATE]

    def __getMissingDates(self):
        """Return the non-holiday dates missing between consecutive
        recorded rows for this stock, in ascending order."""
        # NOTE(review): values are interpolated into the SQL string; they
        # originate from our own DB here, but parameterized queries would
        # be safer if dbProxy supports them.
        sql = 'SELECT date FROM sina_stockdaily WHERE stockid="%s" AND market="%s" AND date>="%s" AND date<="%s" ORDER BY date asc' %(
                self.__stockid,
                self.__market,
                self.__startDate.strftime('%Y-%m-%d'),
                self.__endDate.strftime('%Y-%m-%d')
                )
        missingDates = list()
        if self.dbProxy.execute(sql)>0:
            results = self.dbProxy.cur.fetchall()
            beginDate = results[0][0]
            index = 1
            # Scan each pair of consecutive recorded dates and collect the
            # dates in between.  Bug fix: the loop used to stop at
            # len(results)-1, which silently skipped the gap just before
            # the last recorded date.
            while index < len(results):
                nextDate = results[index][0]
                while nextDate > beginDate + datetime.timedelta(days=1):
                    beginDate += datetime.timedelta(days=1)
                    # Dates CTPUtils reports as holidays are expected gaps,
                    # not missing data.
                    if CTPUtils.isHoliday(CTPUtils.Singleton, beginDate) is False:
                        missingDates.append(beginDate)

                beginDate = nextDate
                index += 1
        return missingDates

    def __generateUrl(self, year=None, quarter=None, fq=False):
        """Build the Sina quarterly-history URL for this stock
        (adjusted-price page when fq=True)."""
        if fq:
            url = SinaDailyCrawler.SINA_STOCK_FQ_URI % self.__stockid
        else:
            url = SinaDailyCrawler.SINA_STOCK_BFQ_URI % self.__stockid
        return url + '?year=%d&jidu=%d' % (year, quarter)

    def run(self):
        """Crawl until every missing date has been parsed or skipped.

        Returns CrawlerConstants.VAL_STATUS_STOP when shut down early,
        otherwise reports done and returns VAL_STATUS_FINISH.
        """
        super(SinaDailyFixCrawler, self).run()
        self.__missingDates = self.__getMissingDates()
        self.logger.debug('MissingDates for %s are: length=%d, content=%s', self.__stockid, len(self.__missingDates), self.__missingDates)
        insertValues = dict()
        while len(self.__missingDates)>0:
            missingDate = self.__missingDates[0]
            (year, quarter) = self.__getYearAndQuarter(missingDate)
            if self.isShutDown():
                return CrawlerConstants.VAL_STATUS_STOP
            bfqurl = self.__generateUrl(year, quarter)
            bfqcontent = self._fetchContent(bfqurl)
            bfqvalues = self.__parseBFQStockData(bfqcontent, bfqurl)
            if bfqvalues is not None and len(bfqvalues)>0:
                self.controller.randomSleep()
                fqurl = self.__generateUrl(year, quarter, fq=True)
                fqcontent = self._fetchContent(fqurl)
                finalvalues = self.__parseFQStockData(fqcontent, fqurl, bfqvalues)
                insertValues.update(finalvalues)
                # Drop every date this quarter page covered so the loop
                # makes progress.
                self.__removeDates(bfqvalues.keys())
            else:
                # Nothing parseable for this date's quarter; skip the date.
                self.__missingDates.pop(0)
            self.controller.randomSleep()
        # NOTE(review): the insert is deliberately left disabled in the
        # original -- presumably a dry-run safeguard; confirm before
        # re-enabling.
        #self.__insertToTable(insertValues)
        self.logger.debug('To be insert dates for %s after handling are: length=%d, content=%s', self.__stockid, len(insertValues), insertValues.keys())
        status = CrawlerConstants.VAL_STATUS_FINISH
        self._reportDone(status)
        # Return the final status for consistency with the early-stop path.
        return status

    def __getYearAndQuarter(self, missingDate):
        """Map a date to the (year, quarter) parameters of the Sina page
        that contains it."""
        year = missingDate.year
        quarter = 4
        if missingDate < datetime.date(year, 4, 1):
            quarter = 1
        elif missingDate < datetime.date(year, 7, 1):
            quarter = 2
        elif missingDate < datetime.date(year, 10, 1):
            quarter = 3

        return (year, quarter)

    def __removeDates(self, datesStrList):
        """Remove each 'YYYY-MM-DD' string in *datesStrList* from the
        pending missing-date list."""
        for sdate in datesStrList:
            d = datetime.datetime.strptime(sdate,"%Y-%m-%d")
            dd = datetime.date(d.year, d.month, d.day)
            self.__missingDates.remove(dd)

    def __isDateInRange(self, sdate):
        """True when the 'YYYY-MM-DD' string is one of the dates still
        waiting to be back-filled."""
        d = datetime.datetime.strptime(sdate,"%Y-%m-%d")
        dd = datetime.date(d.year, d.month, d.day)
        if dd in self.__missingDates:
            return True
        return False

    def __parseRaw(self, content, url):
        """Extract the data rows of the FundHoldSharesTable from *content*.

        Returns the <tr> list minus the first two (header) rows, or None
        when the table is absent or too short.
        """
        lindex = content.find('<table id="FundHoldSharesTable"')
        if lindex<0:
            self.logger.warn('No FundHoldSharesTable table for %s', url)
            return None
        # Parse only from the table onwards to keep BeautifulSoup cheap.
        soup = BeautifulSoup(content[lindex:])
        fundHoldSharesTable = soup.findAll('table', {'id':'FundHoldSharesTable'})
        if len(fundHoldSharesTable)==0:
            Logging.LOGGER.error('No fundHoldSharesTable found for url:%s', url)
            return None

        trs = fundHoldSharesTable[0].findAll('tr')
        if len(trs)<3:
            return None

        # The first two rows carry column headers, not data.
        return trs[2:]

    def __parseBFQStockData(self, content, url):
        """Parse the non-adjusted price table into a
        {dateStr: StockDailyData} dict restricted to missing dates.

        Returns None when the content is empty or has no data rows.
        """
        if content == '':
            self.logger.debug('content is none:%s', url)
            return None
        trs = self.__parseRaw(content, url)
        if trs is None or len(trs)==0:
            self.logger.debug('No trs for url:%s', url)
            return None
        values = dict()
        for i in range(0,len(trs)):
            try:
                tds = trs[i].findAll('td')
                sdate = str(tds[0].text.strip())
                if self.__isDateInRange(sdate) is False:
                    continue
                open_bfq = float(tds[1].text.strip())
                high_bfq = float(tds[2].text.strip())
                close_bfq = float(tds[3].text.strip())
                low_bfq = float(tds[4].text.strip())
                vol = int(float(tds[5].text.strip()))
                amount = float(tds[6].text.strip())
                values[sdate] = StockDailyData(sdate, open_bfq, high_bfq, low_bfq, close_bfq, vol,amount)
            except Exception as e:
                # A malformed row is recorded and skipped; the rest of the
                # table is still usable.
                self._recordFailure(url, 'Fail to parse row %d, %s'% (i,str(e)))
                self.logger.error('Fail to parse row')
                continue
        return values

    def __parseFQStockData(self, content, url, bfqvalues):
        """Merge adjusted (HFQ) prices and the adjust factor from the FQ
        page into the already-parsed *bfqvalues*; returns *bfqvalues*."""
        if content == '':
            return bfqvalues
        trs = self.__parseRaw(content, url)
        if trs is None or len(trs)==0:
            return bfqvalues
        for i in range(0,len(trs)):
            try:
                tds = trs[i].findAll('td')
                sdate = str(tds[0].text.strip())
                if self.__isDateInRange(sdate) is False:
                    continue
                open_hfq = float(tds[1].text.strip())
                high_hfq = float(tds[2].text.strip())
                close_hfq = float(tds[3].text.strip())
                low_hfq = float(tds[4].text.strip())
                factor = float(tds[7].text.strip())
                bfqvalues[sdate].open_hfq=open_hfq
                bfqvalues[sdate].high_hfq=high_hfq
                bfqvalues[sdate].close_hfq=close_hfq
                bfqvalues[sdate].low_hfq=low_hfq
                bfqvalues[sdate].factor=factor
            except Exception as e:
                # Rows missing the FQ columns are recorded and skipped;
                # the matching BFQ record stays without HFQ fields.
                self._recordFailure(url, 'Fail to parse row %d, %s'% (i,str(e)))
                self.logger.error('Fail to parse row')
                continue

        return bfqvalues

    def __insertToTable(self, stockdailydataDict):
        """Bulk-insert the collected StockDailyData records; returns True
        on a committed insert, False when there was nothing to do or the
        insert failed."""
        if len(stockdailydataDict)==0:
            self.logger.debug('Nothing to insert')
            return False
        svalues = list()
        for key in stockdailydataDict:
            value = stockdailydataDict[key]
            # Bug fix: this used self.__stockId (capital I), an attribute
            # that is never set (__parsePara assigns __stockid), so every
            # call raised AttributeError via name mangling.
            valuestr = '("%s", "%s", "%s", %f, %f, %f, %f, %f, %f, %f, %f, %d, %f, %f)'%\
                          (self.__stockid, self.__market, value.sdate,
                           value.open_bfq, value.high_bfq, value.low_bfq, value.close_bfq,
                           value.open_hfq, value.high_hfq, value.low_hfq, value.close_hfq,
                           value.vol,value.amount, value.factor)

            svalues.append(valuestr)

        sql = 'INSERT INTO %s (stockid, market, date, open_bfq, high_bfq, low_bfq, close_bfq, open_hfq, high_hfq, low_hfq, close_hfq, vol, amount, factor) values ' % (SinaConstants.TABLE_SINA_STOCKDAILY)
        sql = sql + ','.join(svalues)
        if self.dbProxy.execute(sql)>0:
            self.totalNum += len(stockdailydataDict)
            self.dbProxy.commit()
            return True
        return False
if __name__ == '__main__':
    # Refuse to start while a previous run's PID file is still present.
    if PIDUtils.isPidFileExist('sdf'):
        # print() call form behaves identically under Python 2 (single
        # parenthesized argument) and keeps the file Python-3 parseable.
        print('Previous Sina Daily Fix Crawler process is on-going, please stop it firstly')
        sys.exit(1)
    import os
    pid = os.getpid()
    PIDUtils.writePid('sdf', pid)
    Logging.initLogger('conf/crawler/crawler.logging.cfg')
    sdf = SinaDailyFixManager('conf/crawler/sdf.cfg')
    sdf.start()
    # NOTE(review): arguments suggest a watcher that calls sdf.shutDown on
    # a 5-unit interval tied to the 'sdf' PID file -- confirm against
    # PIDUtils before relying on this.
    pidutils = PIDUtils('sdf', sdf.shutDown, 5, sdf.logger)
    pidutils.start()
    sys.exit(0)
