'''
Created on Feb 5, 2015

@author: eyaomai
'''
import sys
sys.path.append(sys.argv[0][:sys.argv[0].rfind('com/stocklens')])

from com.stocklens.stock.common.utils import Logging,  PIDUtils , OneTimeThread
from com.stocklens.stock.data.crawlercommon import CrawlerConstants, CrawlerManager, CrawlerBase, CrawlerCommon
from com.stocklens.stock.data.sina.sinacommon import SinaConstants
from bs4 import BeautifulSoup
import datetime
import traceback
from rpy2                                import robjects

class SinaDailyConstants(object):
    """String constants: the crawler's logger name and the key names used in
    the ssd JSON config file (see SinaDailyManager._getNonCommonConfig)."""
    # Logger name shared by SinaDailyManager and SinaDailyCrawler.
    LOGGER_NAME_CRAWL_SSD = 'ssd'
    # Keys inside the config's "dateRange" section.
    FILE_CONFIG_ISLATEST = 'isLatest'
    FILE_CONFIG_STARTDATE = 'startDate'
    FILE_CONFIG_ENDDATE = 'endDate'
    # Top-level config keys.
    FILE_CONFIG_STOCKFILE = 'stockFile'
    FILE_CONFIG_DATERANGE = 'dateRange'
    FILE_CONFIG_INDEX = 'index'

class SinaDailyManager(CrawlerManager):
    """Crawler manager that schedules daily price crawl tasks from Sina.

    Task items are either ``(stockid, market)`` tuples for normal crawls or
    bare URL strings for retrying previously failed fetches.  Once every task
    reports done, a one-off background thread recomputes the "fuquan"
    (price-adjustment) data through R (see __updateFuquan).
    """

    def __init__(self, json_config_file):
        """Initialize from the JSON config file; 0.1 is the crawl delay factor
        passed to the base CrawlerManager."""
        super(SinaDailyManager, self).__init__(json_config_file, 0.1, SinaDailyCrawler.FAILUREURL_PATTERN)
        self.logger = Logging.getLogger(SinaDailyConstants.LOGGER_NAME_CRAWL_SSD)

    def _generateTask(self, task, checkTaskList=True):
        """Pop the next task item and fill ``task`` with crawler parameters.

        Returns False when the base class rejects generation (e.g. nothing
        left to do), True otherwise.
        """
        if super(SinaDailyManager, self)._generateTask(task, checkTaskList) is False:
            return False
        task[CrawlerConstants.PARA_CLASS] = SinaDailyCrawler
        taskItem = self._taskList.pop(0)
        if isinstance(taskItem, tuple):
            # Normal task: (stockid, market) pair plus the configured date range.
            task[SinaDailyCrawler.PARA_STARTDATE] = self.__startDate
            task[SinaDailyCrawler.PARA_ENDDATE] = self.__endDate
            task[SinaDailyCrawler.PARA_STOCKID] = taskItem[0]
            task[SinaDailyCrawler.PARA_MARKET] = taskItem[1]
        else:
            # Failure-retry task: a bare URL recorded from an earlier failed crawl.
            task[SinaDailyCrawler.PARA_URL] = taskItem

        return True

    def _getNonCommonConfig(self, config):
        """Read crawler-specific settings: stop-stock handling, date range,
        optional stock file, and the index list."""
        self.__ignoreStop = config['ignoreStop'].strip().lower() == 'true'

        dateRange = config[SinaDailyConstants.FILE_CONFIG_DATERANGE]
        isLatest = dateRange[SinaDailyConstants.FILE_CONFIG_ISLATEST]
        if isLatest.strip().lower() == 'true':
            # "Latest" mode: no lower bound; crawl up to today.
            self.__startDate = None
            self.__endDate = datetime.date.today().strftime('%Y-%m-%d')
        else:
            self.__startDate = dateRange[SinaDailyConstants.FILE_CONFIG_STARTDATE]
            self.__endDate = dateRange[SinaDailyConstants.FILE_CONFIG_ENDDATE]

        # Optional file listing specific stocks to crawl; blank means unset.
        self.__stockFile = None
        if SinaDailyConstants.FILE_CONFIG_STOCKFILE in config:
            self.__stockFile = config[SinaDailyConstants.FILE_CONFIG_STOCKFILE]
            if self.__stockFile.strip() == "":
                self.__stockFile = None

        self.__stockIndex = config[SinaDailyConstants.FILE_CONFIG_INDEX]

    def __getStockFromFile(self, filename):
        """Append (stockid, market) tasks parsed from a tab-separated file.

        Only the first tab field of each line is used; lines whose first field
        starts with '#' are treated as comments.  Ids starting with '6' map to
        the Shanghai market, everything else to Shenzhen.  A missing or
        unreadable file is logged and leaves the task list untouched.
        """
        if filename is None:
            return list()
        try:
            # `with` guarantees the handle is closed even if reading fails
            # (the original leaked it on error).
            with open(filename) as f:
                rawLines = list(f)
        except EnvironmentError:
            # Best-effort, as before -- but log instead of silently swallowing.
            self.logger.warn('Fail to read stock file %s:%s', filename, traceback.format_exc())
            return list()
        stockIds = filter(lambda y: not y.startswith('#'),
                          map(lambda x: x.replace('\n', '').replace('\r', '').split('\t')[0], rawLines))
        stockList = map(lambda x: (x, SinaConstants.VAL_MARKET_SH) if x.startswith('6')
                        else (x, SinaConstants.VAL_MARKET_SZ), stockIds)
        self._taskList.extend(stockList)

    def _initTask(self):
        """Build the initial task list: from the stock file when configured,
        otherwise from the index config plus the stock-info table."""
        if self.__stockFile is not None:
            self.__getStockFromFile(self.__stockFile)
            return
        if self.__stockIndex is not None and len(self.__stockIndex) > 0:
            # Index entries are '<2-char market><stockid>' strings; split them.
            self._taskList.extend([(x[2:], x[0:2]) for x in self.__stockIndex])
        if self.__ignoreStop:
            # Exclude stocks listed in the "stop" (suspended) table.
            sql = 'SELECT stockid, market FROM %s WHERE stockid NOT IN (SELECT stockid FROM %s)' % (
                SinaConstants.TABLE_SINA_STOCKINFO, SinaConstants.TABLE_SINA_STOCKINFO_STOP)
        else:
            sql = 'SELECT stockid, market FROM %s' % (SinaConstants.TABLE_SINA_STOCKINFO)
        if self.dbProxy.execute(sql) > 0:
            self._taskList.extend([(x[0], x[1]) for x in self.dbProxy.cur.fetchall()])

    def _handleNotifyDone(self, request):
        """On each finished task, start the R fuquan update once all are done."""
        super(SinaDailyManager, self)._handleNotifyDone(request)
        if self._isFinish():
            OneTimeThread(self.__updateFuquan, {}).start()

    def __updateFuquan(self):
        """Recompute adjusted ("fuquan") prices via the R script r/stats_t0.R.

        Runs in a background OneTimeThread; any failure is logged, never
        propagated.
        """
        try:
            self.logger.info('Begin to update Fuquan Data')
            ro = robjects.r
            ro('source("r/stats_t0.R")')
            self.logger.debug('[R]Source file complete')
            connStatement = 'conn <- dbConnect(MySQL(), user="%s", password="%s", dbname="%s", host="%s")' % (
                self._dbUser, self._dbPasswd, self._dbName, self._dbHost)
            ro(connStatement)
            self.logger.debug('[R]Create db conn')
            # After a daily crawl only the last three days need recomputation.
            beginDate = (datetime.datetime.now() - datetime.timedelta(3)).strftime('%Y-%m-%d')
            stockType = "nostop" if self.__ignoreStop else "all"
            updateStatement = 'updateDBFuQuanPrice(stockid="%s",isFromDB=TRUE,timespan="%s/")' % (stockType, beginDate)
            ro(updateStatement)
            self.logger.debug('[R]Update Fuquan')
            ro('dbDisconnect(conn)')
            self.logger.debug('[R]Close DB conn')
            self.logger.info('Finish update Fuquan Data')
        except Exception:
            # Narrowed from a bare except: so SystemExit/KeyboardInterrupt
            # in this thread are no longer swallowed.
            self.logger.error('Fail to update FuQuan Data:%s', traceback.format_exc())
class SinaDailyCrawler(CrawlerBase):
    """Crawls daily price history for a single stock from Sina.

    Two modes, chosen by the request contents (see __parsePara):
      * URL-retry mode: re-fetch one previously failed raw-price page.
      * Stock mode: compute which date ranges are missing from the DB and
        crawl them quarter by quarter.
    Each crawled quarter merges the raw ("BFQ") page with the adjusted
    ("FuQuan"/FQ) page before inserting into the daily table.
    """
    # Substring of the BFQ history URL; used to tag/recognize failure records.
    FAILUREURL_PATTERN = 'vMS_MarketHistory'
    PARA_MARKET = SinaConstants.PARA_MARKET
    PARA_URL = 'url'
    PARA_STOCKID = 'stockid'
    PARA_STARTDATE = SinaDailyConstants.FILE_CONFIG_STARTDATE
    PARA_ENDDATE = SinaDailyConstants.FILE_CONFIG_ENDDATE
    
    # %s is the 6-char stock id.  FQ = adjusted prices, BFQ = raw prices.
    SINA_STOCK_FQ_URI = 'http://vip.stock.finance.sina.com.cn/corp/go.php/vMS_FuQuanMarketHistory/stockid/%s.phtml'
    SINA_STOCK_BFQ_URI = 'http://vip.stock.finance.sina.com.cn/corp/go.php/vMS_MarketHistory/stockid/%s.phtml'
    def __init__(self, controller, dbProxy, request):
        """Parse the request parameters and set up per-crawl state."""
        super(SinaDailyCrawler, self).__init__(controller, dbProxy, request)
        self.__parsePara(request)
        # Inclusive (start, end) datetime.date tuples still missing from the DB.
        self.__dateRanges = list()
        self.__startYear = None
        self.__endYear = None
        self.logger = Logging.getLogger(SinaDailyConstants.LOGGER_NAME_CRAWL_SSD)

    def run(self):
        """Dispatch to URL-retry or whole-stock crawl, then report the status."""
        super(SinaDailyCrawler, self).run()
        if self.__url is not None:
            status = self.__runForSpecificUrl()
        else:
            status = self.__runForAStock()
        
        self._reportDone(status)
    
    def __runForSpecificUrl(self):
        """Retry a single failed BFQ URL; returns a CrawlerConstants status."""
        self.logger.info('Begin to Crawl %s', self.__url)
        status = CrawlerConstants.VAL_STATUS_FINISH
        try:            
            content = self._fetchContent(self.__url)
            if content is None:
                status = CrawlerConstants.VAL_STATUS_FAILURE
            else:
                # checkDate=False: a retried URL is crawled in full, ignoring
                # the configured date range.
                bfqvalues = self.__parseBFQStockData(content, self.__url, checkDate=False)
                if bfqvalues is not None and len(bfqvalues)>0:
                    # The FQ (adjusted-price) page is the same URL with a
                    # different handler name.
                    fqurl = self.__url.replace('vMS_MarketHistory', 'vMS_FuQuanMarketHistory')
                    self.controller.randomSleep()
                    fqcontent = self._fetchContent(fqurl)
                    finalvalues = self.__parseFQStockData(fqcontent, fqurl, bfqvalues,checkDate=False)                
                    if self.__insertToTable(finalvalues):
                        # A successful insert clears this URL from the failure list.
                        self._recoverFailure(self.__url)
                
        except Exception:
            traceInfo = traceback.format_exc()
            self.logger.warn('Fail to Crawl %s:%s', self.__url, traceInfo)
            status = CrawlerConstants.VAL_STATUS_FAILURE
        self.logger.info('Finish to Crawl %s', self.__url)
        return status    
    
    def __runForAStock(self):
        """Crawl all missing quarters for one stock; returns a status constant."""
        self.logger.info('Begin to Crawl %s', self.__stockId)
        status = CrawlerConstants.VAL_STATUS_FINISH
        try:
            self.__calculateDateRange()
            # The first fetch doubles as the source of the year/quarter form.
            initbfqurl = self.__generateUrl()
            initcontent = self._fetchContent(initbfqurl)
            if initcontent is None:
                status = CrawlerConstants.VAL_STATUS_FAILURE
            else:
                # NOTE(review): __getSelectedYearAndQuarter may return a bare
                # None (no year <select>), and year_quarter_dict may be None;
                # either makes the lines below raise into the except handler,
                # failing the whole stock -- confirm that is intended.
                (selected_year, selected_quarter) = self.__getSelectedYearAndQuarter(initcontent)
                year_quarter_dict = self.__generateQueryYearsAndQuarters(initcontent)
                self.logger.debug('DateRange for %s is %s', self.__stockId, self.__dateRanges)
                self.logger.debug('year_quarter_dict for %s is %s', self.__stockId, year_quarter_dict)
                for year in sorted(year_quarter_dict.keys(), reverse = False):
                    for quarter in sorted(year_quarter_dict[year], reverse=False):
                        if self.isShutDown():
                            return CrawlerConstants.VAL_STATUS_STOP
                        self.logger.info('Parse Year %d Quarter %d for %s', year, quarter, self.__stockId)
                        if year != selected_year or quarter != selected_quarter:
                            bfqurl = self.__generateUrl(year, quarter, fq=False)
                            bfqcontent = self._fetchContent(bfqurl)                            
                        else:
                            # Reuse the already-fetched initial page.
                            bfqurl = initbfqurl
                            bfqcontent = initcontent
                        bfqvalues = self.__parseBFQStockData(bfqcontent, bfqurl)
                        if bfqvalues is not None and len(bfqvalues)>0:
                            self.controller.randomSleep()
                            fqurl = self.__generateUrl(year, quarter, fq=True)
                            # NOTE(review): a None here slips past the '' check
                            # in __parseFQStockData and raises inside it,
                            # aborting the stock via the except below -- verify.
                            fqcontent = self._fetchContent(fqurl)
                            finalvalues = self.__parseFQStockData(fqcontent, fqurl, bfqvalues)
                            self.__insertToTable(finalvalues)
                            self._recoverFailure(bfqurl)
                        self.controller.randomSleep()                                
        except Exception:
            traceInfo = traceback.format_exc()
            self.logger.warn('Fail to Crawl %s:%s', self.__stockId, traceInfo)
            status = CrawlerConstants.VAL_STATUS_FAILURE
        
        self.logger.info('Finish to Crawl %s', self.__stockId)    
        return status
    
    def __parsePara(self, request):
        """Extract parameters: URL-retry mode if PARA_URL present, else stock mode."""
        if SinaDailyCrawler.PARA_URL in request:
            self.__url = request[SinaDailyCrawler.PARA_URL]
            # The 6 characters before '.phtml' are the stock id.
            index = self.__url.find('.phtml')
            self.__stockId = self.__url[index-6:index]
            # Ids starting with '6' trade in Shanghai, the rest in Shenzhen.
            if self.__stockId.startswith('6'):
                self.__market = SinaConstants.VAL_MARKET_SH
            else:
                self.__market = SinaConstants.VAL_MARKET_SZ
        else:
            self.__url = None
            self.__market = request[SinaDailyCrawler.PARA_MARKET]
            self.__startDate = request[SinaDailyCrawler.PARA_STARTDATE]
            self.__endDate = request[SinaDailyCrawler.PARA_ENDDATE]
            self.__stockId = request[SinaDailyCrawler.PARA_STOCKID]
    
    def __generateUrl(self, year=None, quarter=None, fq=False):
        """Build the BFQ or FQ page URL, optionally pinned to a year/quarter."""
        if fq:
            url = SinaDailyCrawler.SINA_STOCK_FQ_URI % self.__stockId
        else:
            url = SinaDailyCrawler.SINA_STOCK_BFQ_URI % self.__stockId
        if year!=None and quarter!=None:
            # 'jidu' is the quarter (1-4) parameter of Sina's history form.
            url += '?year=%d&jidu=%d' % (year, quarter)
        return url
    
    def __calculateDateRange(self):
        """Compute the date spans missing from the DB for this stock.

        Queries min/max stored dates inside the requested window and appends
        the uncovered sub-ranges to self.__dateRanges; also sets
        self.__startYear / self.__endYear to bound which year/quarter pages
        get fetched.  An empty self.__dateRanges means nothing to crawl.
        """
        where_list = list()
        if self.__startDate!= None:
            where_list.append(' date>= "%s" ' % self.__startDate)
        where_list.append(' date<= "%s" ' % self.__endDate)
        where_list.append(' stockid="%s" ' % self.__stockId)
        where_list.append(' market="%s" ' % self.__market)

        # NOTE(review): SQL built by string formatting; values come from
        # config/DB rather than end users, but parameterized queries would
        # be safer.
        sql = 'SELECT min(date), max(date) from %s WHERE %s' % (SinaConstants.TABLE_SINA_STOCKDAILY, ' AND '.join(where_list))
        self.dbProxy.execute(sql)
        sql_result = self.dbProxy.cur.fetchone()

        if sql_result[0] is not None and sql_result[1] is not None:
            minDate = sql_result[0]
            maxDate = sql_result[1]
            minDatestr = sql_result[0].strftime('%Y-%m-%d')
            maxDatestr = sql_result[1].strftime('%Y-%m-%d')
            if self.__startDate is None:
                #for no startDate means latest: crawl only what follows maxDate
                if maxDatestr<=self.__endDate:
                    self.__dateRanges.append((maxDate+datetime.timedelta(days=1), CrawlerCommon.toDate(self.__endDate)))
                    self.__startYear = (maxDate+datetime.timedelta(days=1)).year
            else:
                #choose the daterange
                if maxDatestr < self.__startDate or minDatestr>self.__endDate:
                    #the specified range has no inter-set with [min,max] scope, use the specified range directly
                    self.__dateRanges.append((CrawlerCommon.toDate(self.__startDate), CrawlerCommon.toDate(self.__endDate)))
                    self.__startYear = CrawlerCommon.toDate(self.__startDate).year
                elif self.__startDate>=minDatestr and self.__endDate<=maxDatestr:
                    #the specified range is totally within [min,max] scope, no need to crawl
                    pass
                else:
                    #there are inter-set between specified range & [min,max] range:
                    #crawl the part before minDate and/or the part after maxDate
                    if minDatestr>self.__startDate:
                        self.__dateRanges.append((CrawlerCommon.toDate(self.__startDate), minDate-datetime.timedelta(days=1)))
                    if maxDatestr < self.__endDate:
                        self.__dateRanges.append((maxDate+datetime.timedelta(days=1), CrawlerCommon.toDate(self.__endDate)))
                    self.__startYear = min(CrawlerCommon.toDate(self.__startDate), maxDate+datetime.timedelta(days=1)).year
        else:
            #this is the first time to crawl this stock, use the specified range
            # NOTE(review): in "latest" mode self.__startDate is None here, so
            # CrawlerCommon.toDate(None) is called -- confirm toDate handles None.
            self.__dateRanges.append((CrawlerCommon.toDate(self.__startDate), CrawlerCommon.toDate(self.__endDate)))
            self.__startYear = CrawlerCommon.toDate(self.__startDate).year
        
        self.__endYear = CrawlerCommon.toDate(self.__endDate).year            

    def __generateQueryYearsAndQuarters(self, content):
        """Map each candidate year to the quarters overlapping the missing ranges.

        Parses the <select name="year"> options from the page.  Years are
        clamped to [startYear, endYear] and never earlier than 1981.  Returns
        None when the form cannot be found; the caller's except block absorbs
        the resulting error.
        """
        lindex = content.find('<select name="year"')
        if lindex<0:
            self.logger.warn('No year table for %s', self.__url)
            return None
        # Parse only from the select onwards to keep the soup small.
        soup = BeautifulSoup(content[lindex:])
        select_years = soup.findAll('select', {'name':'year'})
        if len(select_years)==0:
            return None
        options = select_years[0].findAll('option')
        
        if len(options)==0:
            return None
        years = filter(lambda x: x>=self.__startYear and x<=self.__endYear and x>=1981,
                        [int(option['value']) for option in options])
        ret = dict()
        for year in years:
            ret[year] = list()
            for quarter in range(4, 0, -1):
                # Keep only quarters that intersect a missing date range.
                if self.__isInRange(year, quarter):
                    ret[year].append(quarter)
        
        return ret
    
    def __isDateInRange(self, sdate):
        """True if the 'YYYY-MM-DD' string falls inside any missing date range."""
        if len(self.__dateRanges)==0:
            return False
        sdate_datetime = datetime.datetime.strptime(sdate, '%Y-%m-%d')
        # Convert to a date so it compares cleanly with the stored range bounds.
        sdate_datetime = datetime.date(sdate_datetime.year, sdate_datetime.month, sdate_datetime.day)
        for dateRange in self.__dateRanges:
            start = dateRange[0]
            end = dateRange[1]
            if sdate_datetime>=start and sdate_datetime<=end:
                return True
        return False
            
    def __isInRange(self, year, quarter):
        """True if the given calendar quarter overlaps any missing date range."""
        if len(self.__dateRanges)==0:
            return False
        quarter_begin_date = datetime.date(year, (quarter-1)*3+1, 1)
        if quarter == 4:
            quarter_end_date = datetime.date(year, 12, 31)
        else:
            # Last day of the quarter: first day of the next quarter minus one.
            quarter_end_date = datetime.date(year, quarter*3+1, 1) - datetime.timedelta(days=1)
        for dateRange in self.__dateRanges:
            start = dateRange[0]
            end = dateRange[1]
            # Overlap: either endpoint inside the range, or the quarter
            # fully containing the range.
            if (quarter_begin_date>=start and quarter_begin_date<=end) or\
                (quarter_end_date>=start and quarter_end_date<=end) or\
                (quarter_begin_date<=start and quarter_end_date>=end):
                return True
        
        return False
    
    def __getSelectedYearAndQuarter(self, content):
        """Return the (year, quarter) currently selected in the page's form.

        Elements of the tuple are None when no option carries 'selected'.
        NOTE(review): returns a bare None (not a tuple) when the year select
        is missing -- the caller unpacks the result, so that case raises into
        __runForAStock's except block.
        """
        #find selected year
        lindex = content.find('<select name="year"')
        if lindex<0:
            self.logger.warn('No year table for %s', self.__url)
            return None
        soup = BeautifulSoup(content[lindex:])
        select_years = soup.findAll('select', {'name':'year'})
        options = filter(lambda x:x.has_attr('selected'), select_years[0].findAll('option'))
        selected_year = None
        if len(options)>0:
            selected_year = int(options[0]['value'])
        #find selected quarter ('jidu')
        select_quarters = soup.findAll('select', {'name':'jidu'})
        options = filter(lambda x:x.has_attr('selected'), select_quarters[0].findAll('option'))
        selected_quarter = None
        if len(options)>0:
            selected_quarter = int(options[0]['value'])
        return (selected_year, selected_quarter)

    def __parseRaw(self, content, url):
        """Return the data <tr> rows of FundHoldSharesTable (header rows
        stripped), or None when the table is missing or has no data rows."""
        lindex = content.find('<table id="FundHoldSharesTable"')
        if lindex<0:
            self.logger.warn('No FundHoldSharesTable table for %s', url)
            return None
        soup = BeautifulSoup(content[lindex:])
        fundHoldSharesTable = soup.findAll('table', {'id':'FundHoldSharesTable'})
        if len(fundHoldSharesTable)==0:
            Logging.LOGGER.error('No fundHoldSharesTable found for url:%s', url)
            self._recordFailure(url, 'No fundHoldSharesTable found')
            return None
        
        trs = fundHoldSharesTable[0].findAll('tr')
        if len(trs)<3:
            return None
        
        # The first two rows are headers; the rest carry daily data.
        return trs[2:]
            
    def __parseBFQStockData(self, content, url, checkDate=True):
        """Parse the raw-price (BFQ) page into {date-str: StockDailyData}.

        checkDate=False disables the missing-date filter (used for URL
        retries).  Rows that fail to parse are recorded as failures and
        skipped; returns None when the page yields no rows at all.
        """
        if content == '':
            Logging.LOGGER.debug('content is none:%s', url)
            return None
        trs = self.__parseRaw(content, url)
        if trs is None or len(trs)==0:
            Logging.LOGGER.debug('No trs for url:%s', url)
            return None
        values = dict()
        for i in range(0,len(trs)):
            try:
                # Columns: date, open, high, close, low, volume, amount.
                tds = trs[i].findAll('td')
                sdate = str(tds[0].text.strip())
                if checkDate and not self.__isDateInRange(sdate):
                    continue
                open_bfq = float(tds[1].text.strip())
                high_bfq = float(tds[2].text.strip())
                close_bfq = float(tds[3].text.strip())
                low_bfq = float(tds[4].text.strip())
                vol = int(float(tds[5].text.strip()))
                amount = float(tds[6].text.strip())
                values[sdate] = StockDailyData(sdate, open_bfq, high_bfq, low_bfq, close_bfq, vol,amount)
            except Exception as e:
                self._recordFailure(url, 'Fail to parse row %d, %s'% (i,str(e)))
                continue
        return values        

    def __parseFQStockData(self, content, url, bfqvalues, checkDate=True):
        """Merge adjusted (FQ) prices into ``bfqvalues`` in place and return it.

        Rows whose date has no BFQ entry raise KeyError, which is recorded as
        a failure and skipped, leaving that date's hfq fields at their
        StockDailyData defaults.
        """
        if content == '':
            return bfqvalues
        trs = self.__parseRaw(content, url)
        if trs is None or len(trs)==0:
            return bfqvalues
        for i in range(0,len(trs)):
            try:
                # Columns: date, open, high, close, low, ..., factor at index 7.
                tds = trs[i].findAll('td')
                sdate = str(tds[0].text.strip())
                if checkDate and not self.__isDateInRange(sdate):
                    continue
                open_hfq = float(tds[1].text.strip())
                high_hfq = float(tds[2].text.strip())
                close_hfq = float(tds[3].text.strip())
                low_hfq = float(tds[4].text.strip())
                factor = float(tds[7].text.strip())
                bfqvalues[sdate].open_hfq=open_hfq
                bfqvalues[sdate].high_hfq=high_hfq
                bfqvalues[sdate].close_hfq=close_hfq
                bfqvalues[sdate].low_hfq=low_hfq
                bfqvalues[sdate].factor=factor
            except Exception as e:
                self._recordFailure(url, 'Fail to parse row %d, %s'% (i,str(e)))
                continue
            
        return bfqvalues

    def __insertToTable(self, stockdailydataDict):
        """Bulk-insert the merged rows into the daily table; True on success.

        NOTE(review): the VALUES clause is built with %-formatting; the
        interpolated values are parsed numbers and config/DB-sourced ids,
        but parameterized inserts would be safer.
        """
        if len(stockdailydataDict)==0:
            self.logger.debug('Nothing to insert')
            return False
        svalues = list()
        for key in stockdailydataDict:
            value = stockdailydataDict[key]
            valuestr = '("%s", "%s", "%s", %f, %f, %f, %f, %f, %f, %f, %f, %d, %f, %f)'%\
                          (self.__stockId, self.__market, value.sdate,
                           value.open_bfq, value.high_bfq, value.low_bfq, value.close_bfq, 
                           value.open_hfq, value.high_hfq, value.low_hfq, value.close_hfq,
                           value.vol,value.amount, value.factor)
            
            svalues.append(valuestr)
            
        sql = 'INSERT INTO %s (stockid, market, date, open_bfq, high_bfq, low_bfq, close_bfq, open_hfq, high_hfq, low_hfq, close_hfq, vol, amount, factor) values ' % (SinaConstants.TABLE_SINA_STOCKDAILY)
        sql = sql + ','.join(svalues)
        if self.dbProxy.execute(sql)>0:
            # totalNum is presumably maintained by CrawlerBase as the count of
            # rows written -- confirm against the base class.
            self.totalNum+=len(stockdailydataDict)
            self.dbProxy.commit()
            return True
        return False
class StockDailyData(object):
    """One day's market record for a stock: raw ("bfq") OHLC data plus slots
    for adjusted ("hfq") prices that are filled in later from the FQ page."""

    def __init__(self, sdate, open_bfq, high_bfq, low_bfq, close_bfq, vol, amount):
        # Raw (non-adjusted) figures, straight from the crawled BFQ page.
        self.sdate = sdate
        self.open_bfq = open_bfq
        self.high_bfq = high_bfq
        self.low_bfq = low_bfq
        self.close_bfq = close_bfq
        self.vol = vol
        self.amount = amount
        # Adjusted fields default to a neutral factor and zeroed prices until
        # the FQ parse overwrites them.
        self.factor = 1
        self.open_hfq = self.high_hfq = self.close_hfq = self.low_hfq = 0.0
if __name__ == '__main__':
    if PIDUtils.isPidFileExist('ssd'):
        print 'Previous Sina Daily Crawler process is on-going, please stop it firstly'
        sys.exit(1)
    import os
    pid = os.getpid()
    PIDUtils.writePid('ssd', pid)
    Logging.initLogger('conf/crawler/crawler.logging.cfg')
    ssd = SinaDailyManager('conf/crawler/ssd.cfg')
    ssd.start()
    pidutils = PIDUtils('ssd', ssd.shutDown, 5, ssd.logger)
    pidutils.start()
    sys.exit(0)
