'''
Created on Jan 9, 2015

@author: eyaomai
'''
import sys
sys.path.append(sys.argv[0][:sys.argv[0].rfind('com/stocklens')])

import datetime
import json
import os
import random
import re
import time

from com.stocklens.stock.common.utils import RequestHandler,\
Logging, MySqlProxy, Audit, PIDUtils,StoppableThread, OneTimeThread
from com.stocklens.stock.data.crawlercommon import CrawlerCommon
from bs4 import BeautifulSoup
class SinaStockCrawler(RequestHandler):
    '''
    Controller for the Sina daily stock-data crawl.

    Reads a JSON configuration, starts a pool of CrawlerWorker threads,
    hands out crawl tasks (stock lists, daily histories, failure
    retries) and keeps the stock-info / stop tables in MySQL up to
    date.  A periodic 'config' Audit re-reads the configuration so
    numThread and sleepRange can be tuned at runtime.
    '''

    def __init__(self, json_config_file):
        '''
        Parse the config file and initialize logging, database proxies
        and all scheduling state.

        json_config_file -- path to the JSON configuration; expected to
            contain a 'DailyCrawler' section (see the attribute reads
            below).  The 'config' Audit re-reads this file every 60s.
        '''
        super(SinaStockCrawler, self).__init__(sleep_time=0.1)
        self.__json_config_file = json_config_file
        config = self.readConfig(json_config_file)
        self.__dbHost = config['DailyCrawler']['dbHost']
        self.__dbPort = config['DailyCrawler']['dbPort']
        self.__dbUser = config['DailyCrawler']['dbUser']
        self.__dbPasswd = config['DailyCrawler']['dbPasswd']
        self.__dbName = config['DailyCrawler']['dbName']
        self.__stockdaily_name = config['DailyCrawler']['stockdaily_name']
        self.__stockinfo_name = config['DailyCrawler']['stockinfo_name']
        failureHandling = config['DailyCrawler']['failureHandling']
        ignoreStop = config['DailyCrawler']['ignoreStop']
        # Optional explicit stock list file; blank means "not configured".
        self.__stockFile = None
        if 'stockFile' in config['DailyCrawler']:
            self.__stockFile = config['DailyCrawler']['stockFile']
            if self.__stockFile.strip() == "":
                self.__stockFile = None
        # Config booleans arrive as strings; anything except 'false' is True.
        self.__ignoreStop = ignoreStop.strip().lower() != 'false'
        self.__failureHandling = failureHandling.strip().lower() != 'false'

        isLatest = config['DailyCrawler']['dateRange']['isLatest']
        if isLatest.strip().lower() == 'true':
            # "Latest" mode: no explicit start date, crawl up to today.
            self.__startDate = None
            self.__endDate = datetime.date.today().strftime('%Y-%m-%d')
        else:
            self.__startDate = config['DailyCrawler']['dateRange']['startDate']
            self.__endDate = config['DailyCrawler']['dateRange']['endDate']
        self._numThread = config['DailyCrawler']['numThread']
        self.__sleepRange = config['DailyCrawler']['sleepRange']
        logfile = config['DailyCrawler']['logconfig']
        import logging.config
        logging.config.fileConfig(logfile)
        Logging.LOGGER = logging.getLogger('ssc')
        self.__workers = dict()
        self.__configAudit = Audit(self, 60, 'config')
        self.__isValid = True
        self.__validate()
        self.dbProxy = MySqlProxy(self.__dbHost, self.__dbPort, self.__dbUser, self.__dbPasswd, self.__dbName)
        self.__stockList = list()
        self.__stockHandlingSet = set()
        self.__isFinish = False
        self.__startTime = None
        self.__totalTasks = 0
        self.__fullStockSetName = set()
        self.__stopStockset = set()
        self.__stockListTasks = dict()
        self.__failureUrl = list()
        # Min/max time observed handling a single request, for debugging.
        self.__debugMaxTime = 0.0
        self.__debugMinTime = 1.0

        self.__workerDbProxy = MySqlProxy(self.__dbHost, self.__dbPort, self.__dbUser, self.__dbPasswd, self.__dbName)
        self.__dbUpdater = DBUpdater(self.__workerDbProxy)

    def isFinish(self):
        '''Return True once all queued tasks have been handed out.'''
        return self.__isFinish

    def __getStockList(self):
        '''
        Load (stockid, market) pairs from the optional stock file.

        Lines starting with '#' are skipped; only the first
        tab-separated column is used.  Market is inferred from the id
        prefix: '6...' means SH, everything else SZ.  Best effort: a
        read failure is logged and leaves the stock list empty.
        '''
        if self.__stockFile is None:
            return
        try:
            with open(self.__stockFile) as f:
                fl = list(f)
            stockList = filter(lambda y: not y.startswith('#'), map(lambda x: x.replace('\n', '').replace('\r', '').split('\t')[0], fl))
            self.__stockList = map(lambda x: (x, 'SH') if x.startswith('6') else (x, 'SZ'), stockList)
        except Exception as e:
            # Previously a silent "except: pass"; keep best-effort but log.
            Logging.LOGGER.error('Fail to load stock file %s: %s', self.__stockFile, str(e))

    def __writeStockList(self):
        '''
        Dump finished and pending stocks per market into a timestamped
        log directory so an interrupted crawl can be resumed manually.
        '''
        now_str = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
        logdirectory = './log/%s' % now_str
        os.mkdir(logdirectory)
        sse_finish = sorted([y[0] + '\n' for y in filter(lambda x: x[1] == 'SH', self.__stockHandlingSet)])
        sze_finish = sorted([y[0] + '\n' for y in filter(lambda x: x[1] == 'SZ', self.__stockHandlingSet)])
        sse_tobedone = sorted([y[0] + '\n' for y in filter(lambda x: x[1] == 'SH', self.__stockList)])
        sze_tobedone = sorted([y[0] + '\n' for y in filter(lambda x: x[1] == 'SZ', self.__stockList)])
        for (filename, lines) in (('sse_finish', sse_finish),
                                  ('sze_finish', sze_finish),
                                  ('sse_tobedone', sse_tobedone),
                                  ('sze_tobedone', sze_tobedone)):
            with open(logdirectory + '/' + filename, 'w') as f:
                f.writelines(lines)

    def __findFailureTask(self):
        '''Load retryable failed daily-history URLs from the failure table.'''
        sql = 'SELECT url, market from %s_failure where status>0 AND status<3 AND url like "%%vMS_MarketHistory%%"' % self.__stockdaily_name
        self.dbProxy.execute(sql)
        self.__failureUrl = [(x[0], x[1]) for x in self.dbProxy.cur.fetchall()]
        Logging.LOGGER.info('Totally %d failure tasks', len(self.__failureUrl))

    def __startWorkers(self, begin, end, isInit=False):
        '''
        Create and start workers with ids [begin, end) and seed them
        with work.  On initial start without a stock file, either the
        failure queue or a stock-list crawl provides the first tasks;
        otherwise each new worker immediately receives a task.
        '''
        for i in range(begin, end):
            dbProxy = MySqlProxy(self.__dbHost, self.__dbPort, self.__dbUser, self.__dbPasswd, self.__dbName)
            worker = CrawlerWorker(self, i, dbProxy, self.__dbUpdater)
            self.__workers[i] = worker
            worker.start()
            Logging.LOGGER.info('Start worker %d', i)

        if isInit is True and self.__stockFile is None:
            if self.__failureHandling:
                self.__findFailureTask()
                for i in range(begin, end):
                    self.__sendTask(i)
                    time.sleep(0.5)
            else:
                if end > begin + 1:
                    self.__sendStockListTask(begin, begin + 1)
                else:
                    # Only one worker available: it crawls both markets.
                    self.__sendStockListTask(begin, begin)
        else:
            for i in range(begin, end):
                self.__sendTask(i)
                time.sleep(0.5)

    def __queueStockListRequest(self, worker_id, market):
        '''Queue one 'crawlStockList' request for *market* on a worker.'''
        request = dict()
        request['action'] = 'crawlStockList'
        request['type'] = 'crawlStockList'
        request['market'] = market
        request['pageSize'] = 40
        request['sleep_range'] = self.__sleepRange
        self.__workers[worker_id].addRequest(request)
        # Count outstanding list crawls per worker; __handleNotifyDone
        # flushes the stock tables once every counter reaches zero.
        self.__stockListTasks[worker_id] = self.__stockListTasks.get(worker_id, 0) + 1
        Logging.LOGGER.info('Send task (%s) to worker %d', request, worker_id)

    def __sendStockListTask(self, sh, sz):
        '''Send the SH list crawl to worker *sh* and the SZ one to *sz*.'''
        self.__queueStockListRequest(sh, 'SH')
        time.sleep(0.5)
        self.__queueStockListRequest(sz, 'SZ')

    def __shutDownWorkers(self):
        '''Ask every worker thread to stop.'''
        for i in self.__workers.keys():
            self.__workers[i].shutDown()
            Logging.LOGGER.info('Shutdown worker %d', i)

    def __sendTask(self, i):
        '''
        Send the next pending task to worker *i*.

        Failure retries take priority over fresh stock crawls.  Returns
        False when the config is invalid, worker *i* does not exist, or
        no work is left (the crawl is then marked finished).
        '''
        if self.__isValid is False:
            return False
        if i not in self.__workers:
            Logging.LOGGER.error('No worker %d ', i)
            return False
        # Failed URLs are retried before new stocks are crawled.
        if len(self.__failureUrl) > 0:
            request = dict()
            request['action'] = 'crawlStockDaily'
            request['type'] = 'crawlFailure'
            request['table_name'] = 'sina_stockdaily'
            request['sleep_range'] = self.__sleepRange
            (url, market) = self.__failureUrl.pop(0)
            request['url'] = str(url)
            request['market'] = str(market)
            self.__workers[i].addRequest(request)
            Logging.LOGGER.info('Send task (%s) to worker %d', request, i)
        elif len(self.__stockList) > 0:
            (stockid, market) = self.__stockList.pop(0)
            self.__stockHandlingSet.add((stockid, market))
            request = dict()
            request['action'] = 'crawlStockDaily'
            request['type'] = 'crawlStockDaily'
            request['stockid'] = str(stockid)
            request['market'] = str(market)
            request['table_name'] = 'sina_stockdaily'
            request['sleep_range'] = self.__sleepRange
            request['startDate'] = self.__startDate
            request['endDate'] = self.__endDate
            self.__workers[i].addRequest(request)
            Logging.LOGGER.info('Send task (%s) to worker %d', request, i)
        else:
            Logging.LOGGER.info('No task available')
            self.__isFinish = True
            return False

        return True

    def __validate(self):
        '''
        Check sleepRange and the configured date range; sets
        self.__isValid (tasks are refused while the config is invalid).
        '''
        self.__isValid = True
        if len(self.__sleepRange) != 2:
            Logging.LOGGER.error('sleepRange is not correct')
            self.__isValid = False
        try:
            if float(self.__sleepRange[1]) <= float(self.__sleepRange[0]):
                Logging.LOGGER.error('sleepRange is not correct')
                self.__isValid = False
        except Exception:
            Logging.LOGGER.error('sleepRange format is not float')
            self.__isValid = False

        try:
            startDate = None
            if self.__startDate is not None and self.__startDate.strip() != "":
                startDate = datetime.datetime.strptime(self.__startDate, '%Y-%m-%d')

            endDate = datetime.date.today()
            if self.__endDate is not None and self.__endDate.strip() != "":
                endDate = datetime.datetime.strptime(self.__endDate, '%Y-%m-%d')

            if startDate is not None and endDate < startDate:
                Logging.LOGGER.error('endDate must be larger than startDate')
                self.__isValid = False
        except Exception:
            Logging.LOGGER.error('Invalid format of startDate and/or endDate')
            self.__isValid = False

    def start(self):
        '''Start the controller, its audit, DB proxies and worker pool.'''
        super(SinaStockCrawler, self).start()
        self.__configAudit.start()
        self.__startTime = time.time()
        self.dbProxy.start()
        self.__getStockList()
        self.__workerDbProxy.start()
        self.__startWorkers(0, self._numThread, True)

    def _shutDown(self):
        '''Stop audit, workers and DB proxies, then dump progress files.'''
        self.__configAudit.shutDown()
        self.__shutDownWorkers()
        self.dbProxy.shutDown()
        self.__workerDbProxy.shutDown()
        # Give worker threads a moment to finish their current request.
        time.sleep(10)
        super(SinaStockCrawler, self)._shutDown()
        self.__writeStockList()

    def _handleRequest(self, request):
        '''Dispatch audit / worker notifications; track handling latency.'''
        if super(SinaStockCrawler, self)._handleRequest(request):
            return True
        start = time.time()
        action = request['action']
        if action == 'audit':
            self.__handleAudit(request)
        elif action == 'notifyDone':
            self.__handleNotifyDone(request)
        elif action == 'notifyStockList':
            # Merging a stock list can be slow; run it off this thread.
            OneTimeThread(self.__handleNotifyStockList, {'request': request}).start()
        else:
            Logging.LOGGER.error('Unknown request')
        delta = time.time() - start
        if delta > self.__debugMaxTime:
            self.__debugMaxTime = delta
        if delta < self.__debugMinTime:
            self.__debugMinTime = delta

    def __handleAudit(self, request):
        '''Route an audit tick by its parameter.'''
        parameter = request['parameter']
        if parameter == 'config':
            self.__handleConfigAudit(request)
        else:
            Logging.LOGGER.error('Unknown audit parameter')

    def __handleConfigAudit(self, request):
        '''
        Re-read the config file and apply runtime-tunable settings:
        the sleep range for existing workers and, when numThread grew,
        additional workers.
        '''
        # BUG FIX: this used to call self.__readConfig, which does not
        # exist (name-mangled); the base-class helper used everywhere
        # else, e.g. in __init__, is readConfig.
        config = self.readConfig(self.__json_config_file)
        self._numThread = config['DailyCrawler']['numThread']
        self.__sleepRange = config['DailyCrawler']['sleepRange']
        self.__validate()
        if self.__isValid is False:
            return
        for identifier in self.__workers.keys():
            self.__workers[identifier].changeSleepRange(self.__sleepRange)
        if self._numThread > len(self.__workers):
            self.__startWorkers(len(self.__workers), self._numThread)

    def __handleNotifyDone(self, request):
        '''
        Account a finished task, flush the stock tables once all list
        crawls are done, retire surplus workers when numThread shrank,
        and hand the reporting worker its next task.
        '''
        Logging.LOGGER.info('Receiving done from worker: %s', request)
        self.__totalTasks += 1
        rate = (time.time() - self.__startTime) / self.__totalTasks
        Logging.LOGGER.info('Average Rate:%f seconds/task. Current Rate: %f.Total Finished:%d', round(rate, 2), request['totaltime'], self.__totalTasks)
        if request['type'] == 'crawlStockList':
            self.__stockListTasks[request['identifier']] -= 1
            allDone = True
            for identifier in self.__stockListTasks.keys():
                if self.__stockListTasks[identifier] > 0:
                    allDone = False
            if allDone:
                try:
                    self.__insertTable()
                except Exception:
                    Logging.LOGGER.error('Fail to insert stock table')
        # If numThread was reduced at runtime, retire this worker.
        if len(self.__workers) > self._numThread:
            self.__workers[request['identifier']].shutDown()
            Logging.LOGGER.info('Shutdown worker %d', request['identifier'])
            del self.__workers[request['identifier']]
            return
        self.__sendTask(request['identifier'])

    def __handleNotifyStockList(self, request):
        '''
        Merge a crawled stock list into the pending queue (optionally
        excluding stopped stocks) and wake other workers with new tasks.
        '''
        fullStockSetName = request['fullStockSetName']
        stopStockset = request['stopStockset']
        newStock = False
        if self.__ignoreStop is False:
            stockList = [(x[0], x[1]) for x in fullStockSetName]
        else:
            stockList = set.difference(
                set([(x[0], x[1]) for x in fullStockSetName]),
                stopStockset
            )
        self.__fullStockSetName.update(fullStockSetName)
        self.__stopStockset.update(stopStockset)
        for stock in stockList:
            if stock not in self.__stockHandlingSet:
                self.__stockList.append(stock)
                newStock = True

        if not newStock:
            return
        for i in self.__workers.keys():
            if i != request['identifier']:
                if self.__sendTask(i) is False:
                    break
                time.sleep(0.1)

    def __insertTable(self):
        '''
        Sync the crawled stock universe to MySQL: insert newly seen
        stocks into the info table, delete re-opened stocks from the
        stop table and insert newly stopped ones with today's date.
        '''
        Logging.LOGGER.debug('Insert to tables')
        sql = 'select stockid, market from %s' % (self.__stockinfo_name)
        self.dbProxy.execute(sql)
        resultSet = set([(str(x[0]), str(x[1])) for x in self.dbProxy.cur.fetchall()])

        # Stocks seen online but missing from the info table.
        differentSet = set(filter(lambda x: (x[0], x[1]) not in resultSet, self.__fullStockSetName))
        if len(differentSet) > 0:
            values = list()
            for newStock in differentSet:
                values.append('("%s","%s")' % (newStock[0], newStock[1]))
            sql = 'INSERT INTO %s (stockid, market) values ' % self.__stockinfo_name
            if self.dbProxy.execute(sql + ','.join(values)) > 0:
                self.dbProxy.commit()

        # Reconcile the stop table with the freshly crawled stop set.
        sql = 'select stockid, market from %s_stop' % (self.__stockinfo_name)
        self.dbProxy.execute(sql)
        stopresultSet = set([(str(x[0]), str(x[1])) for x in self.dbProxy.cur.fetchall()])

        reopenSet = set.difference(stopresultSet, self.__stopStockset)
        newStopSet = set.difference(self.__stopStockset, stopresultSet)
        if len(reopenSet) > 0:
            sql = 'DELETE from %s_stop WHERE ' % (self.__stockinfo_name)
            where_list = list()
            for item in reopenSet:
                where_list.append('(stockid="%s" and market="%s")' % (item[0], item[1]))
            if self.dbProxy.execute(sql + ' OR '.join(where_list)) > 0:
                self.dbProxy.commit()

        today = datetime.date.today().strftime('%Y-%m-%d')

        if len(newStopSet) > 0:
            values = list()
            for newStock in newStopSet:
                values.append('("%s","%s","%s")' % (newStock[0], newStock[1], today))
            sql = 'INSERT INTO %s_stop (stockid, market, date) values ' % self.__stockinfo_name
            if self.dbProxy.execute(sql + ','.join(values)) > 0:
                self.dbProxy.commit()
                
class CrawlerWorker(RequestHandler):
    '''
    Worker thread owned by SinaStockCrawler.

    Runs at most one crawler (stock list or daily history) at a time
    and reports results back to the controller via request messages.
    '''

    def __init__(self, controller, identifier, dbProxy, dbUpdater):
        '''Remember the owning controller, worker id and DB helpers.'''
        super(CrawlerWorker, self).__init__(sleep_time=1)
        self.__owner = controller
        self.__workerId = identifier
        self.dbProxy = dbProxy
        self.__updater = dbUpdater
        self.__prefix = '[WORKER%s]' % str(identifier)
        self.__request = None
        self.__crawler = None
        self.__sleepRange = [0.5, 1]

    def isBusy(self):
        '''True while a crawler is still running on this worker.'''
        return self.__crawler is not None

    def changeSleepRange(self, sleep_range):
        '''Adopt a new [min, max] inter-request sleep range.'''
        self.__sleepRange = sleep_range

    def getDisplayPrefix(self):
        '''Logging prefix identifying this worker.'''
        return self.__prefix

    def start(self):
        super(CrawlerWorker, self).start()

    def _shutDown(self):
        '''Stop any running crawler, then shut the worker thread down.'''
        running = self.__crawler
        if running is not None:
            running.shutDown()
        super(CrawlerWorker, self)._shutDown()
        time.sleep(2)

    def _handleRequest(self, request):
        '''Dispatch a queued request to the matching crawl launcher.'''
        if super(CrawlerWorker, self)._handleRequest(request) is True:
            return True
        launchers = {
            'crawlStockList': self.__launchStockList,
            'crawlStockDaily': self.__launchStockDaily,
        }
        launch = launchers.get(request['action'])
        if launch is not None:
            launch(request)

    def __launchStockList(self, request):
        '''Start a StockListCrawler, or requeue if one is already running.'''
        if self.__crawler is not None:
            self.addRequest(request)
            return
        self.__request = request
        self.__sleepRange = request['sleep_range']
        self.__crawler = StockListCrawler(self, self.dbProxy, request)
        self.__crawler.start()

    def __launchStockDaily(self, request):
        '''Start a StockDailyCrawler, or requeue if one is already running.'''
        if self.__crawler is not None:
            self.addRequest(request)
            return
        self.__request = request
        self.__sleepRange = request['sleep_range']
        self.__crawler = StockDailyCrawler(self, self.dbProxy, request)
        self.__crawler.start()

    def notifyDone(self, result):
        '''Crawler-completion callback; forward a notifyDone message.'''
        self.__crawler.shutDown()
        done = dict(self.__request)
        done.update(result)
        done['action'] = 'notifyDone'
        done['identifier'] = self.__workerId
        self.__crawler = None
        self.__request = None

        self.__owner.addRequest(done)

    def notifyStockList(self, fullStockSetName, stopStockset):
        '''Forward a freshly crawled stock list to the controller.'''
        message = dict(self.__request)
        message['action'] = 'notifyStockList'
        message['identifier'] = self.__workerId
        message['fullStockSetName'] = fullStockSetName
        message['stopStockset'] = stopStockset

        self.__owner.addRequest(message)

    def updateDb(self, sql):
        '''Queue a SQL statement on the shared DB-updater thread.'''
        self.__updater.addRequest({'action': 'dbupdate', 'sql': sql})

    def randomSleep(self):
        '''Sleep a uniformly random time within the configured range.'''
        low = self.__sleepRange[0]
        high = self.__sleepRange[1]
        time.sleep(low + (high - low) * random.random())

class StockListCrawler(StoppableThread):
    '''
    Thread that pages through Sina's A-share quote API for one market
    (SH or SZ), collecting the full stock list plus the set of stocks
    with zero traded amount, and reports both back to its controller.
    '''
    # Quote-service endpoints; the two %d slots are (page, pageSize).
    SINA_STOCK_LIST_SH_URI = 'http://vip.stock.finance.sina.com.cn/quotes_service/api/json_v2.php/Market_Center.getHQNodeData?page=%d&num=%d&sort=symbol&asc=1&node=sh_a&symbol=&_s_r_a=init'
    SINA_STOCK_LIST_SZ_URI = 'http://vip.stock.finance.sina.com.cn/quotes_service/api/json_v2.php/Market_Center.getHQNodeData?page=%d&num=%d&sort=symbol&asc=1&node=sz_a&symbol=&_s_r_a=init'
    def __init__(self, controller, dbProxy, request):
        '''
        controller -- owning CrawlerWorker (provides randomSleep and the
            notifyDone / notifyStockList callbacks)
        dbProxy -- MySqlProxy; currently only referenced by the disabled
            failure-recording code below
        request -- dict with 'market' ('SH'/'SZ') and 'pageSize'
        '''
        super(StockListCrawler, self).__init__(1)
        self.__controller = controller
        self.dbProxy = dbProxy
        self.__market = request['market']
        self.__pageSize = request['pageSize']
        # Running count of stocks seen so far, reported in the result.
        self.__totalNum = 0

    def run(self):
        '''Crawl pages until a short page signals the end, then notify.'''
        starttime = time.time()
        page = 1
        while(self.__crawl(page) is True):            
            page+=1
            self.__controller.randomSleep()
        endtime = time.time()
        result = {'status':'finish','totaltime':endtime-starttime, 'totalNum':self.__totalNum} 
        # Notify on a one-shot thread so this crawler thread can exit.
        OneTimeThread(self.__controller.notifyDone, {'result':result}).start()        
    
    def __crawl(self, page):
        '''Fetch and parse one page; return True while more pages remain.'''
        content, url = self.__fetchContent(page)
        num = self.__parse(content, url)
        self.__totalNum+=num
        Logging.LOGGER.info('Query %s page %d with %d items return', self.__market, page, num)
        # A page shorter than pageSize means this was the last page.
        if num < self.__pageSize:
            return False
        return True
                    
    def __fetchContent(self, page):
        '''Download one list page; on failure return empty content.'''
        if self.__market == 'SH':
            url = StockListCrawler.SINA_STOCK_LIST_SH_URI % (page, self.__pageSize)
        else:
            url = StockListCrawler.SINA_STOCK_LIST_SZ_URI % (page, self.__pageSize)
        try:
            (content, warning) = CrawlerCommon.readContent(url)
        except:
            Logging.LOGGER.error('Fail to fetch content for url:%s', url)
            self.__recordFailure(url, 'Fail to fetch content')
            return ('', url)
        return (content,url)
    
    def __parse(self, content, url):
        '''
        Convert the API response to JSON and report the stocks found.

        The endpoint returns JavaScript object literals with unquoted
        keys; the replacement loop below quotes each known key so that
        json.loads can parse the payload.  NOTE(review): this assumes
        none of the keywords appears inside a value string - verify
        against the live feed before changing the keyword list/order.
        '''
        keywords = ['symbol:','code:','name:','trade:','pricechange:','changepercent:','buy:','sell:','settlement:', 'open:','high:','low:','volume:','amount:','ticktime:','per:','pb:','mktcap:','nmc:','turnoverratio:']
        for keyword in keywords:
            content = content.replace(keyword,'"'+keyword[:-1]+'":')
        json_str = '{"content":%s}'%content
        jo = json.loads(json_str)
        stock_fullSetName = set([(str(y['code']),self.__market, y['name'].encode('utf-8')) for y in jo['content']])
        # Zero traded amount presumably marks a suspended stock -- TODO confirm.
        stock_stopSet = set([(str(y['code']),self.__market) for y in filter(lambda x: float(x['amount'])==0,jo['content'])])
        
        self.__controller.notifyStockList(stock_fullSetName, stock_stopSet)
        return len(stock_fullSetName)
    
    def __recordFailure(self, url, reason):
        '''Record a fetch/parse failure; persistence is currently disabled.'''
        pass
        '''
        sql = 'INSERT INTO sina_stockdaily_failure (url, reason) values ("%s","%s")' % (url, reason)
        if self.dbProxy.execute(sql)>0:
            self.dbProxy.commit()
        '''
        
class StockDailyCrawler(StoppableThread):
    SINA_STOCK_FQ_URI = 'http://vip.stock.finance.sina.com.cn/corp/go.php/vMS_FuQuanMarketHistory/stockid/%s.phtml'
    SINA_STOCK_BFQ_URI = 'http://vip.stock.finance.sina.com.cn/corp/go.php/vMS_MarketHistory/stockid/%s.phtml'
    def __init__(self, controller, dbProxy, request):
        super(StockDailyCrawler, self).__init__(1)
        self.__controller = controller
        self.dbProxy = dbProxy
        self.__startYear = None
        self.__endYear = None
        self.__dateRanges = list()
        self.__type = request['type']
        if self.__type=='crawlStockDaily':
            self.__sinaStockid = request['stockid']
            self.__market = request['market']
            self.__startDate = request['startDate']
            self.__endDate = request['endDate']
        else:
            self.__url = request['url']
            self.__market = request['market']
            index = self.__url.find('.phtml')
            self.__sinaStockid = self.__url[index-6:index]
        self.__table_name = request['table_name']
        
        self.__displayPrefix = controller.getDisplayPrefix()
        self.__totalNum = 0
        self.__totalPage = 0
    
    def __updateDateRange(self):
        where_list = list()
        if self.__startDate!= None:
            where_list.append(' date>= "%s" ' % self.__startDate)
        where_list.append(' date<= "%s" ' % self.__endDate)
        where_list.append(' stockid="%s" ' % self.__sinaStockid)
        where_list.append(' market="%s" ' % self.__market)

        sql = 'SELECT min(date), max(date) from %s WHERE %s' % (self.__table_name, ' AND '.join(where_list))
        self.dbProxy.execute(sql)
        sql_result = self.dbProxy.cur.fetchone()

        if sql_result[0] is not None and sql_result[1] is not None:
            minDate = sql_result[0]
            maxDate = sql_result[1]
            minDatestr = sql_result[0].strftime('%Y-%m-%d')
            maxDatestr = sql_result[1].strftime('%Y-%m-%d')
            if self.__startDate is None:
                #for no startDate means latest
                if maxDatestr<=self.__endDate:
                    self.__dateRanges.append((maxDate+datetime.timedelta(days=1), CrawlerCommon.toDate(self.__endDate)))
                    self.__startYear = (maxDate+datetime.timedelta(days=1)).year
            else:
                #choose the daterange
                if maxDatestr < self.__startDate or minDatestr>self.__endDate:
                    #the specified range has no inter-set with [min,max] scope, use the specified range directly
                    self.__dateRanges.append((CrawlerCommon.toDate(self.__startDate), CrawlerCommon.toDate(self.__endDate)))
                    self.__startYear = CrawlerCommon.toDate(self.__startDate).year
                elif self.__startDate>=minDatestr and self.__endDate<=maxDatestr:
                    #the specified range is totally within [min,max] scope, no need to crawl
                    pass
                else:
                    #there are inter-set between specified range & [min,max] range
                    if minDatestr>self.__startDate:
                        self.__dateRanges.append((CrawlerCommon.toDate(self.__startDate), minDate-datetime.timedelta(days=1)))
                    if maxDatestr < self.__endDate:
                        self.__dateRanges.append((maxDate+datetime.timedelta(days=1), CrawlerCommon.toDate(self.__endDate)))
                    self.__startYear = min(CrawlerCommon.toDate(self.__startDate), maxDate+datetime.timedelta(days=1)).year
        else:
            #this is the first time to crawl this stock, use the specified range
            self.__dateRanges.append((CrawlerCommon.toDate(self.__startDate), CrawlerCommon.toDate(self.__endDate)))
            self.__startYear = CrawlerCommon.toDate(self.__startDate).year
        
        self.__endYear = CrawlerCommon.toDate(self.__endDate).year
        
    def __checkStartEndDate(self):
        if self.__endDate is None:
            return False
        if self.__endDate<self.__startDate:
            return False
        return True
    
    def __genResult(self, status):
        result = {'status':status,'totaltime':time.time()-self.__starttime, 'totalNum':self.__totalNum, 'totalPage':self.__totalPage}
        return result
    def run(self):
        self.__starttime = time.time()
        if self.__type == 'crawlFailure':
            Logging.LOGGER.info('%sBegin to Crawl for %s', self.__displayPrefix, self.__url)
            content, url = self.__fetchContent(self.__url,fq=False) 
            bfqvalues = self.__parseBFQStockData(content, url, checkDate=False)
            count = 0
            if bfqvalues is not None and len(bfqvalues)>0:
                fqurl = url.replace('vMS_MarketHistory', 'vMS_FuQuanMarketHistory')
                self.__controller.randomSleep()
                fqcontent, fqurl = self.__fetchContent(fqurl, fq=True)
                finalvalues = self.__parseFQStockData(fqcontent, fqurl, bfqvalues,checkDate=False)                
                #count = len(finalvalues)
                if self.__insertToTable(finalvalues):
                    self.__updateFailureTable(self.__url)
                
            Logging.LOGGER.info('%sFinish to Crawl for %s with %d records', self.__displayPrefix, self.__url, count)
            self.__controller.notifyDone(self.__genResult('finish'))
            return
        
        Logging.LOGGER.info('%sBegin to Crawl for %s', self.__displayPrefix, self.__sinaStockid)
        if self.__checkStartEndDate() is False:
            self.__controller.notifyDone(self.__genResult('fail'))
            return
        self.__updateDateRange()
        content,url = self.__fetchContent()
        
        (selected_year, selected_quarter) = self.__getSelectedYearAndQuarter(content)
        year_quarter_dict = self.__generateQueryYearsAndQuarters(content)
        Logging.LOGGER.debug('dateRanges:%s', self.__dateRanges)
        Logging.LOGGER.debug('year_quarter_dict:%s', year_quarter_dict)
        for year in sorted(year_quarter_dict.keys(), reverse = False):
            for quarter in sorted(year_quarter_dict[year], reverse=False):
                if self.isShutDown():
                    Logging.LOGGER.info('%sStopped and return', self.__displayPrefix)
                    self.__controller.notifyDone(self.__genResult('stopped'))
                    return
                Logging.LOGGER.info('%sParse Year %d Quarter %d for %s', self.__displayPrefix, year, quarter, self.__sinaStockid)
                if year != selected_year or quarter != selected_quarter:
                    bfqcontent, bfqurl = self.__fetchContent(None, year, quarter)
                else:
                    bfqcontent, bfqurl = content, url
                bfqvalues = self.__parseBFQStockData(bfqcontent, bfqurl)
                if bfqvalues is not None and len(bfqvalues)>0:
                    self.__controller.randomSleep()
                    fqcontent, fqurl = self.__fetchContent(None, year, quarter, fq=True)
                    finalvalues = self.__parseFQStockData(fqcontent, fqurl, bfqvalues)
                    self.__insertToTable(finalvalues)
                self.__controller.randomSleep()
        
        Logging.LOGGER.info('%sFinish to Crawl for %s', self.__displayPrefix, self.__sinaStockid)
        self.__controller.notifyDone(self.__genResult('finish'))
            
    def __fetchContent(self, url=None, year=None, quarter=None, fq=False):
        if url is None:
            if fq:
                url = StockDailyCrawler.SINA_STOCK_FQ_URI % self.__sinaStockid
            else:
                url = StockDailyCrawler.SINA_STOCK_BFQ_URI % self.__sinaStockid
            if year!=None and quarter!=None:
                url += '?year=%d&jidu=%d' % (year, quarter)
        try:
            self.__totalPage+=1
            (content, warning) = CrawlerCommon.readContent(url)
        except:
            Logging.LOGGER.error('Fail to fetch content for url:%s', url)
            self.__recordFailure(url, 'Fail to fetch content')
            return ('', url)
        return (content,url)
    
    def __parseRaw(self, content, url):
        """Extract the data rows of the FundHoldSharesTable from a Sina
        history page, skipping the two header rows.

        Returns the list of <tr> data rows, or None when the table is
        missing (failure recorded) or holds no data rows.
        """
        soup = BeautifulSoup(content)
        tables = soup.findAll('table', {'id': 'FundHoldSharesTable'})
        if not tables:
            Logging.LOGGER.error('%sNo fundHoldSharesTable found for url:%s', self.__displayPrefix, url)
            self.__recordFailure(url, 'No fundHoldSharesTable found')
            return None

        rows = tables[0].findAll('tr')
        # rows[0:2] are header rows; require at least one data row
        return rows[2:] if len(rows) >= 3 else None
            
    def __parseFQStockData(self, content, url, bfqvalues, checkDate=True):
        """Merge adjusted (HFQ) prices parsed from *content* into the
        already-parsed BFQ records *bfqvalues* (a dict keyed by date string).

        Rows that fail to parse -- including rows whose date has no BFQ
        entry -- are recorded as failures and skipped.  Returns bfqvalues
        in every case.
        """
        if content == '':
            return bfqvalues
        rows = self.__parseRaw(content, url)
        if not rows:
            return bfqvalues
        for rowidx, row in enumerate(rows):
            try:
                cells = row.findAll('td')
                sdate = str(cells[0].text.strip())
                if checkDate and not self.__isDateInRange(sdate):
                    continue
                # parse every cell before touching bfqvalues so a bad float
                # leaves the record untouched (same order as before)
                open_hfq = float(cells[1].text.strip())
                high_hfq = float(cells[2].text.strip())
                close_hfq = float(cells[3].text.strip())
                low_hfq = float(cells[4].text.strip())
                factor = float(cells[7].text.strip())
                record = bfqvalues[sdate]
                record.open_hfq = open_hfq
                record.high_hfq = high_hfq
                record.close_hfq = close_hfq
                record.low_hfq = low_hfq
                record.factor = factor
            except Exception as e:
                self.__recordFailure(url, 'Fail to parse row %d, %s' % (rowidx, str(e)))
                continue

        return bfqvalues
    
    def __updateFailureTable(self, url):
        """Mark the failure-table row for *url* as FIXED, committing only
        when a row actually changed."""
        # NOTE(review): url is interpolated straight into SQL; acceptable for
        # internally-built URLs, unsafe for untrusted input.
        update_sql = 'UPDATE %s_failure SET status="FIXED" where url="%s"' % (self.__table_name, url)
        Logging.LOGGER.debug(update_sql)
        if self.dbProxy.execute(update_sql) > 0:
            self.dbProxy.commit()
    def __insertToTable(self, stockdailydataDict):
        """Bulk-insert the given {date: StockDailyData} records into the
        daily table.

        Returns True when rows were inserted and committed, False when the
        dict is empty or the insert affected no rows.
        """
        if len(stockdailydataDict) == 0:
            Logging.LOGGER.debug('Nothing to insert')
            return False
        row_template = '("%s", "%s", "%s", %f, %f, %f, %f, %f, %f, %f, %f, %d, %f, %f)'
        svalues = [row_template %
                   (self.__sinaStockid, self.__market, v.sdate,
                    v.open_bfq, v.high_bfq, v.low_bfq, v.close_bfq,
                    v.open_hfq, v.high_hfq, v.low_hfq, v.close_hfq,
                    v.vol, v.amount, v.factor)
                   for v in stockdailydataDict.values()]

        sql = ('INSERT INTO %s (stockid, market, date, open_bfq, high_bfq, low_bfq, close_bfq, '
               'open_hfq, high_hfq, low_hfq, close_hfq, vol, amount, factor) values ' % (self.__table_name))
        sql += ','.join(svalues)
        if self.dbProxy.execute(sql) > 0:
            self.__totalNum += len(stockdailydataDict)
            self.dbProxy.commit()
            return True
        return False
    
    def __parseBFQStockData(self, content, url, checkDate=True):
        """Parse unadjusted (BFQ) daily rows out of *content*.

        Returns a {date-string: StockDailyData} dict (possibly empty), or
        None when the content is empty or no table rows were found.
        Unparsable rows are recorded as failures and skipped.
        """
        if content == '':
            Logging.LOGGER.debug('content is none:%s', url)
            return None
        rows = self.__parseRaw(content, url)
        if not rows:
            Logging.LOGGER.debug('No trs for url:%s', url)
            return None
        values = dict()
        for rowidx, row in enumerate(rows):
            try:
                cells = row.findAll('td')
                sdate = str(cells[0].text.strip())
                if checkDate and not self.__isDateInRange(sdate):
                    continue
                open_bfq = float(cells[1].text.strip())
                high_bfq = float(cells[2].text.strip())
                close_bfq = float(cells[3].text.strip())
                low_bfq = float(cells[4].text.strip())
                vol = int(float(cells[5].text.strip()))
                amount = float(cells[6].text.strip())
                values[sdate] = StockDailyData(sdate, open_bfq, high_bfq, low_bfq, close_bfq, vol, amount)
            except Exception as e:
                self.__recordFailure(url, 'Fail to parse row %d, %s' % (rowidx, str(e)))
                continue
        return values
    
    def __generateQueryYearsAndQuarters(self, content):
        """Build the {year: [quarters 4..1]} crawl plan from the page's
        year <select> element.

        Years are limited to the configured start/end years, and each
        year's quarter list keeps only quarters overlapping the configured
        date ranges.  Returns None when the selector or its options are
        missing from the page.
        """
        soup = BeautifulSoup(content)
        year_selects = soup.findAll('select', {'name': 'year'})
        if not year_selects:
            return None
        options = year_selects[0].findAll('option')
        if not options:
            return None

        years = [y for y in (int(option['value']) for option in options)
                 if self.__startYear <= y <= self.__endYear]
        ret = dict()
        for year in years:
            # quarters are visited newest-first (4, 3, 2, 1)
            ret[year] = [q for q in range(4, 0, -1) if self.__isInRange(year, q)]

        return ret
    
    def __isDateInRange(self, sdate):
        """Return True when the 'YYYY-MM-DD' string *sdate* falls inside any
        configured (start, end) date range, inclusive on both ends."""
        if not self.__dateRanges:
            return False
        day = datetime.datetime.strptime(sdate, '%Y-%m-%d').date()
        return any(start <= day <= end for (start, end) in self.__dateRanges)
            
    def __isInRange(self, year, quarter):
        """Return True when calendar quarter 1-4 of *year* overlaps any
        configured inclusive (start, end) date range."""
        if not self.__dateRanges:
            return False
        q_begin = datetime.date(year, 3 * (quarter - 1) + 1, 1)
        if quarter == 4:
            q_end = datetime.date(year, 12, 31)
        else:
            # last day of the quarter: first day of next quarter minus one
            q_end = datetime.date(year, 3 * quarter + 1, 1) - datetime.timedelta(days=1)
        for (start, end) in self.__dateRanges:
            begins_inside = start <= q_begin <= end
            ends_inside = start <= q_end <= end
            spans_range = q_begin <= start and q_end >= end
            if begins_inside or ends_inside or spans_range:
                return True

        return False
    
    def __getSelectedYearAndQuarter(self, content):
        """Read the currently selected year and quarter from the page's
        'year' and 'jidu' <select> elements.

        Returns (year, quarter) -- either element may be None when no
        option is marked selected -- or None when a <select> is missing
        from the page entirely.
        """
        soup = BeautifulSoup(content)

        def selected_value(select_name):
            # -> (value_or_None, select_found)
            selects = soup.findAll('select', {'name': select_name})
            if not selects:
                return None, False
            chosen = [o for o in selects[0].findAll('option') if o.has_attr('selected')]
            value = int(chosen[0]['value']) if chosen else None
            return value, True

        selected_year, found = selected_value('year')
        if not found:
            return None
        selected_quarter, found = selected_value('jidu')
        if not found:
            return None

        return (selected_year, selected_quarter)
    
    def __recordFailure(self, url, reason):
        """Record a crawl/parse failure for *url* in the <table>_failure
        table: bump the status counter when the url is already recorded,
        otherwise insert a new row with year/quarter/type parsed from
        the url (0 when a query parameter is absent)."""
        sql = 'SELECT count(*) from %s_failure where url="%s"' % (self.__table_name, url)
        self.dbProxy.execute(sql)
        count = self.dbProxy.cur.fetchone()[0]
        if count > 0:
            sql = 'UPDATE %s_failure SET status=status+1 where url="%s"' % (self.__table_name, url)
        else:
            year = self.__extractIntParam(url, 'year=')
            quarter = self.__extractIntParam(url, 'jidu=')
            # BFQ pages come from the vMS_MarketHistory endpoint
            if url.find('vMS_MarketHistory') > 0:
                stype = 'BFQ'
            else:
                stype = 'FQ'
            # BUGFIX: original column list read "quarter,stype reason" (missing
            # comma -> invalid SQL) and formatted raw string slices with %d,
            # which raised TypeError before the insert could ever execute.
            sql = 'INSERT INTO %s_failure (url, stockid, market, year, quarter, stype, reason) values ("%s","%s","%s",%d, %d, "%s", "%s")' %\
                    (self.__table_name, url, self.__sinaStockid, self.__market, year, quarter, stype, reason)
        if self.dbProxy.execute(sql) > 0:
            self.dbProxy.commit()

    def __extractIntParam(self, url, marker):
        """Parse the integer query value directly following *marker* in
        *url*; returns 0 when the marker is absent or non-numeric."""
        index = url.find(marker)
        if index < 0:
            return 0
        match = re.match(r'\d+', url[index + len(marker):])
        return int(match.group(0)) if match else 0
        
class StockDailyData(object):
    """One trading day's quote for a stock.

    Construction supplies the unadjusted (BFQ) OHLC prices plus volume and
    amount; the adjusted (HFQ) prices and adjustment factor start at
    0.0 / 1 and are filled in later from the adjusted-price page.
    """

    def __init__(self, sdate, open_bfq, high_bfq, low_bfq, close_bfq, vol, amount):
        self.sdate = sdate
        self.open_bfq = open_bfq
        self.high_bfq = high_bfq
        self.low_bfq = low_bfq
        self.close_bfq = close_bfq
        self.vol = vol
        self.amount = amount
        # adjusted figures: placeholders until the FQ parse overwrites them
        self.factor = 1
        self.open_hfq = self.high_hfq = self.close_hfq = self.low_hfq = 0.0

class DBUpdater(RequestHandler):
    """Request handler that serializes SQL writes through one shared
    database proxy.  Requests carry action='dbupdate' and an 'sql' string."""

    def __init__(self, dbProxy):
        super(DBUpdater, self).__init__(sleep_time=0.001)
        self.dbProxy = dbProxy

    def _handleRequest(self, request):
        """Dispatch one queued request; True when handled here or by the
        base class, False for unknown actions."""
        if super(DBUpdater, self)._handleRequest(request):
            return True
        action = request['action']

        if action == 'dbupdate':
            # BUGFIX: the original recognized 'dbupdate' but never invoked
            # __handleDBUpdate, so every queued SQL statement was silently
            # dropped.
            self.__handleDBUpdate(request)
            return True

        return False

    def __handleDBUpdate(self, request):
        """Best-effort execute-and-commit of request['sql'].  Errors are
        logged but never propagated so the updater thread stays alive."""
        try:
            sql = request['sql']
            if self.dbProxy.execute(sql) > 0:
                self.dbProxy.commit()
        except Exception:
            # was a bare 'except: pass'; keep best-effort but leave a trace
            Logging.LOGGER.exception('Fail to execute db update request')
if __name__ == '__main__':
    # Single-instance guard: refuse to start while a pid file from a
    # previous 'ssc' run still exists.
    if PIDUtils.isPidFileExist('ssc'):
        print 'Previous sinastockcrawler process is on-going, please stop it firstly'
        sys.exit(1)
    import os
    pid = os.getpid()
    # Record our pid so later invocations (and the stop mechanism) can find us.
    PIDUtils.writePid('ssc', pid)

    ssc = SinaStockCrawler('conf/ssc.cfg')
    ssc.start()
    # Watchdog tied to the pid file that can trigger ssc.shutDown; the
    # meaning of the third argument (presumably a poll interval in seconds)
    # is defined by PIDUtils elsewhere -- TODO confirm.
    pidutils = PIDUtils('ssc', ssc.shutDown, 5)
    pidutils.start()
    sys.exit(0)
        
        