# -*- coding:utf-8 -*-
'''
Created on Jan 16, 2016

@author: eyaomai
'''
import sys
sys.path.append(sys.argv[0][:sys.argv[0].rfind('com/stocklens')])
import os
import traceback
import json
from com.stocklens.stock.common.utils import Logging,  PIDUtils, CTPUtils
from com.stocklens.stock.data.crawlercommon import CrawlerConstants, CrawlerManager, CrawlerBase
import datetime, time
from decimal import Decimal

# Recipients of the limit-up/limit-down alert mails sent via the system
# `mail` command (see EmRtRankCrawler.__sendEmail).
EMAILS = [
          "343208108@qq.com",
          "10455006@qq.com"
          ]
class EmRtRankCrawlerManager(CrawlerManager):
    '''
    Crawler manager that schedules a single EmRtRankCrawler task which
    polls East Money's real-time ranking feed.
    '''

    # Logger section name for this crawler; also used as the PID-file key
    # in the __main__ guard below.
    LOGGER_NAME_CRAWL = 'err'

    def __init__(self, json_config_file):
        '''
        Constructor.

        :param json_config_file: path to this crawler's JSON config file
        '''
        super(EmRtRankCrawlerManager, self).__init__(json_config_file, 0.1, None)
        self.logger = Logging.getLogger(EmRtRankCrawlerManager.LOGGER_NAME_CRAWL)

    def _initTask(self):
        # A single task; the payload is the page size used when fetching
        # the ranking pages.
        self._taskList.append(3000)

    def _generateTask(self, task, checkTaskList=True):
        '''
        Pop the next page size off the task list and populate ``task``
        with the page size and the crawler class to instantiate.

        :returns: True when a task was generated, False otherwise
        '''
        # Fix: honor the caller-supplied checkTaskList flag instead of
        # hard-coding True (the parameter was previously ignored).
        if super(EmRtRankCrawlerManager, self)._generateTask(task, checkTaskList) is False:
            return False
        pageSize = self._taskList.pop(0)
        task[EmRtRankCrawler.PARA_PAGESIZE] = pageSize
        task[CrawlerConstants.PARA_CLASS] = EmRtRankCrawler
        return True

class EmRtRankCrawler(CrawlerBase):
    '''
    Polls East Money's real-time ranking feed during market-data hours,
    counts how many stocks are at limit-up/limit-down (or up/down 5%+),
    persists the counts to the ``em_rtrank`` table, and emails an alert
    whenever either count crosses into a different threshold band.
    '''

    # Key under which the manager stores the page size in the task dict.
    PARA_PAGESIZE = 'pageSize'
    # %d placeholders: page size, page number.
    JS_URL = 'http://hqdigi2.eastmoney.com/EM_Quote2010NumericApplication/index.aspx?type=s&sortType=C&sortRule=-1&pageSize=%d&page=%d&jsName=quote_123&style=33'

    # (tp, np) fixture pairs cycled through by __gentestdata.
    __TEST_DATA = (
        (10, 10), (52, 13), (105, 10), (160, 17),
        (18, 66), (19, 111), (51, 303), (77, 404),
    )

    def __init__(self, controller, dbProxy, request):
        '''
        :param controller: owning manager (provides logger / randomSleep)
        :param dbProxy: database proxy used by __updateDB
        :param request: task dict; must contain PARA_PAGESIZE
        '''
        super(EmRtRankCrawler, self).__init__(controller, dbProxy, request)
        self.__pageSize = request[EmRtRankCrawler.PARA_PAGESIZE]
        self.logger = controller.logger
        # Rotation index for the __gentestdata debug helper only.
        self.__testDataIndex = 0

    def __gentestdata(self):
        '''
        Debug helper: return a canned rank dict, cycling through the
        fixtures on each call.

        Fix: the original kept the rotation counter in a local that was
        reset to 0 every call, so the elif ladder after the first branch
        was dead code and the helper always returned the same fixture.
        The counter now lives on the instance so successive calls rotate
        as intended.  (Only referenced from commented-out debug code in
        run(); production behavior is unaffected.)
        '''
        tp, np = EmRtRankCrawler.__TEST_DATA[self.__testDataIndex]
        self.__testDataIndex = (self.__testDataIndex + 1) % len(EmRtRankCrawler.__TEST_DATA)
        rankDict = {
                'p10': tp,
                'n10': np,
                'p5': tp + 10,
                'n5': np + 10
                }
        return rankDict

    def run(self):
        '''
        Main polling loop: while inside market-data hours, re-count the
        four rank buckets roughly every 5 seconds and send an alert email
        whenever the limit-up or limit-down count crosses into a different
        threshold band.  Exits for good after 15:01:00 local time.
        '''
        super(EmRtRankCrawler, self).run()
        # Current threshold-band index for the p10 / n10 counts.
        pstatus = 0
        nstatus = 0
        # Band boundaries for limit-up (p10) and limit-down (n10) counts.
        pthreshold = [50, 100, 150]
        nthreshold = [50, 100, 300]
        lastRankDict = None
        while True:
            now = datetime.datetime.now()
            if now.strftime('%H:%M:%S') > '15:01:00':
                break
            if not CTPUtils.isMDTime(CTPUtils.Singleton, now):
                # Outside market-data hours: idle-wait and re-check.
                time.sleep(5)
                continue
            rankDict = self.__parse(lastRankDict)
            lastRankDict = rankDict
            # debug: rankDict = self.__gentestdata()
            pindex = self.__checkIndex(rankDict['p10'], pthreshold)
            nindex = self.__checkIndex(rankDict['n10'], nthreshold)
            update = False
            if pstatus != pindex:
                pstatus = pindex
                update = True
            if nstatus != nindex:
                nstatus = nindex
                update = True
            if update:
                self.logger.info('plan to send email as pl change:%s', rankDict)
                self.__sendEmail(rankDict)
            time.sleep(5)
            self.controller.randomSleep()
        self._reportDone(CrawlerConstants.VAL_STATUS_FINISH)
        self.logger.info('Finish Crawl')

    def __checkIndex(self, value, threshold):
        '''
        Return the index of the first threshold that ``value`` is below,
        or len(threshold) when value is >= all of them.  The ``threshold``
        list is assumed to be ascending.
        '''
        for i, bound in enumerate(threshold):
            if value < bound:
                return i
        return len(threshold)

    def __sendEmail(self, rankDict):
        '''
        Send the alert mail via the system ``mail`` command to EMAILS.

        NOTE(review): the command line is assembled by string formatting
        and run through a shell via os.popen.  The interpolated values
        here are internally computed integers, but this pattern is
        fragile; consider subprocess with an argument list if the inputs
        ever become less controlled.
        '''
        content = '''
股票涨跌幅统计
    涨停：%d
   跌停：%d
   涨幅超5%%（不含涨停）：%d
   跌幅超5%%（不含跌停）：%d 
''' % (rankDict['p10'], rankDict['n10'], rankDict['p5'], rankDict['n5'])
        title = '股票涨跌幅：%d跌停，%d涨停' % (rankDict['n10'], rankDict['p10'])
        echoStr = 'echo "%s" | mail -s "%s" %s' % (
                                              content,
                                              title,
                                              ' '.join(EMAILS))

        os.popen(echoStr)

    def __updateDB(self, rankDict, sdatetime):
        '''
        Replace the em_rtrank rows for the four counters with the latest
        values, stamped with ``sdatetime``.

        NOTE(review): SQL is built by string concatenation.  The keys and
        values here are internal constants/ints, but parameterized queries
        would be safer if that ever changes.
        '''
        dsql = 'DELETE from em_rtrank where name in ("%s")' % '","'.join(rankDict.keys())
        if self.dbProxy.execute(dsql) > 0:
            self.dbProxy.commit()
        isql = 'INSERT INTO em_rtrank (name, value, sdatetime) values '
        values = list()
        for key in rankDict:
            valueStr = '("' + key + '","' + str(rankDict[key]) + '", "' + sdatetime + '")'
            values.append(valueStr)
        if self.dbProxy.execute(isql + ','.join(values)) > 0:
            self.totalNum += len(values)
            self.dbProxy.commit()

    def __translateFloat(self, text):
        '''
        Parse a numeric field from the feed: '-' means no data (0),
        a trailing '%' is converted to a fraction, otherwise plain float.
        '''
        if text == '-':
            return 0
        elif text[-1] == '%':
            return float(text[:-1]) / 100.0
        else:
            return float(text)

    def __parse(self, lastRankDict):
        '''
        Fetch every ranking page, bucket each stock into one of the four
        counters, and persist the counts when they differ from the
        previous poll.

        :param lastRankDict: dict returned by the previous poll, or None
        :returns: dict with keys 'p10', 'n10', 'p5', 'n5'
        '''
        rankDict = {
                    'p10': 0,   # at/above the +10% limit-up price
                    'n10': 0,   # at/below the -10% limit-down price
                    'p5': 0,    # up 5%+ but not limit-up
                    'n5': 0     # down 5%+ but not limit-down
                    }
        pageNum = 1
        sdatetime = None
        while True:
            url = EmRtRankCrawler.JS_URL % (self.__pageSize, pageNum)
            content = self._fetchContent(url)
            # The feed is JSONP-ish: cut out the {...} payload and quote
            # the bare rank/pages keys so json.loads accepts it.
            lindex = content.find('{')
            rindex = content.rfind('}')
            js = content[lindex:rindex + 1]
            js = js.replace('rank', '"rank"').replace('pages', '"pages"')
            jo = json.loads(js)
            totalPage = jo['pages']
            data = jo['rank']
            for item in data:
                fields = item.split(',')
                pl = self.__translateFloat(fields[11])    # percent change
                copen = self.__translateFloat(fields[4])  # opening price
                # 0 change and 0 open reads as an untraded row; presumably
                # the feed sorts these last, so skip the rest of the page.
                if pl == 0 and copen == 0:
                    break
                # Track the newest quote timestamp seen on any row
                # (fields[28] — assumed to be the quote time; verify
                # against the feed format).
                if sdatetime is None:
                    sdatetime = fields[28]
                elif sdatetime < fields[28]:
                    sdatetime = fields[28]
                lastclose = self.__translateFloat(fields[3])
                # Limit prices: last close +/-10%, rounded half-up to cents.
                upper = float(Decimal(str(lastclose * 1.1)).quantize(Decimal('.01'), rounding='ROUND_HALF_UP'))
                lower = float(Decimal(str(lastclose * 0.9)).quantize(Decimal('.01'), rounding='ROUND_HALF_UP'))

                currentprice = self.__translateFloat(fields[5])
                if currentprice >= upper:
                    rankDict['p10'] += 1
                elif pl >= 0.05:
                    rankDict['p5'] += 1
                elif currentprice <= lower:
                    rankDict['n10'] += 1
                elif pl <= -0.05:
                    rankDict['n5'] += 1

            if pageNum < totalPage:
                pageNum += 1
            else:
                break
            self.controller.randomSleep()
        # Only touch the database when the counts actually changed.
        if rankDict != lastRankDict:
            self.__updateDB(rankDict, sdatetime)
        return rankDict

if __name__ == '__main__':
    # Refuse to start while a previous instance is still running.
    if PIDUtils.isPidFileExist('err'):
        print('Previous East Money Ranking process is on-going, please stop it firstly')
        sys.exit(1)
    PIDUtils.writePid('err', os.getpid())
    Logging.initLogger('conf/crawler/crawler.logging.cfg')
    # Shared market-data-time helper used by the crawler's run loop.
    CTPUtils.Singleton = CTPUtils()
    CTPUtils.Singleton.start()
    manager = EmRtRankCrawlerManager('conf/crawler/err.cfg')
    manager.start()
    # Watchdog: polls the PID file and shuts the manager down on removal.
    watcher = PIDUtils('err', manager.shutDown, 5, manager.logger)
    watcher.start()
    sys.exit(0)