'''
Created on Jan 23, 2016

@author: eyaomai
'''
import sys
sys.path.append(sys.argv[0][:sys.argv[0].rfind('com/stocklens')])
import os
import traceback
import json
from com.stocklens.stock.common.utils import Logging,  PIDUtils, CTPUtils
from com.stocklens.stock.data.crawlercommon import CrawlerConstants, CrawlerManager, CrawlerBase
'''
def updateJg(al):
  import MySQLdb
  sql = 'insert into em_jg (jgid,jgname) values '
  values = ['("%s","%s")'% (item['href'][item['href'].rfind('/')+1:item['href'].rfind('.')], item.text.strip()) for item in al]
  sql += ','.join(values)
  para = {'host':'localhost','user':'dataservice', 'passwd':'dataservice', 'db':'tradingdatadb', 'charset':'utf8', 'port':3306}
  conn = MySQLdb.Connect(**para)
  cur=conn.cursor()
  cur.execute(sql)
  conn.commit()
  cur.close()
  conn.close()
'''
class EmLhbYybCrawlerManager(CrawlerManager):
    """Crawler manager for East Money LHB brokerage-branch (yyb) data.

    Loads the list of institution ids (jgid) from the em_jg table and
    dispatches one EmLhbYybCrawler task per institution.
    """
    # Logger name; also used as the PID-file key in __main__.
    LOGGER_NAME_CRAWL = 'ely'

    def __init__(self, json_config_file):
        """Initialize from the crawler JSON config file.

        NOTE(review): the 0.1 passed to the base class is presumably a
        sleep/throttle factor -- confirm against CrawlerManager.
        """
        super(EmLhbYybCrawlerManager, self).__init__(json_config_file, 0.1, None)
        self.logger = Logging.getLogger(EmLhbYybCrawlerManager.LOGGER_NAME_CRAWL)

    def _initTask(self):
        """Fill the task list with every jgid found in the em_jg table."""
        sql = 'select jgid from em_jg'
        if self.dbProxy.execute(sql) > 0:
            self._taskList.extend([x[0] for x in self.dbProxy.cur.fetchall()])

    def _generateTask(self, task, checkTaskList=True):
        """Pop the next jgid and populate *task* for an EmLhbYybCrawler.

        Returns False when the base class rejects the request (e.g. the
        task list is exhausted), True otherwise.
        """
        # Bug fix: forward checkTaskList instead of hard-coding True so a
        # caller passing checkTaskList=False is actually honored.
        if super(EmLhbYybCrawlerManager, self)._generateTask(task, checkTaskList) is False:
            return False
        jgid = self._taskList.pop(0)
        task[EmLhbYybCrawler.PARA_JGID] = jgid
        task[CrawlerConstants.PARA_CLASS] = EmLhbYybCrawler
        return True
        

class EmLhbYybCrawler(CrawlerBase):
    """Crawls East Money LHB per-branch (yyb) statistics for one institution.

    Pages through the JS.aspx data endpoint for the jgid carried in the
    request, parses each comma-separated row, and bulk-inserts the results
    into em_lhb_yyb after first deleting any previous rows for that jgid.
    """
    # NOTE(review): the {period} placeholder is never substituted -- the URL
    # is only %-formatted with (pageNum, jgid), so the literal text
    # "stat={period}" is sent to the server. Confirm the endpoint tolerates
    # this, or format a period value in.
    JS_URL = 'http://datainterface.eastmoney.com/EM_DataCenter/JS.aspx?type=LHB&sty=YYTJBY&stat={period}&sr=0&st=1&p=%d&ps=300&js=var%%20QbkwWAxQ={%%22data%%22:[(x)],%%22pages%%22:%%22(pc)%%22,%%22update%%22:%%22(ud)%%22}&code=%s&rt=48450634'
    # Key under which the manager stores the institution id in the request.
    PARA_JGID = 'JGID'

    def __init__(self, controller, dbProxy, request):
        """*request* must carry EmLhbYybCrawler.PARA_JGID (institution id)."""
        super(EmLhbYybCrawler, self).__init__(controller, dbProxy, request)
        self.__jgid = request[EmLhbYybCrawler.PARA_JGID]
        self.logger = controller.logger

    def run(self):
        """Entry point: wipe old rows for this jgid, then crawl and insert."""
        super(EmLhbYybCrawler, self).run()
        if self.__cleanDB() is False:
            self._reportDone(CrawlerConstants.VAL_STATUS_FAILURE)
            return
        status = self.__parse()
        self._reportDone(status)
        self.logger.info('Finish Crawl')

    def __cleanDB(self):
        """Delete previous em_lhb_yyb rows for this jgid; True on success."""
        # SECURITY NOTE: jgid is interpolated directly into the SQL string.
        # It originates from the local em_jg table rather than user input,
        # but a parameterized query via dbProxy would be safer if supported.
        dsql = 'DELETE from em_lhb_yyb where jgid="%s"' % (self.__jgid)
        self.logger.info('plan to delete for %s', self.__jgid)
        if self.dbProxy.execute(dsql) >= 0:
            self.dbProxy.commit()
            return True
        return False

    def __updateDB(self, values):
        """Bulk-insert the accumulated value tuples into em_lhb_yyb."""
        self.logger.info('plan to insert %d for %s', len(values), self.__jgid)
        isql = 'INSERT INTO em_lhb_yyb (jgid,yybid,yybname,district,dealamount,buyamount,sellamount,buycount,sellcount,firstthree) values '
        if self.dbProxy.execute(isql + ','.join(values)) > 0:
            self.totalNum += len(values)
            self.dbProxy.commit()

    def __parse(self):
        """Fetch every page for this jgid and stage rows for insertion.

        Returns CrawlerConstants.VAL_STATUS_FINISH; a page whose payload is
        not valid JSON is treated as "no data" and ends the crawl.
        """
        pageNum = 1
        DB_SIZE = 50  # flush to the DB once this many rows are buffered
        values = list()
        while True:
            url = EmLhbYybCrawler.JS_URL % (pageNum, self.__jgid)
            content = self._fetchContent(url)
            # The response is a JS var assignment; slice out the JSON object.
            lindex = content.find('{')
            rindex = content.rfind('}')
            js = content[lindex:rindex + 1]
            try:
                jo = json.loads(js)
            except ValueError:
                # Bug fix: was a bare "except:" that would also swallow
                # KeyboardInterrupt/SystemExit; json.loads raises ValueError
                # on malformed input (Python 2).
                self.logger.warn('No data for %s', self.__jgid)
                return CrawlerConstants.VAL_STATUS_FINISH
            totalPage = int(jo['pages'])
            data = jo['data']
            for item in data:
                fields = item.split(',')
                yybid = fields[0]
                yybname = fields[12]
                district = fields[6]
                dealamount = float(fields[10])
                buyamount = float(fields[11])
                sellamount = float(fields[5])
                buycount = int(fields[3])
                sellcount = int(fields[2])
                firstthree = fields[13]
                valuestr = '("%s","%s","%s","%s",%f,%f,%f,%d,%d,"%s")' %\
                            (self.__jgid, yybid, yybname, district,
                             dealamount, buyamount, sellamount,
                             buycount, sellcount, firstthree
                             )
                values.append(valuestr)
                # Bug fix: flush at exactly DB_SIZE rows (was "> DB_SIZE",
                # which actually batched 51 rows at a time).
                if len(values) >= DB_SIZE:
                    self.__updateDB(values)
                    values = list()

            if pageNum < totalPage:
                pageNum += 1
            else:
                break
            # Throttle between page fetches.
            self.controller.randomSleep()

        # Flush any remaining rows that did not fill a complete batch.
        if values:
            self.__updateDB(values)

        return CrawlerConstants.VAL_STATUS_FINISH

if __name__ == '__main__':
    # Refuse to start if a previous run is still alive (PID file present).
    if PIDUtils.isPidFileExist('ely'):
        # Parenthesized single argument: identical output under Python 2's
        # print statement, and forward-compatible with Python 3.
        print('Previous East Money Lhb Yyb process is on-going, please stop it firstly')
        sys.exit(1)
    # Bug fix: removed redundant "import os" -- os is already imported at
    # module level.
    PIDUtils.writePid('ely', os.getpid())
    Logging.initLogger('conf/crawler/crawler.logging.cfg')
    ely = EmLhbYybCrawlerManager('conf/crawler/ely.cfg')
    ely.start()
    # Watch the PID file and invoke ely.shutDown (polling every 5, per the
    # PIDUtils contract) so an external PID-file removal stops the manager.
    pidutils = PIDUtils('ely', ely.shutDown, 5, ely.logger)
    pidutils.start()
    sys.exit(0)