'''
Created on Apr 10, 2016

@author: eyaomai
'''

import sys
sys.path.append(sys.argv[0][:sys.argv[0].rfind('com/stocklens')])
import traceback
import json
from com.stocklens.stock.common.utils import Logging,  PIDUtils, CTPUtils
from com.stocklens.stock.data.crawlercommon import CrawlerConstants, CrawlerManager, CrawlerBase
from com.stocklens.stock.data.sina.sinacommon import SinaConstants
import datetime, time

class EmHgtZjlCrawlerManager(CrawlerManager):
    """Manager that schedules the East Money HGT (Stock Connect)
    capital-flow crawl tasks, one per market leg, each writing a CSV sheet.
    """
    LOGGER_NAME_CRAWL = 'ehz'

    def __init__(self, json_config_file):
        '''
        Constructor.

        json_config_file -- path to the crawler JSON config passed to the
                            base CrawlerManager (together with a 0.1 rate
                            parameter and no extra argument).
        '''
        super(EmHgtZjlCrawlerManager, self).__init__(json_config_file, 0.1, None)
        self.logger = Logging.getLogger(EmHgtZjlCrawlerManager.LOGGER_NAME_CRAWL)

    def _initTask(self):
        # Each task tuple is (pageSize, market, output sheet file name).
        # market 1 and 2 select the two endpoint legs -- presumably the
        # Shanghai and Hong Kong sides; verify against the endpoint docs.
        self._taskList.append((1000, 1, 'hgtzjl_sh.csv'))
        self._taskList.append((1000, 2, 'hgtzjl_hk.csv'))

    def _generateTask(self, task, checkTaskList=True):
        """Pop the next pending task tuple into *task*; return False when
        the base class rejects the request (e.g. no work left)."""
        # Bug fix: forward the caller's checkTaskList instead of the
        # hard-coded True the original passed (default unchanged, so
        # existing callers behave identically).
        if super(EmHgtZjlCrawlerManager, self)._generateTask(task, checkTaskList) is False:
            return False
        pageSize, market, sheetName = self._taskList.pop(0)
        task[EmHgtZjlCrawler.PARA_PAGESIZE] = pageSize
        task[EmHgtZjlCrawler.PARA_MAREKT] = market
        task[EmHgtZjlCrawler.PARA_SHEET_NAME] = sheetName
        task[CrawlerConstants.PARA_CLASS] = EmHgtZjlCrawler
        return True

class EmHgtZjlCrawler(CrawlerBase):
    """Crawls East Money's HGT (Stock Connect) daily data endpoint and
    writes the collected per-date records, sorted by date, to a CSV sheet.

    Request parameters (keys in the *request* dict):
        pageSize  -- number of rows requested per page
        market    -- endpoint market id (1 or 2); also selects the quota
                     constant used when deriving the `zjlr` column
        sheetName -- path of the CSV file to write
    """
    PARA_PAGESIZE = 'pageSize'
    PARA_MAREKT = 'market'
    PARA_SHEET_NAME = 'sheetName'
    # %d placeholders, in order: page number, page size, market id.
    JS_URL ='http://datainterface.eastmoney.com/EM_DataCenter/JS.aspx?type=SHT&sty=SHTHPS&st=2&sr=-1&p=%d&ps=%d&js=var%%20elzHZMbw={pages:(pc),data:[(x)]}&mkt=%d&rt=48676058'

    def __init__(self, controller, dbProxy, request):
        super(EmHgtZjlCrawler, self).__init__(controller, dbProxy, request)
        self.__pageSize = request[EmHgtZjlCrawler.PARA_PAGESIZE]
        self.__market = request[EmHgtZjlCrawler.PARA_MAREKT]
        self.__sheetName = request[EmHgtZjlCrawler.PARA_SHEET_NAME]
        self.logger = controller.logger

    def run(self):
        """Crawl all pages, write the CSV sheet, then report completion."""
        super(EmHgtZjlCrawler, self).run()
        status = self.__parse()
        self._reportDone(status)
        self.logger.info('Finish Crawl')

    def __parse(self):
        """Page through the JS endpoint, collect one record per date, and
        dump them (sorted by date ascending) to self.__sheetName."""
        pageNum = 1
        datadict = dict()
        while True:
            url = EmHgtZjlCrawler.JS_URL % (pageNum, self.__pageSize, self.__market)
            content = self._fetchContent(url)
            # The response is a JS assignment ("var elzHZMbw={...}"): slice
            # out the object literal and quote its two bare keys so the
            # standard json parser accepts it.
            lindex = content.find('{')
            rindex = content.rfind('}')
            js = content[lindex:rindex + 1]
            js = js.replace('pages', '"pages"').replace('data', '"data"')
            jo = json.loads(js)
            totalPage = jo['pages']
            data = jo['data']
            for item in data:
                # Each item is a comma-joined record; field 0 is the date.
                # Fields 5, 2, 3, 10, 11 feed the CSV columns below --
                # presumably balance, buy, sell, index level and index pnl;
                # TODO confirm against the endpoint's field layout.
                fields = item.split(',')
                datadict[fields[0]] = (fields[5], fields[2], fields[3], fields[10], fields[11])

            if pageNum < totalPage:
                pageNum += 1
            else:
                break

            self.controller.randomSleep()

        if len(datadict) > 0:
            sortedList = sorted(datadict.items(), key=lambda x: x[0], reverse=False)
            # Per-market constant subtracted from field[5]/100 to produce
            # the zjlr column (130.0 for market 1, 105.0 otherwise).
            if self.__market == 1:
                total_zjlr = 130.0
            else:
                total_zjlr = 105.0
            # Bug fix: open via a context manager so the file handle is
            # closed even if a row fails to parse/format mid-loop (the
            # original leaked the handle on any exception before close()).
            with open(self.__sheetName, 'w') as fn:
                fn.write('date,zjlr,jme,index,pnl\n')
                for sdate, values in sortedList:
                    zjlr = total_zjlr - round(float(values[0]) / 100.0, 2)
                    jme = round((float(values[1]) - float(values[2])) / 100.0, 2)
                    index_value = values[3]
                    # Drop the trailing character -- presumably a '%' sign;
                    # TODO confirm against a sample response.
                    index_pnl = values[4][:-1]
                    fn.write('%s,%s,%s,%s,%s\n' % (sdate, zjlr, jme, index_value, index_pnl))

        return CrawlerConstants.VAL_STATUS_FINISH

if __name__ == '__main__':
    if PIDUtils.isPidFileExist('ehz'):
        print 'Previous East Money GuzhiQihuo signal process is on-going, please stop it firstly'
        sys.exit(1)
    import os
    pid = os.getpid()
    PIDUtils.writePid('ehz', pid)
    Logging.initLogger('conf/crawler/crawler.logging.cfg')
    CTPUtils.Singleton = CTPUtils()
    CTPUtils.start(CTPUtils.Singleton)
    ehz = EmHgtZjlCrawlerManager('conf/crawler/ehz.cfg')
    ehz.start()
    pidutils = PIDUtils('ehz', ehz.shutDown, 5, ehz.logger)
    pidutils.start()
    sys.exit(0)                      