# -*- coding: UTF-8 -*-
'''
Created on Apr 3, 2016

@author: eyaomai
'''
import sys
sys.path.append(sys.argv[0][:sys.argv[0].rfind('com/stocklens')])
import traceback
import json
from com.stocklens.stock.common.utils import Logging,  PIDUtils, CTPUtils
from com.stocklens.stock.data.crawlercommon import CrawlerConstants, CrawlerManager, CrawlerBase
from com.stocklens.stock.data.sina.sinacommon import SinaConstants
import datetime, time

class EmGzqhJccCrawlerManager(CrawlerManager):
    """Crawler manager for East Money stock-index-futures (GuZhi QiHuo)
    institutional position data.

    Queues one download task per index-futures contract; each task is
    executed by EmGzqhJccCrawler and written to a per-contract CSV file.
    """
    LOGGER_NAME_CRAWL = 'egj'

    def __init__(self, json_config_file):
        """
        :param json_config_file: path to the crawler's JSON configuration
            file, forwarded to the CrawlerManager base class.
        """
        # 0.1 and None are forwarded verbatim to CrawlerManager; their exact
        # meaning (presumably crawl interval and an optional context) is
        # defined by the base class -- confirm against crawlercommon.
        super(EmGzqhJccCrawlerManager, self).__init__(json_config_file, 0.1, None)
        self.logger = Logging.getLogger(EmGzqhJccCrawlerManager.LOGGER_NAME_CRAWL)

    def _initTask(self):
        """Seed the task list with one (url, output-csv-name) pair per
        contract: IF (CSI 300), IC (CSI 500), IH (SSE 50)."""
        self._taskList.extend([
                               ('http://datainterface.eastmoney.com/EM_DataCenter/JS.aspx?type=QHCC&sty=JCW&code=if1604&cb=callback&callback=callback&_=','GZ300.csv'),
                               ('http://datainterface.eastmoney.com/EM_DataCenter/JS.aspx?type=QHCC&sty=JCW&code=ic1604&cb=callback&callback=callback&_=','GZ500.csv'),
                               ('http://datainterface.eastmoney.com/EM_DataCenter/JS.aspx?type=QHCC&sty=JCW&code=ih1604&cb=callback&callback=callback&_=','GZ50.csv')
                               ])

    def _generateTask(self, task, checkTaskList=True):
        """Fill *task* with the next queued URL / sheet name and the crawler
        class to run it with.

        :param task: mutable dict the base class hands out to a worker.
        :param checkTaskList: forwarded to the base implementation.
        :returns: False when the base class declines to generate a task
            (e.g. the task list is exhausted), True otherwise.
        """
        # BUG FIX: the original passed a hard-coded True to the base call,
        # silently ignoring the caller-supplied checkTaskList flag.
        if super(EmGzqhJccCrawlerManager, self)._generateTask(task, checkTaskList) is False:
            return False
        (url, name) = self._taskList.pop(0)
        task[EmGzqhJccCrawler.PARA_URL] = url
        task[EmGzqhJccCrawler.PARA_SHEET_NAME] = name
        task[CrawlerConstants.PARA_CLASS] = EmGzqhJccCrawler
        return True

class EmGzqhJccCrawler(CrawlerBase):
    """Downloads one East Money futures-position JSONP endpoint and writes
    the daily rows to a CSV file named by the task's 'sheetname'."""

    # Keys this crawler reads from the task/request dict.
    PARA_URL = 'url'
    PARA_SHEET_NAME = 'sheetname'

    def __init__(self, controller, dbProxy, request):
        """
        :param controller: owning manager; its logger is reused here.
        :param dbProxy: database proxy, forwarded to CrawlerBase.
        :param request: task dict carrying PARA_URL and PARA_SHEET_NAME.
        """
        super(EmGzqhJccCrawler, self).__init__(controller, dbProxy, request)
        self.url = request[EmGzqhJccCrawler.PARA_URL]
        self.sheetname = request[EmGzqhJccCrawler.PARA_SHEET_NAME]
        self.logger = controller.logger

    def run(self):
        """Execute the crawl and report the resulting status code."""
        super(EmGzqhJccCrawler, self).run()
        self._reportDone(self.__parse())

    def __parse(self):
        """Fetch the JSONP payload, unwrap the callback, and dump the
        'series4' rows to self.sheetname as CSV.

        :returns: CrawlerConstants.VAL_STATUS_FINISH on success, otherwise
            CrawlerConstants.VAL_STATUS_FAILURE.
        """
        # The trailing epoch-millis-style value acts as a cache buster
        # (the URL ends with '&_=').
        content = self._fetchContent(self.url + str(int(time.time())) + '000')
        # Response is JSONP: callback( <json> ) -- keep only the JSON part.
        lindex = content.find('(')
        rindex = content.rfind(')')
        if lindex < 0 or rindex < 0:
            self.logger.error('response format not expected')
            return CrawlerConstants.VAL_STATUS_FAILURE

        try:
            jobj = json.loads(content[lindex + 1:rindex])
            data = jobj[0]['series4']['data']
        except (ValueError, KeyError, IndexError, TypeError):
            # Malformed JSON or an unexpected layout used to propagate and
            # kill the crawler; report failure instead.
            self.logger.error('response json not expected: ' + traceback.format_exc())
            return CrawlerConstants.VAL_STATUS_FAILURE

        if len(data) == 0:
            self.logger.warn('No data')
            return CrawlerConstants.VAL_STATUS_FAILURE

        # BUG FIX: use a context manager so the file handle is closed even
        # when a write raises (the original leaked it on exceptions).
        with open(self.sheetname, 'w') as f:
            f.write('日期,结算价,多头持仓量,多头持仓增减,空头持仓量,空头持仓增减,净持仓量,沪深300指数,沪深300涨跌\n')
            for item in data:
                f.write(item + '\n')
        return CrawlerConstants.VAL_STATUS_FINISH

if __name__ == '__main__':
    # Single-instance guard: refuse to start while a previous run's PID
    # file still exists.
    if PIDUtils.isPidFileExist('egj'):
        print 'Previous East Money GuzhiQihuo signal process is on-going, please stop it firstly'
        sys.exit(1)
    import os
    pid = os.getpid()
    # Record our own PID under the 'egj' name so later runs (and any stop
    # tooling) can detect this process.
    PIDUtils.writePid('egj', pid)
    Logging.initLogger('conf/crawler/crawler.logging.cfg')
    # NOTE(review): the CTP singleton is started but never referenced again
    # in this file -- presumably required by the crawler framework; confirm.
    CTPUtils.Singleton = CTPUtils()
    CTPUtils.Singleton.start()
    egj = EmGzqhJccCrawlerManager('conf/crawler/egj.cfg')
    egj.start()
    # PIDUtils is given the manager's shutDown callback and a value of 5
    # (presumably a polling interval in seconds -- confirm against utils);
    # it appears to enable external graceful shutdown via the PID file.
    pidutils = PIDUtils('egj', egj.shutDown, 5, egj.logger)
    pidutils.start()
    sys.exit(0)                  