import sys
sys.path.append(sys.argv[0][:sys.argv[0].rfind('com/stocklens')])
import traceback
import json
from com.stocklens.stock.common.utils import Logging,  PIDUtils, CTPUtils
from com.stocklens.stock.data.crawlercommon import CrawlerConstants, CrawlerManager, CrawlerBase
from com.stocklens.stock.data.sina.sinacommon import SinaConstants
import datetime, time
from bs4 import BeautifulSoup

class GFModelSignalCrawlerManager(CrawlerManager):
    """Schedules crawl tasks that download GuangFa model signal history
    pages and save each one as a local CSV file via GFModelSignalCrawler."""

    # Logger name; the same key is used for the PID file in __main__.
    LOGGER_NAME_CRAWL = 'gms'

    def __init__(self, json_config_file):
        '''
        Constructor.

        json_config_file -- path to the crawler configuration file; passed
        through to CrawlerManager together with a 0.1 poll interval.
        '''
        super(GFModelSignalCrawlerManager, self).__init__(json_config_file, 0.1, None)
        self.logger = Logging.getLogger(GFModelSignalCrawlerManager.LOGGER_NAME_CRAWL)

    def _initTask(self):
        # One (history-page URL, output CSV file name) pair per model signal.
        self._taskList.extend([
                               ('http://218.19.190.27/TDhistory.asp','TD.csv'),
                               ('http://218.19.190.27/HTMhistory.asp','HTM.csv'),
                               ('http://218.19.190.27/LLThistory.asp','LLT.csv'),
                               ('http://218.19.190.27/PHASEhistory.asp','PHA.csv'),
                               ('http://218.19.190.27/TD300history.asp','TD300.csv'),
                               ('http://218.19.190.27/HWAVEhistory.asp','HWAVE.csv'),
                               ('http://218.19.190.27/PARABOLA300history.asp','PARABOLA300.csv'),
                               ('http://218.19.190.27/PARABOLA399006history.asp','PARABOLA399006.csv'),
                               ('http://218.19.190.27/WFFThistory.asp', 'WFFT.csv'),
                               ('http://218.19.190.27/HOMhistory.asp','HOM.csv'),
                               ])

    def _generateTask(self, task, checkTaskList=True):
        """Fill `task` with the next (url, sheet name) pair from the queue.

        Returns True when a task was produced, False otherwise.
        """
        # Fix: forward the caller's checkTaskList flag instead of the
        # hard-coded True the original passed (the parameter was ignored).
        if super(GFModelSignalCrawlerManager, self)._generateTask(task, checkTaskList) is False:
            return False
        # Guard the pop: with checkTaskList=False the base class may not
        # have verified the queue, and pop(0) on an empty list would raise.
        if not self._taskList:
            return False
        (url, name) = self._taskList.pop(0)
        task[GFModelSignalCrawler.PARA_URL] = url
        task[GFModelSignalCrawler.PARA_SHEET_NAME] = name
        task[CrawlerConstants.PARA_CLASS] = GFModelSignalCrawler
        return True

class GFModelSignalCrawler(CrawlerBase):
    """Downloads one GuangFa model signal history page and writes the
    embedded table out as a two-column CSV file (Date,Signal)."""

    # Request-dict keys filled in by GFModelSignalCrawlerManager.
    PARA_URL = 'url'
    PARA_SHEET_NAME = 'sheetname'

    # Arrow glyph on the page -> numeric signal direction.
    # Up arrow means 1, right arrow means 0; anything else becomes -1.
    _SIGNAL_MAP = {u'\u2191': 1,
                   u'\u2192': 0}

    def __init__(self, controller, dbProxy, request):
        super(GFModelSignalCrawler, self).__init__(controller, dbProxy, request)
        self.url = request[GFModelSignalCrawler.PARA_URL]
        self.sheetname = request[GFModelSignalCrawler.PARA_SHEET_NAME]
        self.logger = controller.logger

    def run(self):
        super(GFModelSignalCrawler, self).run()
        self._reportDone(self.__parse())

    def __parse(self):
        """Fetch the page, extract its single signal table and dump it as CSV.

        Returns CrawlerConstants.VAL_STATUS_FINISH on success, otherwise
        CrawlerConstants.VAL_STATUS_FAILURE.
        """
        content = self._fetchContent(self.url)
        lindex = content.find('<table')
        rindex = content.find('</table>')
        if lindex < 0 or rindex < 0:
            self.logger.error('No table found')
            return CrawlerConstants.VAL_STATUS_FAILURE
        # rindex + 8 keeps the closing '</table>' tag in the parsed snippet.
        soup = BeautifulSoup(content[lindex:rindex + 8])
        tables = soup.findAll('table')
        if len(tables) != 1:
            self.logger.error('Table not expected:%d', len(tables))
            return CrawlerConstants.VAL_STATUS_FAILURE
        trs = tables[0].findAll('tr')
        if len(trs) < 1:
            self.logger.error('TRs not expected:%d', len(trs))
            return CrawlerConstants.VAL_STATUS_FAILURE
        # Fix: write through a context manager so the file handle is closed
        # even if a row raises mid-write (the original leaked it on error,
        # since f.close() was only reached on the success path).
        with open(self.sheetname, 'w') as f:
            f.write('Date,Signal\n')
            # trs[0] is the header row; data rows start at trs[1].
            for tr in trs[1:]:
                tds = tr.findAll('td')
                if len(tds) != 2:
                    self.logger.warn('unexpected td')
                    continue
                signalDate = tds[0].text
                sig_dir_avatar = tds[1].text.strip()
                sig_dir = GFModelSignalCrawler._SIGNAL_MAP.get(sig_dir_avatar, -1)
                f.write('%s,%d\n' % (signalDate, sig_dir))
        return CrawlerConstants.VAL_STATUS_FINISH


if __name__ == '__main__':
    if PIDUtils.isPidFileExist('gms'):
        print 'Previous GuangFa model signal process is on-going, please stop it firstly'
        sys.exit(1)
    import os
    pid = os.getpid()
    PIDUtils.writePid('gms', pid)
    Logging.initLogger('conf/crawler/crawler.logging.cfg')
    CTPUtils.Singleton = CTPUtils()
    CTPUtils.Singleton.start()
    gms = GFModelSignalCrawlerManager('conf/crawler/gms.cfg')
    gms.start()
    pidutils = PIDUtils('gms', gms.shutDown, 5, gms.logger)
    pidutils.start()
    sys.exit(0)              