'''
Created on Nov 14, 2015

@author: eyaomai
'''
import sys
sys.path.append(sys.argv[0][:sys.argv[0].rfind('com/stocklens')])
import traceback
from bs4 import BeautifulSoup
from com.stocklens.stock.common.utils import Logging,  PIDUtils 
from com.stocklens.stock.data.crawlercommon import CrawlerConstants, CrawlerManager, CrawlerBase

class JdListCrawlerManager(CrawlerManager):
    '''
    Crawl manager that schedules one JdListCrawler task per result page
    of a single JD.com category listing.
    '''
    LOGGER_NAME_CRAWL = 'jdl'
    CONFIG_FILE_CAGEGORYID = 'categoryid'
    CONFIG_FILE_TOTALPAGE = 'totalpage'

    def __init__(self, json_config_file):
        '''
        Build the manager from a JSON config file; 0.1 is the interval
        value forwarded to the base CrawlerManager constructor.
        '''
        super(JdListCrawlerManager, self).__init__(json_config_file, 0.1, None)
        self.logger = Logging.getLogger(JdListCrawlerManager.LOGGER_NAME_CRAWL)

    def _getNonCommonConfig(self, config):
        # Pull the JD-specific settings out of the parsed config dict.
        self.__categoryid = config[JdListCrawlerManager.CONFIG_FILE_CAGEGORYID]
        self.__totalpage = config[JdListCrawlerManager.CONFIG_FILE_TOTALPAGE]

    def _generateTask(self, task, checkTaskList=True):
        # NOTE(review): checkTaskList is accepted but ignored -- the base
        # class is always called with True; confirm this is intentional.
        if super(JdListCrawlerManager, self)._generateTask(task, True) is False:
            return False
        # Fill the task dict from the head of the queued request list.
        task.update(self._taskList.pop(0))
        return True

    def _initTask(self):
        # Queue one crawl request per listing page, pages 1..totalpage inclusive.
        self._taskList.extend(
            {CrawlerConstants.PARA_CLASS: JdListCrawler,
             JdListCrawler.PARA_CATEGORYID: self.__categoryid,
             JdListCrawler.PARA_PAGE: page}
            for page in range(1, self.__totalpage + 1))


class JdListCrawler(CrawlerBase):
    '''
    Crawler for one page of a JD.com category listing: fetches the page,
    extracts (itemid, itemname) pairs from the '#plist' section and
    bulk-inserts them into the jditem table.
    '''
    URL = 'http://list.jd.com/list.html?cat=%s&page=%d&sort=sort_commentcount_desc&plist=1'
    PARA_PAGE = 'page'
    PARA_CATEGORYID = 'categoryid'

    def __init__(self, controller, dbProxy, request):
        '''
        :param controller: crawl controller, passed through to CrawlerBase
        :param dbProxy: database proxy used to persist parsed items
        :param request: task dict carrying 'categoryid' and 'page'
        '''
        super(JdListCrawler, self).__init__(controller, dbProxy, request)
        self.__categoryid = request[JdListCrawler.PARA_CATEGORYID]
        self.__page = request[JdListCrawler.PARA_PAGE]
        self.__url = JdListCrawler.URL % (self.__categoryid, self.__page)

    def run(self):
        '''Fetch the listing page, parse it and report the final status.'''
        super(JdListCrawler, self).run()
        status = CrawlerConstants.VAL_STATUS_FINISH

        if self.__url is None:
            self.logger.error('url is none')
            status = CrawlerConstants.VAL_STATUS_FAILURE
        else:
            self.logger.info('Start Crawl %s', self.__url)
            content = self._fetchContent(self.__url)
            if content is None:
                status = CrawlerConstants.VAL_STATUS_FAILURE
            else:
                status = self.__parse(content)
        self._reportDone(status)
        self.logger.info('Finish Crawl')

    def __parse(self, content):
        '''
        Extract item id/name pairs from the page HTML and insert them in
        one batched INSERT. Returns VAL_STATUS_FINISH on success, or
        VAL_STATUS_FAILURE on any parse/DB error (logged with traceback).
        '''
        try:
            soup = BeautifulSoup(content)
            plist = soup.findAll('div', {'id': 'plist'})
            # BUGFIX: the attrs argument must be a dict, not a set -- the
            # original {'class','p-name'} made findAll raise on every page,
            # so parsing always returned FAILURE.
            items = plist[0].findAll('div', {'class': 'p-name'})
            sql = 'INSERT INTO jditem (categoryid, itemid, itemname) values '
            values = list()
            for item in items:
                ahref = item.find('a')
                link = ahref.get('href')
                # strip() already removes newlines; the extra strip('\n')
                # was redundant.
                itemname = ahref.text.strip()
                # The item id is the path segment between the last '/' and
                # the last '.', e.g. '.../1234567.html' -> '1234567'.
                lindex = link.rfind('/')
                rindex = link.rfind('.')
                itemid = link[lindex + 1:rindex]
                # BUGFIX: escape backslashes and double quotes so item
                # names containing '"' no longer break the statement.
                # NOTE(review): string-built SQL remains injection-prone;
                # prefer parameterized queries if dbProxy supports them.
                safename = itemname.replace('\\', '\\\\').replace('"', '\\"')
                values.append('("%s","%s","%s")' % (self.__categoryid, itemid, safename))

            if len(values) > 0:
                if self.dbProxy.execute(sql + ','.join(values)) > 0:
                    self.dbProxy.commit()
            return CrawlerConstants.VAL_STATUS_FINISH
        except Exception:
            traceInfo = traceback.format_exc()
            self.logger.warn('Fail to parse:%s:%s', self.__url, traceInfo)
            return CrawlerConstants.VAL_STATUS_FAILURE
if __name__ == '__main__':
    # Refuse to start when a previous JDL crawler left a live pid file.
    if PIDUtils.isPidFileExist('jdl'):
        print 'Previous JDL (JD list) Crawler process is on-going, please stop it firstly'
        sys.exit(1)
    import os
    # Record our pid so subsequent invocations (and stop tooling) can see us.
    pid = os.getpid()
    PIDUtils.writePid('jdl', pid)
    # Logging must be configured before the manager creates its logger.
    Logging.initLogger('conf/crawler/crawler.logging.cfg')
    jdl = JdListCrawlerManager('conf/crawler/jdl.cfg')
    jdl.start()
    # Watchdog: constructed with the pid-file name, a shutdown callback and
    # a 5-unit interval -- presumably it polls the pid file and invokes
    # jdl.shutDown when an external stop is requested; TODO confirm
    # against PIDUtils' implementation.
    pidutils = PIDUtils('jdl', jdl.shutDown, 5, jdl.logger)
    pidutils.start()
    sys.exit(0)
                        