# -*- encoding: utf-8 -*-
import time

import os.path
from pathlib import Path
import traceback
import mj_crawler.Lib.Logger.log4py as logging
from mj_crawler.mj_parser import ParserRegistry
from mj_crawler.mj_source import SourceRegistry
from mj_crawler.mj_parser import common_errors
from mj_crawler.CrawlerRegister import CrawlerRegister


class Crawler(CrawlerRegister):
    """Fetch VIN data from a brand/type-specific source and parse it.

    Pairs one data source (created via ``SourceRegistry``) with lazily
    created, per-group parsers (``ParserRegistry``).  Registration, rate
    limiting and the redis/zookeeper plumbing live in the
    ``CrawlerRegister`` base class.
    """

    # Per parser type: (min, max) politeness-delay range in seconds.
    # Only consulted for the "vwag" brand; every other brand gets (1, 2).
    delays = {
        'ETK': (20, 40),
        'WEB': (20, 50)
    }

    def __init__(self, parser_type, root_dir, brand, zk_connect, redis_master=None, redis_password=None, redis_host=None, redis_port=6379, redis_sentinels=None, redis_timeout=10000, parallelism=10, daily_limit=100, name=None, hour_limit=None):
        # BUG FIX: keep parser_type on the instance -- fetch_parser()
        # previously read a bare global ``parser_type`` that only exists
        # when this module is run as a script (NameError as a library).
        self.parser_type = parser_type
        self.root_dir = root_dir
        self.parsers = {}  # cache: group -> parser instance
        self.source = SourceRegistry.create_source(parser_type, brand, root_dir)
        if not self.source:
            raise Exception("Cannot find proper mj_source for {0} {1}".format(brand, parser_type))

        if brand == "vwag":
            self.delay = Crawler.delays.get(parser_type, (30, 60))
        else:
            self.delay = (1, 2)

        self.stop_signal = False  # set True by on_exit() to request shutdown
        self.stopped = True       # presumably cleared by the base-class run loop -- TODO confirm

        self.logger = logging.get_logger("crawler", "Crawler")
        super(Crawler, self).__init__(brand, zk_connect, parser_type, redis_master, redis_password, redis_host, redis_port, redis_sentinels, redis_timeout, parallelism, daily_limit, name)

    def fetch_parser(self, group):
        """Return the parser for *group*, creating and caching it on first use.

        Raises ``Exception`` when no parser is registered for this
        crawler's (parser_type, group) combination.
        """
        if group in self.parsers:
            return self.parsers[group]

        # BUG FIX: use self.parser_type instead of the undefined global
        # name ``parser_type``.
        parser = ParserRegistry.create_parser(self.parser_type, group, self.root_dir)
        if not parser:
            raise Exception("Cannot find proper mj_parser for {0} {1}".format(group, self.parser_type))

        self.parsers[group] = parser
        return parser

    def handleRequest(self, vin_info):
        """Fetch and parse the data for one VIN request.

        ``vin_info``: sequence whose element 0 is the VIN string and whose
        element 3 is the parser group; everything except the last element
        is forwarded to ``source.get_data`` -- layout assumed from usage,
        TODO confirm against the caller.

        Returns a ``(code, response_list)`` tuple; ``response_list`` is
        ``None`` for unsupported brands.
        """
        str_vin = vin_info[0]

        self.logger.debug('# crawler get vin = [' + str_vin + ']')
        try:
            code, filename = self.source.get_data(*(vin_info[:-1]))
        except Exception as excp:
            # Best effort: log the full traceback (via the logger, not
            # print) and downgrade any source failure to ERR_UNKNOWN so a
            # single bad VIN does not kill the crawl loop.
            self.logger.error(traceback.format_exc())
            self.logger.error(excp)
            code = common_errors.ERR_UNKNOWN
            filename = None

        if not self.is_integer(code):
            self.logger.error("Invalid result code {0}".format(code))
            code = common_errors.ERR_UNKNOWN

        parser = self.fetch_parser(vin_info[3])
        if code != common_errors.ERR_SUCCESS:
            # ``messages`` is not defined in this class -- presumably
            # provided by CrawlerRegister; verify.
            self.logger.error("{0}: {1}".format(str_vin, Crawler.messages.get(code, u"未知错误")))
            if code == common_errors.ERR_NOT_SUPPORTED_BRAND:
                response_list = None
            else:
                response_list = parser.get_default_result()
        else:
            code, response_list = parser.parse(filename)

        message = Crawler.messages.get(code, u"未知错误")
        self.logger.warning("{0}: {1}: {2}".format(str_vin, str(code), message))
        return (code, response_list)

    def on_exit(self, event):
        """Termination handler: request shutdown, wait until the run loop
        reports stopped, then remove the pid file. Returns True."""
        self.logger.warning('Exit signal received, shutting down')

        self.stop_signal = True
        while not self.stopped:
            time.sleep(10)

        # BUG FIX: the pid file is written into root_dir by the launcher,
        # so remove it from there (not the current working directory), and
        # tolerate it being already gone.
        try:
            (Path(self.root_dir) / "run.pid").unlink()
        except OSError:
            pass
        self.logger.warning('Shutdown completed')
        return True

def set_exit_handler(func):
    """Register *func* to be invoked on process termination.

    On Windows this installs a console control handler via pywin32 and
    raises if pywin32 is missing; on every other OS it binds *func* to
    SIGTERM.
    """
    import os
    import sys

    if os.name != "nt":
        # POSIX path: plain SIGTERM handler.
        import signal
        signal.signal(signal.SIGTERM, func)
        return

    try:
        import win32api
    except ImportError:
        py_version = ".".join(str(part) for part in sys.version_info[:2])
        print("pywin32 not installed for Python " + py_version)
        raise Exception("pywin32 not installed for Python " + py_version)
    win32api.SetConsoleCtrlHandler(func, True)


if __name__ == '__main__':
    import getopt
    import sys

    try:
        # BUG FIX: the short-opt spec previously ended in "s:::" (two stray
        # colons); the long opts now include a working "sentinel=" -- the
        # old misspelled "seredis_ntinel=" is kept for backward
        # compatibility and handled in the same branch.
        opts, args = getopt.getopt(
            sys.argv[1:], "t:l:b:a:d:n:z:r:s:",
            ["type=", "limit=", "brand=", "redis_password=", "directory=",
             "name=", "zookeeper=", "redis=", "sentinel=", "seredis_ntinel=",
             "master=", "parallelism=", "hour_limit="])
    except getopt.GetoptError as e:
        print(e)
        sys.exit(2)

    # Defaults for all command-line options.
    brand = ""
    limit = 200
    hour_limit = None
    parallelism = 1
    name = None
    zookeeper = "127.0.0.1:2181"
    redis = None
    redis_sentinel = None
    redis_password = None
    redis_sentinel_master = None
    redis_host = None
    redis_port = None  # NOTE(review): stays None (not 6379) when -r is absent, as before
    # BUG FIX: redis_sentinels was only bound inside "if redis_sentinel:",
    # causing a NameError at the Crawler(...) call when no sentinel was given.
    redis_sentinels = None

    parser_type = "WEB"
    root_dir = os.getcwd()

    for param, arg in opts:
        # BUG FIX: several branches tested ``param in ("--x")`` -- a string
        # substring test, not a tuple membership test; and the long forms
        # --sentinel / --redis_password never matched their branches.
        if param in ("-b", "--brand"):
            brand = arg
        elif param in ("-l", "--limit"):
            limit = int(arg)
        elif param == "--hour_limit":
            hour_limit = int(arg)
        elif param in ("-t", "--type"):
            parser_type = str(arg).upper()
        elif param in ("-d", "--directory"):
            root_dir = arg
        elif param in ("-n", "--name"):
            name = arg
        elif param in ("-z", "--zookeeper"):
            zookeeper = arg
        elif param in ("-r", "--redis"):
            redis = arg
        elif param == "--parallelism":
            parallelism = int(arg)
        elif param in ("-s", "--sentinel", "--seredis_ntinel"):
            redis_sentinel = arg
        elif param in ("-a", "--redis_password", "--password"):
            redis_password = arg
        elif param == "--master":
            redis_sentinel_master = arg

    if not brand:
        print("brand must be specified with -b or --brand=")
        sys.exit(2)

    logger = logging.get_logger("crawler", "main", root_dir=root_dir, level=logging.WARNING)
    try:
        # Record our pid so an external supervisor can locate/stop us;
        # Crawler.on_exit removes this file on shutdown.
        pid_file = os.path.join(root_dir, "run.pid")
        with open(pid_file, "w") as fo:
            fo.write(str(os.getpid()))

        if redis:
            host_port = redis.split(':')
            redis_host = host_port[0]
            # BUG FIX: the port must be an int (the Crawler constructor's
            # default is the int 6379); it was passed as a string before.
            redis_port = int(host_port[1])

        if redis_sentinel:
            # "host1:port1,host2:port2" -> [(host1, port1), (host2, port2)]
            redis_sentinels = []
            for rs in redis_sentinel.split(','):
                host_port = rs.split(':')
                redis_sentinels.append((host_port[0], int(host_port[1])))

        crawler = Crawler(parser_type=parser_type, root_dir=root_dir, brand=brand,
                          zk_connect=zookeeper, redis_master=redis_sentinel_master,
                          redis_password=redis_password, redis_host=redis_host,
                          redis_port=redis_port, redis_sentinels=redis_sentinels,
                          parallelism=parallelism, daily_limit=limit, name=name,
                          hour_limit=hour_limit)

        set_exit_handler(crawler.on_exit)
    except Exception as ex:
        # Log the full traceback, not just the exception message.
        logger.error(traceback.format_exc())
        logger.error(ex)