#!/usr/bin/env python
# -*-coding:utf-8 -*-
import base64
import json
import socket
import threading
import time
from queue import Queue
from threading import Timer

import requests
import urllib3

import settings
from kernel.model import SpiderConfig, Page
from kernel.utils import Logger, Downloader, RedisMessageServer, CustomMysql

urllib3.disable_warnings()


class Monitor(threading.Thread):
    """Background thread that periodically reloads spider configuration
    from MySQL and (re)activates the spiders it describes.

    NOTE(review): every attribute below is class-level and therefore shared
    across all Monitor instances — presumably the process creates exactly
    one Monitor; confirm before instantiating a second.
    """
    headers = {"Connection": "close", "Running-Env": settings.ACTIVE}
    logger = Logger('Spider', 'Monitor').get_logger()
    spider_class = {}   # spider name -> Spider subclass (filled by get_all_subclass)
    state = {}          # spider id -> state flag (1 = running, 0 = stopped)
    wait_active = []    # spider ids queued for (re)activation
    has_active = {}     # spider id -> live spider instance
    spiderConfig = {}   # spider id -> latest raw config dict
    subclass = []
    download = None     # package object used to resolve spider classes in active()

    def __init__(self):
        super(Monitor, self).__init__()
        self.requests = requests
        self.customMysql = CustomMysql(
            host=settings.MYSQL_HOST,
            port=settings.MYSQL_PORT,
            db=settings.MYSQL_DB,
            user=settings.MYSQL_USER,
            pwd=settings.MYSQL_PASSWORD
        )

    def get_all_subclass(self, download, super):
        """Index every subclass of `super` by its class-level `name`.

        :param download: package whose modules hold the spider classes;
            kept so active() can re-resolve them later.
        :param super: base class whose subclasses are collected.  The name
            shadows the builtin; it is kept unchanged so existing keyword
            callers keep working.
        """
        self.download = download
        for subclass in super.__subclasses__():
            self.spider_class[subclass.name] = subclass

    def stop_event(self):
        """Placeholder hook; no stop handling is implemented yet."""
        pass

    def run(self):
        """Poll the configuration table forever, logging any failure."""
        while True:
            try:
                self.reload_spider_config()
            except Exception as e:
                self.logger.error(repr(e))
            finally:
                # Message now matches the real interval (it claimed 10s
                # while the code slept 5s).
                self.logger.info('Monitor sleep 5 seconds')
                time.sleep(5)

    def reload_spider_config(self):
        """Load all rows from `spider_config`, refresh the in-memory maps,
        queue spiders whose state is 1, then activate the queue.
        """
        try:
            json_config = self.customMysql.select_list(table='spider_config', query={}, limit=10000)
            if json_config:
                for row in json_config:
                    config = {
                        'spiderId': row['id'],
                        'siteName': row['site_name'],
                        'spiderName': row['spider_name'],
                        'domain': row['domain'],
                        'spiderQueue': row['spider_queue'],
                        'reaperQueue': row['reaper_queue'],
                        'threadNum': row['thread_num'],
                        'state': row['state']
                    }
                    self.logger.info(config)
                    spider_config = SpiderConfig()
                    spider_config.from_json(config)
                    self.spiderConfig[spider_config.spiderId] = config
                    self.state[spider_config.spiderId] = spider_config.state
                    if spider_config.state == 1:
                        self.wait_active.append(spider_config.spiderId)
                    if spider_config.state == 0:
                        self.logger.warning('Spider %s stop' % spider_config.spiderName)
                self.active()

        except Exception as e:
            self.logger.error(repr(e))

    def switch(self, spider_id):
        """Return True while the spider identified by `spider_id` should run."""
        return bool(self.state.get(spider_id))

    def register(self, spider_id, state):
        """Record the current state flag for a spider."""
        self.state[spider_id] = state

    def active(self):
        """Start (or restart) every spider queued in wait_active."""
        while self.wait_active:
            spider_id = self.wait_active.pop()  # LIFO, same as pop(len-1)
            config = self.spiderConfig[spider_id]
            if spider_id not in self.has_active:
                site_name = config['siteName']
                if site_name in self.spider_class:
                    subclass = self.spider_class[site_name]
                    # Re-resolve the class through the live module object so
                    # the freshest definition is used.
                    py = getattr(self.download, subclass.__module__.split('.')[-1])
                    cls = getattr(py, subclass.__name__)
                    if cls:
                        obj = cls(config=config, monitor=self)
                        obj.start()
                        self.has_active[spider_id] = obj
                else:
                    self.logger.error('There is no the %s spider' % site_name)
            else:
                # Already created once: refresh its config and restart it.
                obj = self.has_active[spider_id]
                obj.config.from_json(config)
                obj.start()
                self.has_active[spider_id] = obj


class Heartbeat(threading.Thread):
    """Background thread that aggregates per-request success/error flags
    from a queue and periodically reports them to the `heat_beat` table.
    """
    logger = Logger(category='Spider', class_name='Report').get_logger()
    headers = {"Content-type": "application/json; charset=UTF-8", 'Connection': 'close', 'Running-Env': settings.ACTIVE}

    def __init__(self, config, generate_list, message_server=None):
        """
        :param config: SpiderConfig-like object (spiderId, spiderQueue, ...).
        :param generate_list: live list of the spider's worker threads; its
            length is reported as the thread count.
        :param message_server: optional RedisMessageServer.  Defaults to a
            fresh instance per Heartbeat — the original default
            `=RedisMessageServer()` was evaluated once at import time and
            shared by every instance (mutable-default-argument bug).
        """
        super(Heartbeat, self).__init__()
        self.success = 0
        self.error = 0
        self.q = Queue()
        self.config = config
        self.generate_list = generate_list
        self.messageServer = message_server if message_server is not None else RedisMessageServer()
        self.requests = requests
        self.customMysql = CustomMysql(
            host=settings.MYSQL_HOST,
            port=settings.MYSQL_PORT,
            db=settings.MYSQL_DB,
            user=settings.MYSQL_USER,
            pwd=settings.MYSQL_PASSWORD
        )

    def status(self, status):
        """Fold one boolean download result into the running counters."""
        if status:
            self.success += 1
        else:
            self.error += 1

    def run(self):
        """Consume status flags from the queue forever.

        Only this thread mutates the counters here; the original took a
        function-local Lock that no other thread could ever contend on, so
        it has been removed.
        """
        self.report()
        while True:
            self.status(self.q.get())

    @staticmethod
    def get_host_ip():
        """Return the local outbound IP by UDP-connecting toward a public
        address (no packet is actually sent for a UDP connect)."""
        with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as s:
            s.connect(('8.8.8.8', 80))
            return s.getsockname()[0]

    def report(self):
        """Insert one heartbeat row, reset counters on success, and
        reschedule itself in 10 seconds regardless of outcome."""
        try:
            now = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
            data = {
                't': 1,
                'spider_id': self.config.spiderId,
                'queue_name': self.config.spiderQueue,
                'l': self.messageServer.get_queue_size(),
                's': self.success,
                'e': self.error,
                'c': len(self.generate_list),
                'report_ip': self.get_host_ip(),
                'create_time': now,
                'update_time': now
            }
            self.logger.info('Spider %s report data %s' % (self.config.spiderName, json.dumps(data)))
            status = self.customMysql.insert(table='heat_beat', data=data)
            if status:
                self.logger.info('Spider %s report success' % self.config.spiderName)
                # Reset only after a confirmed insert so failed writes keep
                # their samples for the next attempt.
                self.success = 0
                self.error = 0

        except Exception as e:
            self.logger.error(repr(e))
        finally:
            t = Timer(10, self.report)
            # Daemonize so a pending timer can't keep the process alive.
            t.daemon = True
            t.start()


class Spider(object):
    """Base class for site spiders: worker threads pull URLs from a Redis
    queue, fetch them via the subclass's download(), and callback() routes
    results to the reaper queue and new URLs back to the spider queue.
    """
    logger = Logger(category='Spider', class_name='Spider').get_logger()
    name = 'Spider'
    domain = ''

    def __init__(self, config=None, monitor=None):
        """
        :param config: dict of spider settings (spiderId, spiderQueue, ...).
        :param monitor: Monitor instance; created lazily when omitted.  The
            original default `monitor=Monitor()` was evaluated once at import
            time — it opened a MySQL connection even when a monitor was
            passed in and shared one Monitor object across every Spider
            (mutable-default-argument bug).
        """
        self.downloader = Downloader(self.logger)
        self.config = SpiderConfig()
        self.config.from_json(config)
        self.domain = self.config.domain
        self.monitor = monitor if monitor is not None else Monitor()
        self.monitor.register(self.config.spiderId, self.config.state)
        self.generate_list = []  # live worker threads (length reported by Heartbeat)
        self.messageServer = RedisMessageServer(
            redis_host=settings.REDIS_HOST,
            redis_port=settings.REDIS_PORT,
            db=settings.REDIS_DB,
            init_queue_name=self.config.spiderQueue
        )
        self.logger.info('Spider %s init' % self.config.spiderName)
        self.heartbeat = Heartbeat(self.config, self.generate_list, self.messageServer)
        self.heartbeat.start()

    def start_url(self, url):
        """Seed the spider queue with an initial URL, if one is configured."""
        if url:
            self.messageServer.send_message(msg=url)

    def download(self, page):
        """Override in subclasses: fetch and parse `page` in place."""
        self.logger.warning('There is no download')
        return None

    def start(self):
        """Spawn worker threads up to threadNum when the spider is enabled.

        The original took a function-local Lock around generate_thread();
        being local and single-acquirer it serialized nothing, so it is gone.
        NOTE(review): generate_list is appended from inside each worker (see
        run()), so this condition can race and overshoot threadNum slightly
        — confirm whether that matters.
        """
        if self.config.state:
            if self.config.startUrl:
                self.start_url(url=self.config.startUrl)
            while len(self.generate_list) < self.config.threadNum:
                self.generate_thread()
            self.logger.info('Spider %s run' % self.config.spiderName)

    def stop(self):
        # NOTE(review): the config dicts built by Monitor carry no 'website'
        # key — confirm SpiderConfig defines this attribute, else this line
        # raises; spiderName is the likely intent.
        self.logger.info('%s stop' % self.config.website)

    def generate_thread(self):
        """Start one daemon worker thread running self.run()."""
        t = threading.Thread(target=self.run)
        t.daemon = True
        t.start()

    def format(self, obj=None):
        """Strip noise entries from `obj` in place: falsy values (except
        numeric zero), literal '-' placeholders, and falsy keys.
        """
        try:
            if obj:
                for key, value in dict(obj).items():  # iterate a copy while deleting
                    # 0 == 0.0 in Python, so one zero check covers both.
                    if (not value and value != 0) or value == '-' or not key:
                        del obj[key]
        except Exception as e:
            self.logger.error(repr(e))

    def run(self):
        """Worker loop: pop a URL, download it, feed the result status to the
        heartbeat, then dispatch via callback().  Exits when the monitor's
        switch for this spider turns off.
        """
        # Bug fix: the original stored `threading.currentThread` without
        # calling it, so every worker appended the same function object
        # instead of its own Thread.
        me = threading.current_thread()
        self.generate_list.append(me)
        while self.monitor.switch(self.config.spiderId):
            try:
                message = self.messageServer.pop_message(self.config.spiderQueue)
                if message:
                    page = Page()
                    page.url = message
                    page.domain = self.config.domain
                    try:
                        self.download(page=page)
                    except Exception as e:
                        self.logger.error(repr(e))
                    finally:
                        self.heartbeat.q.put_nowait(page.status)
                        self.callback(page=page)
                else:
                    time.sleep(3)  # queue empty: back off before polling again
            except Exception as e:
                self.logger.error(repr(e))
        self.generate_list.remove(me)  # stop signal received: deregister this worker

    def callback(self, page=None):
        """Route a processed Page: results go to the reaper queue, discovered
        URLs back to the spider queue, and failed URLs are re-queued.

        `page` defaults to None instead of the original shared `Page()`
        mutable default; every caller in this file passes a page explicitly.
        """
        if page:
            if page.status:
                if page.result:
                    page.result['url'] = page.url
                    page.result['domain'] = page.domain
                    self.messageServer.send_message(queue_name=self.config.reaperQueue, msg=json.dumps(page.result))
                if page.data:
                    for one in page.data:
                        one['url'] = page.url
                        one['domain'] = page.domain
                        self.messageServer.send_message(queue_name=self.config.reaperQueue, msg=json.dumps(one))
                if page.urlList:
                    for url in page.urlList:
                        self.messageServer.send_message(queue_name=self.config.spiderQueue, msg=url)
            else:
                if page.url:
                    # Failed download: push the URL back for retry.
                    self.messageServer.send_message(queue_name=self.config.spiderQueue, msg=page.url)
                else:
                    self.logger.error('There is no Page.url, checkout you download page')
        else:
            self.logger.error('There is no page')


if __name__ == "__main__":
    # Module is only imported by the spider framework; no standalone entry point.
    pass
