#!/usr/bin/env python
# -*-coding:utf-8 -*-
import base64
import json
import logging
import os
import random
import re
import threading
import redis
import requests
import settings
import pymysql

from logging.handlers import TimedRotatingFileHandler
from dbutils.pooled_db import PooledDB
from elasticsearch import Elasticsearch
from log4mongo.handlers import MongoHandler
from kernel.model import Message


class Logger:
    """Logging utility for the spider system.

    Provides three output channels:
      1. ``ch`` -- console output (StreamHandler)
      2. ``fh`` -- rolling file output, rotated daily (TimedRotatingFileHandler)
      3. Mongo storage -- currently commented out
    """
    # Class-level lock and handler: all Logger instances share ONE file handler,
    # created lazily (and thread-safely) in __new__.
    _fh_lock = threading.Lock()
    fh = None

    def __init__(self, category='', class_name=''):
        self.name = '[%s %s]' % (category, class_name)
        self.logger = logging.getLogger(self.name)
        self.logger.setLevel(logging.INFO)
        formatter = logging.Formatter(
            '[%(asctime)s] [%(levelname)s] [%(filename)s:%(lineno)d] %(name)s %(message)s')
        self.ch = logging.StreamHandler()
        self.ch.setFormatter(formatter)
        self.fh.setFormatter(formatter)
        # self.mon = MongoHandler(host=settings.MONGO_ADDRESS, database_name=settings.MONGO_LOG, collection='Log')
        # self.fh.suffix = "%Y-%m-%d_%H"
        # self.fh.extMatch = re.compile(r"^\d{4}-\d{2}-\d{2}_\d{2}$")
        # Bug fix: logging.getLogger returns the SAME object for the same name,
        # so re-instantiating Logger with identical category/class_name used to
        # attach the handlers again and every record was emitted multiple times.
        if not self.logger.handlers:
            self.logger.addHandler(self.fh)
            self.logger.addHandler(self.ch)
            # self.logger.addHandler(self.mon)

    def get_logger(self):
        """Return the configured ``logging.Logger`` instance."""
        return self.logger

    def __new__(cls, *args, **kwargs):
        # Double-checked locking: create the shared file handler exactly once
        # even under concurrent first instantiation.
        if cls.fh is None:
            with Logger._fh_lock:
                if cls.fh is None:
                    path = settings.BASE_LOG_DIR  # log directory from settings
                    # exist_ok avoids a race with another process creating it.
                    os.makedirs(path, exist_ok=True)
                    # os.path.join no longer depends on BASE_LOG_DIR carrying a
                    # trailing separator (plain concatenation did).
                    filename = os.path.join(path, 'spider.log')
                    # Rotate daily ('D', interval 1), keep 7 backups.
                    cls.fh = TimedRotatingFileHandler(filename, 'D', 1, 7)
        return object.__new__(cls)


class Downloader:
    """HTTP download utility for the spider system; supports HTTP(S) only.

    ``get``/``post`` route traffic through a rotating proxy; the ``*_no_proxy``
    variants hit the target directly.  A random desktop User-Agent is attached
    to every request unless overridden by the caller's headers.
    """
    logger = Logger(category='Spider', class_name='Downloader').get_logger()

    # Pool of real desktop browser User-Agent strings; one is chosen at random
    # for each outgoing request to reduce fingerprinting.
    UAS = [
        'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.182 Safari/537.36',
        'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.150 Safari/537.36 Edg/88.0.705.63',
        'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.150 Safari/537.36',
        'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.146 Safari/537.36',
        'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3803.116 Safari/537.36',
        'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) HeadlessChrome/88.0.4324.104 Safari/537.36',
        'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.104 Safari/537.36',
        'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.96 Safari/537.36 Edg/88.0.705.56',
        'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.141 Safari/537.36 OPR/73.0.3856.344',
        'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.89 Safari/537.36 SLBrowser/7.0.0.1071 SLBChan/25',
        'Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36 Edg/87.0.664.66',
        'Mozilla/5.0 (Windows NT 10.0: Win64: x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36',
        'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.89 Safari/537.36 SLBrowser/7.0.0.9 SLBChan/25',
        'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4356.6 Safari/537.36',
        'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.3',
        'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36 Edge/12.10240',
        'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.96 Safari/537.36 Edg/88.0.705.50',
        'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.104 Safari/537.36',
        'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:85.0) Gecko/20100101 Firefox/85.0',
        'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.96 Safari/537.36'
    ]

    def __init__(self, logger=None):
        """Optionally replace the class-level logger with a caller-supplied one."""
        if logger:
            self.logger = logger

    def __headers(self, headers=None):
        """Build request headers: random User-Agent, then caller overrides."""
        hs = {
            'User-Agent': random.choice(self.UAS)
        }
        if headers:
            hs.update(headers)
        return hs

    def __proxy(self, url=''):
        """Return a requests-style proxies dict routing through the rotating proxy.

        SECURITY NOTE: the proxy credentials are hard-coded below; they should
        be moved to settings or environment variables.
        """
        host = "megaproxy.rotating.proxyrack.net"
        port = "222"
        username = "nestachen123"
        password = "e47bc4-61d819-b79a39-cd344e-0c203f"
        # Bug fix: ``self.logger`` is a logging.Logger object and is NOT
        # callable -- the original ``self.logger(...)`` raised TypeError on
        # every proxied request.  Use .info().
        self.logger.info('Download url=%s with proxy=%s' % (url, host + ':' + port))
        return {
            'http': 'http://%s:%s@%s:%s' % (username, password, host, port),
            'https': 'http://%s:%s@%s:%s' % (username, password, host, port),
        }

    def get(self, url='', params=None, headers=None, timeout=10, **args):
        """GET *url* through the proxy; returns the ``requests.Response``."""
        # verify=False: target sites often have broken certs; the invalid
        # ``cert=False`` kwarg was dropped (requests ignores a falsy cert).
        res = requests.get(
            url=url,
            params=params,
            headers=self.__headers(headers),
            proxies=self.__proxy(url=url),
            verify=False,
            timeout=timeout,
            **args)
        self.logger.info('Download url=%s response code=%s' % (url, res.status_code))
        return res

    def post(self, url='', params=None, data=None, json=None, headers=None, timeout=10, **args):
        """POST to *url* through the proxy; returns the ``requests.Response``."""
        res = requests.post(
            url=url,
            params=params,
            data=data,
            json=json,
            headers=self.__headers(headers),
            proxies=self.__proxy(url=url),
            verify=False,
            timeout=timeout,
            **args)
        self.logger.info('Download url=%s response code=%s' % (url, res.status_code))
        return res

    def get_no_proxy(self, url='', params=None, headers=None, timeout=10, **args):
        """GET *url* directly (no proxy); returns the ``requests.Response``."""
        res = requests.get(
            url=url,
            params=params,
            headers=self.__headers(headers),
            verify=False,
            timeout=timeout,
            **args)
        self.logger.info('Download url=%s response code=%s' % (url, res.status_code))
        return res

    def post_no_proxy(self, url='', params=None, data=None, json=None, headers=None, timeout=10, **args):
        """POST to *url* directly (no proxy); returns the ``requests.Response``."""
        res = requests.post(
            url=url,
            params=params,
            data=data,
            json=json,
            headers=self.__headers(headers),
            verify=False,
            timeout=timeout,
            **args)
        self.logger.info('Download url=%s response code=%s' % (url, res.status_code))
        return res


class CustomRedis:
    """Utility class wrapping a custom Redis connection pool.

    Every operation catches and logs exceptions, returning a neutral default
    (None / False / 0) so callers never have to handle Redis errors.
    """

    logger = Logger(category='Spider', class_name='CustomRedis').get_logger()

    def __init__(self, host='', pwd='', port=6379, db=0):
        """Create the shared connection pool (responses decoded to str)."""
        self.redis_pool = redis.ConnectionPool(host=host, password=pwd, port=port, db=db, decode_responses=True)

    def _client(self):
        """Return a Redis client backed by the shared pool (cheap wrapper)."""
        return redis.Redis(connection_pool=self.redis_pool)

    # NOTE: the original methods used ``return`` inside ``finally``, which
    # silently suppressed even BaseExceptions such as KeyboardInterrupt.
    # Plain returns below keep the same success/error results without that trap.

    def str_get(self, key):
        """GET a string value; None on miss or on error."""
        try:
            return self._client().get(key)
        except Exception as e:
            self.logger.error(repr(e))
            return None

    def str_set(self, key, value):
        """SET key to value; returns the redis result, or False on error."""
        try:
            return self._client().set(key, value)
        except Exception as e:
            self.logger.error(repr(e))
            return False

    def list_len(self, name):
        """LLEN of the list; 0 on error."""
        try:
            return self._client().llen(name)
        except Exception as e:
            self.logger.error(repr(e))
            return 0

    def list_add(self, name, value):
        """LPUSH value onto the list; True on success, False on error."""
        try:
            # lpush returns the new list length; > 0 means the push succeeded.
            return self._client().lpush(name, value) > 0
        except Exception as e:
            self.logger.error(repr(e))
            return False

    def list_pop(self, name):
        """RPOP from the list; None when empty or on error."""
        try:
            return self._client().rpop(name)
        except Exception as e:
            self.logger.error(repr(e))
            return None

class RedisMessageServer:
    """Redis-backed message queue.

    Suited to large numbers of small, fragmented messages; NOT suited to
    storing large bodies of text per message.
    """
    logger = Logger(category='Spider', class_name='RedisMessageServer').get_logger()

    def __init__(self, redis_host='', redis_port=6379, db=0, init_queue_name=''):
        """Create the pool; ``init_queue_name`` is the default queue for all ops."""
        if not redis_host:
            self.logger.error('Message server host is null')
        self.redis_pool = redis.ConnectionPool(host=redis_host, port=redis_port, db=db, decode_responses=True)
        self.init_queue_name = init_queue_name

    def pop_message(self, queue_name=''):
        """RPOP one message from *queue_name* (default queue if empty).

        Returns the message string, or None when the queue is empty or on error.
        """
        message = None
        try:
            if not queue_name:
                queue_name = self.init_queue_name
            engineer = redis.Redis(connection_pool=self.redis_pool)
            message = engineer.rpop(queue_name)
            if message:
                self.logger.info('Pop message success, queue=%s, message=%s' % (queue_name, message))
            else:
                self.logger.info('There is no message to pop from queue=%s' % queue_name)
        except Exception as e:
            # Single consolidated error line (the original logged the error twice).
            self.logger.error('Pop message failed, queue=%s, errorMsg=%s' % (queue_name, repr(e)))
        return message

    def send_message(self, queue_name='', msg=''):
        """LPUSH *msg* onto *queue_name* (default queue if empty); True on success."""
        result = False
        try:
            if not queue_name:
                queue_name = self.init_queue_name
            engineer = redis.Redis(connection_pool=self.redis_pool)
            result = engineer.lpush(queue_name, msg)
            if result > 0:
                result = True
                self.logger.info('Send message success, queue=%s, message=%s' % (queue_name, msg))
            else:
                # Bug fix: the original format string had no argument, so the
                # literal '%s' was logged instead of the queue name.
                self.logger.error('Send message failed, queue=%s' % queue_name)
        except Exception as e:
            self.logger.error(repr(e))
        return result

    def get_queue_size(self, queue_name=''):
        """LLEN of *queue_name* (default queue if empty); 0 on error."""
        result = 0
        try:
            if not queue_name:
                queue_name = self.init_queue_name
            engineer = redis.Redis(connection_pool=self.redis_pool)
            result = engineer.llen(queue_name)
        except Exception as e:
            self.logger.error(repr(e))
        return result

class RocketMessageServer:
    """
    RocketMQ via an HTTP API gateway; depends on an external API service. Currently disabled.
    """
    # Fixed headers for every request to the MQ gateway.
    headers = {
        "Content-type": "application/json; charset=UTF-8",
        "Accept": "application/json",
        "Connection": "close"
    }
    logger = Logger(category='Spider', class_name='MessageServer').get_logger()

    def __init__(self, address='', init_queue_name=''):
        # Base URL of the MQ HTTP gateway, e.g. "http://host:port".
        self.messageServerAddress = address
        self.requests = requests
        # Default queue; created on the server if it does not exist yet.
        self.init_queue_name = init_queue_name
        if init_queue_name:
            if not self.is_queue_exist(init_queue_name):
                self.create_queue(init_queue_name)

    def pop_message(self, queue_name=''):
        """Pop one message from *queue_name* via the gateway.

        Returns ``(message, task_id)`` on success, ``(None, None)`` when the
        queue is empty or on any error.  The gateway base64-encodes the
        message body; it is decoded as UTF-8 here.
        """
        try:
            url = self.messageServerAddress + '/mq/popMessage?queueName=' + queue_name
            response = self.requests.get(url=url, timeout=5, headers=self.headers)
            result = json.loads(response.content)
            if result['success']:
                data = result['data']
                if data:
                    message = str(base64.b64decode(data['message']), 'utf-8')
                    task_id = data['taskId']
                    self.logger.info('Pop message success, queue=%s, message=%s' % (queue_name, message))
                    return message, task_id
                else:
                    self.logger.info('There is no message to pop from queue=%s' % queue_name)
            else:
                self.logger.error('Pop message failed, queue=%s, errorMsg=%s' % (queue_name, result['message']))

        except Exception as e:
            self.logger.error(repr(e))
        return None, None

    def send_message(self, task_id='', queue_name='', msg='', delay=0):
        """Send *msg* (optionally delayed) to *queue_name*; falls back to the
        default queue when *queue_name* is empty.  Errors are logged, not raised."""
        try:
            if not queue_name:
                queue_name = self.init_queue_name
            message = Message(task_id=task_id, message=msg, delay=delay)
            message.queueName = queue_name
            url = self.messageServerAddress + '/mq/sendMessage'
            response = self.requests.post(
                url=url,
                headers=self.headers,
                data=json.dumps(message.to_json()).encode('utf-8'),
                timeout=5)
            result = json.loads(response.content)
            if result['success']:
                self.logger.info('Send message success, queue=%s, message=%s' % (queue_name, msg))
            else:
                self.logger.error('Send message failed, queue=%s, errorMsg=%s' % (queue_name, result['message']))
        except Exception as e:
            self.logger.error(repr(e))

    def create_queue(self, queue_name=''):
        """Ask the gateway to create *queue_name*; outcome is only logged."""
        try:
            url = self.messageServerAddress + '/mq/createQueue?queueName=' + queue_name
            response = self.requests.get(url=url, timeout=5, headers=self.headers)
            result = json.loads(response.content)
            if result['success']:
                self.logger.info('Create queue %s success' % queue_name)
            else:
                self.logger.error('Create queue %s failed, msg=%s' % (queue_name, result['message']))
        except Exception as e:
            self.logger.error(repr(e))

    def is_queue_exist(self, queue_name=''):
        """Return True if the gateway reports *queue_name* exists; False on
        a negative answer or on any error (so errors lead to a create attempt)."""
        try:
            url = self.messageServerAddress + '/mq/isQueueExist?queueName=' + queue_name
            response = self.requests.get(url=url, timeout=5, headers=self.headers)
            result = json.loads(response.content)
            if result['success']:
                if str(result['data']['Exists']) == 'true':
                    return True
            else:
                self.logger.error('Connecting messageServer error, msg=%s' % result['message'])
        except Exception as e:
            self.logger.error(repr(e))
        return False

    def get_queue_size(self, queue_name=''):
        """Return the queue depth reported by the gateway; 0 on any error."""
        try:
            url = self.messageServerAddress + '/mq/getQueueSize?queueName=' + queue_name
            response = self.requests.get(url=url, timeout=5, headers=self.headers)
            result = json.loads(response.content)
            if result['success']:
                return int(result['data']['queueSize'])
        except Exception as e:
            self.logger.error(repr(e))
        return 0


class CustomMysql(object):
    """MySQL helper backed by a DBUtils connection pool.

    WARNING: ``join_sql`` builds SQL by naive string interpolation, wrapping
    values only in single quotes -- values containing quotes break the
    statement and the approach is open to SQL injection.  Use it only with
    trusted, internally-generated data; migrating callers to parameterized
    queries (``cursor.execute(sql, params)``) is recommended.
    """
    logger = Logger(category='Reaper', class_name='CustomMysql').get_logger()
    # Statement modes accepted by join_sql().
    INSERT = 1
    DELETE = 2
    UPDATE = 3
    SELECT = 4

    def __init__(self, host='', port=3306, db='', user='', pwd='', charset='utf8'):
        """Create the connection pool (at most 10 concurrent connections)."""
        self.mysqlPool = PooledDB(
            pymysql,
            maxconnections=10,
            host=host,
            port=port,
            user=user,
            passwd=pwd,
            db=db,
            charset=charset)

    def join_sql(self, mode=0, table='', data=None, query=None):
        """Assemble a SQL statement for *table*.

        :param mode:  one of INSERT / DELETE / UPDATE / SELECT
        :param data:  dict of column -> value (INSERT / UPDATE)
        :param query: dict of column -> value, ANDed in the WHERE clause
        :return:      the SQL string (also logged)
        :raises Exception: when mode and supplied arguments don't match
        """
        sql = ''
        if mode == self.INSERT and data:
            k = ['`%s`' % x for x in data.keys()]
            v = ["'%s'" % x for x in data.values()]
            params = (table, ', '.join(k), ', '.join(v))
            sql = 'insert into `%s` (%s) values (%s)' % params
        elif mode == self.DELETE and query:
            qd = [str.format("`{}`='{}'", k, v) for k, v in query.items()]
            params = (table, ' and '.join(qd))
            sql = 'delete from `%s` where %s' % params
        elif mode == self.UPDATE and data and query:
            sd = [str.format("`{}`='{}'", k, v) for k, v in data.items()]
            qd = [str.format("`{}`='{}'", k, v) for k, v in query.items()]
            params = (table, ', '.join(sd), ' and '.join(qd))
            sql = 'update `%s` set %s where %s' % params
        elif mode == self.SELECT:
            sql = 'select * from `%s`' % table
            if query:
                qd = [str.format("`{}`='{}'", k, v) for k, v in query.items()]
                params = ' and '.join(qd)
                if params:
                    sql = sql + ' where %s' % params
        else:
            raise Exception('There is no match to mode')
        self.logger.info('%s' % sql)
        return sql

    def _execute_write(self, sql):
        """Run a write statement: commit on success, rollback + log on failure.

        Returns True on success, False on any error.
        """
        conn = None
        cursor = None
        try:
            conn = self.mysqlPool.connection()
            cursor = conn.cursor()
            cursor.execute(sql)
            conn.commit()
            return True
        except Exception as e:
            self.logger.error(repr(e))
            # Bug fix: conn is None when connection() itself failed; the
            # unconditional rollback() used to raise AttributeError here.
            if conn:
                conn.rollback()
        finally:
            if cursor:
                cursor.close()
            if conn:
                conn.close()
        return False

    def _fetch(self, sql, many=False):
        """Run a SELECT with a DictCursor; one row (or all rows) or None on error."""
        conn = None
        cursor = None
        try:
            conn = self.mysqlPool.connection()
            cursor = conn.cursor(pymysql.cursors.DictCursor)
            cursor.execute(sql)
            # Commit after read keeps the original behavior (refreshes the
            # snapshot under REPEATABLE READ on pooled connections).
            conn.commit()
            return cursor.fetchall() if many else cursor.fetchone()
        except Exception as e:
            self.logger.error(repr(e))
            if conn:
                conn.rollback()
        finally:
            if cursor:
                cursor.close()
            if conn:
                conn.close()
        return None

    def insert(self, table='', data=None):
        """Insert *data* (dict column -> value) into *table*; True on success."""
        if not (table and data):
            self.logger.error('Insert error; The table or data must not be none')
            return False
        sql = self.join_sql(mode=self.INSERT, table=table, data=data)
        return self._execute_write(sql)

    def delete(self, table='', query=None):
        """Delete rows matching *query* from *table*; True on success."""
        if not (table and query):
            self.logger.error('Delete error; The table or query must not be none')
            return False
        sql = self.join_sql(mode=self.DELETE, table=table, query=query)
        return self._execute_write(sql)

    def update(self, table='', data=None, query=None):
        """Update rows matching *query* with *data*; True on success.

        :param table: table name
        :param data:  dict {column: value} of new values
        :param query: dict {column: value} WHERE conditions (ANDed)
        """
        if not (table and data and query):
            self.logger.error('Update error; The table or data or query must not be none')
            return False
        sql = self.join_sql(mode=self.UPDATE, table=table, data=data, query=query)
        return self._execute_write(sql)

    def select_one(self, table='', query=None):
        """Return the first row matching *query* as a dict, or None."""
        if not (table and query):
            self.logger.error('Select_one error; The table or query must not be none')
            return None
        sql = self.join_sql(mode=self.SELECT, table=table, query=query)
        return self._fetch(sql)

    def select_list(self, table='', query=None, skip=0, limit=1):
        """Return up to *limit* rows matching *query*, starting at *skip*.

        :param table: table name
        :param query: dict {column: value} WHERE conditions (optional)
        :param skip:  offset of the first row
        :param limit: maximum number of rows returned
        :return:      list/tuple of dict rows, or None when empty or on error
        """
        if not table:
            # Copy-paste fix: the original message said 'Select_one error'.
            self.logger.error('Select_list error; The table must not be none')
            return None
        sql = self.join_sql(mode=self.SELECT, table=table, query=query)
        sql = sql + ' limit %s, %s' % (skip, limit)
        rows = self._fetch(sql, many=True)
        return rows if rows else None

    def is_exists(self, table='', query=None):
        """Return True when at least one row matches *query* in *table*."""
        if not (table and query):
            self.logger.error('Is_exists error; The table or data or query must not be none')
            return False
        sql = self.join_sql(mode=self.SELECT, table=table, query=query)
        return self._fetch(sql) is not None


class CustomEs(object):
    """Thin wrapper around the Elasticsearch client, plus raw REST helpers."""
    logger = Logger(category='Reaper', class_name='CustomEs').get_logger()
    # Single mapping type used throughout (pre-7.x style typed APIs).
    doc_type = 'doc'
    # Analyzer name shortcuts for analyze().
    ik_smart = 'ik_smart'
    ik_max_word = 'ik_max_word'
    standard = 'standard'

    def __init__(self, host=None):
        # *host* is a dict with 'host' and 'port' keys, as accepted by the client.
        self.es = Elasticsearch(hosts=[host])
        self.host = host['host']
        self.port = host['port']

    def create_index(self, index=''):
        """Create *index* unless it already exists; outcome is logged."""
        if self.es.indices.exists(index):
            self.logger.info('The index=%s has exists' % index)
            return
        self.es.indices.create(index=index)
        self.logger.info('The index=%s has created' % index)

    def delete_index(self, index=''):
        """Delete *index* when present; outcome is logged."""
        if not self.es.indices.exists(index):
            self.logger.info('There is no the index=%s' % index)
            return
        ack = self.es.indices.delete(index=index)
        if ack['acknowledged']:
            self.logger.info('The index=%s has been deleted' % index)
        else:
            self.logger.error(ack)

    def put_mapping(self, index='', mapping=None):
        """Apply *mapping* to *index* under the fixed doc type."""
        if not mapping:
            self.logger.warning('The mapping must not be null')
            return
        outcome = self.es.indices.put_mapping(index=index, doc_type=self.doc_type, body=mapping,
                                              include_type_name=True)
        self.logger.info(json.dumps(outcome))

    def get_mapping(self, index=''):
        """Fetch and log the mapping of *index*; returns the raw response."""
        mapping = self.es.indices.get_mapping(index=index)
        self.logger.info(json.dumps(mapping))
        return mapping

    def update_or_insert(self, index='', pid='', doc=None):
        """Upsert *doc* under id *pid* (doc_as_upsert semantics)."""
        outcome = self.es.update(
            index=index,
            id=pid,
            doc_type=self.doc_type,
            body={'doc': doc, 'doc_as_upsert': True})
        if outcome:
            self.logger.info('Upsert index=%s _id=%s has success' % (index, outcome['_id']))
        # self.logger.info(json.dumps(outcome).decode("unicode_escape"))

    def delete_by_pid(self, index='', pid=''):
        """Delete the document with id *pid* from *index*."""
        outcome = self.es.delete(index=index, id=pid, doc_type=self.doc_type)
        if outcome:
            self.logger.info('Delete index=%s _id=%s has success' % (index, outcome['_id']))
        # self.logger.info(json.dumps(outcome).decode("unicode_escape"))

    def analyze(self, keyword='', analyzer=''):
        """Run *keyword* through *analyzer* via the raw _analyze endpoint.

        Returns the parsed response dict, or None on a non-200 status.
        """
        endpoint = 'http://%s:%s/_analyze/?pretty' % (self.host, self.port)
        payload = {
            "analyzer": analyzer,
            "text": keyword
        }
        response = requests.post(url=endpoint, headers={'Content-Type': 'application/json'}, json=payload)
        if response.status_code != 200:
            return None
        parsed = json.loads(response.content)
        self.logger.info(json.dumps(parsed))
        return parsed

    def query(self, index='', query=None):
        """Search *index* with the optional *query* body; None when index is empty."""
        if not index:
            return None
        if query:
            return self.es.search(index=index, body=query)
        return self.es.search(index=index)
        # for one in result['hits']['hits']:
        #     self.logger.info(json.dumps(one).decode("unicode_escape"))


if __name__ == '__main__':
    # Smoke test: push one message onto the test queue and report its depth.
    server = RedisMessageServer(
        redis_host=settings.REDIS_HOST,
        redis_port=settings.REDIS_PORT,
        init_queue_name='CRAWLER-UQ-Test')
    print(server.send_message(msg='中国'))
    print(server.get_queue_size())
    # print(server.pop_message())