#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
Author:Lijiacai
Email:1050518702@qq.com
===========================================
CopyRight@JackLee.com
===========================================
"""
import base64
import csv
import hashlib
import logging
import random
import redis
import pymysql
import cx_Oracle
import jieba
import shortuuid
from hashids import Hashids
from jieba import analyse
from elasticsearch import helpers
import datetime
import math
import os
import json
import time
import requests
from elasticsearch import Elasticsearch
import numpy as np
from sklearn.preprocessing import StandardScaler

os.environ["NLS_LANG"] = "SIMPLIFIED CHINESE_CHINA.UTF8"

es_cluster = ["http://117.78.42.21:8052/"]


class RandomString():
    """Helpers for random ids, md5 digests and base64 (de)coding."""

    def get_hashid(self, min_length=16, salt="PAND"):
        """
        Generate a random, hashid-encoded string.

        :param min_length: minimum length of the generated hashid
        :param salt: salt fed to the Hashids encoder
        :return: the encoded hashid string
        """
        # Mix a high-resolution timestamp with a random 8-digit number so
        # consecutive calls produce distinct ids.
        ts = int(time.time() * 10000000)
        num = int(shortuuid.ShortUUID(alphabet='0123456789').random(length=8))
        hashids_ = Hashids(min_length=min_length, salt=salt)
        hashid = hashids_.encode(ts, num)
        return hashid

    def get_md5(self, s):
        """Return the hex md5 digest of string *s* (UTF-8 encoded)."""
        m = hashlib.md5()
        m.update(s.encode(encoding='utf-8'))
        return m.hexdigest()

    def get_base64(self, s):
        """Base64-encode *s*; str input is UTF-8 encoded first (bytes pass through)."""
        # previously a str argument raised TypeError inside b64encode
        if isinstance(s, str):
            s = s.encode("utf-8")
        return base64.b64encode(s)

    def de_base64(self, s):
        """Base64-decode *s* (accepts str or bytes)."""
        return base64.b64decode(s)


class Oracle():
    """Thin wrapper around a cx_Oracle connection/cursor pair."""

    # fetch() only dispatches to these cursor methods (no eval of caller text)
    _FETCH_METHODS = ("fetchone", "fetchall", "fetchmany")

    def __init__(self, host="116.62.186.185", port="1521", service_name="ORCL", user=None, password=None,
                 conf=None):
        """
        Connect to Oracle; entries in *conf* override the keyword defaults.

        :param conf: optional dict with keys host/port/service_name/user/password
        :raises Exception: when the connection cannot be established
        """
        conf = {} if conf is None else conf
        host = conf.get("host", host)
        port = conf.get("port", port)
        # was conf.get("ORCL", ...), which never matched the documented key,
        # so a service_name supplied in conf was silently ignored
        service_name = conf.get("service_name", service_name)
        user = conf.get("user", user)
        password = conf.get("password", password)
        try:
            dsn_tns = cx_Oracle.makedsn(host, port, service_name=service_name)
            self.client = cx_Oracle.connect(
                user=user, password=password, dsn=dsn_tns)
            self.cursor = self.client.cursor()
        except Exception as e:
            self.output(str(e))
            raise Exception("---Connnect Error---")

    def fetch(self, query=""):
        """
        Fetch rows from the last executed SELECT.

        :param query: a fetch call as text: "fetchone()", "fetchall()" or
                      "fetchmany(size=20)" (only the size argument is supported)
        :return: the fetched row(s), or None when the call fails
        """
        try:
            name, _, rest = query.partition("(")
            name = name.strip()
            if name not in self._FETCH_METHODS:
                raise ValueError("unsupported fetch query: %r" % query)
            method = getattr(self.cursor, name)
            arg_text = rest.rstrip().rstrip(")").strip()
            if arg_text:
                # accept "size=<int>" or a bare integer
                size = int(arg_text.split("=", 1)[-1])
                return method(size)
            return method()
        except Exception as e:
            self.output(str(e))

    def execute(self, query="", args=None):
        """
        Execute a SQL statement.

        :param query: the statement text
        :param args: optional bind parameters (previously silently ignored)
        """
        try:
            if args is None:
                self.cursor.execute(query)
            else:
                self.cursor.execute(query, args)
        except Exception as e:
            self.output(str(e))

    def commit(self):
        """Commit the current transaction."""
        try:
            self.client.commit()
        except Exception as e:
            self.output(str(e))

    def __del__(self):
        try:
            self.close()
        except:
            pass

    def close(self):
        """Close cursor then connection; teardown errors are ignored."""
        try:
            self.cursor.close()
        except Exception:
            pass
        try:
            self.client.close()
        except Exception:
            pass

    def output(self, arg):
        """Report an error message (print for now; switch to logging when wired up)."""
        print(str(arg))
        # logging.exception(str(arg))


class MySQLDB():
    """pymysql wrapper with lazy (re)connection and streaming reads.

    Class-level ``conf_mysql`` entries override constructor keyword defaults.
    """
    conf_mysql = {}

    def __init__(self, host=None, port=3306, user="root", password="123456", db="test",
                 ssl_ca="", ssl_cert="", ssl_key="",
                 cursorclass="pymysql.cursors.SSCursor"):
        """Prepare connection arguments; no connection is opened yet."""
        self.db_mysql = self.conf_mysql.get("db", db)
        cursorclass = self.conf_mysql.get("cursorclass", cursorclass)
        ssl_ca = self.conf_mysql.get("ssl_ca", ssl_ca)
        ssl_cert = self.conf_mysql.get("ssl_cert", ssl_cert)
        ssl_key = self.conf_mysql.get("ssl_key", ssl_key)
        if ssl_ca:
            # pymysql expects the ssl argument as a flat dict with keys
            # ca/cert/key; the previous body nested it under "ssl" and used a
            # non-existent "ssl_key" key
            ssl = {"ca": ssl_ca, "cert": ssl_cert, "key": ssl_key}
        else:
            ssl = None
        self.connect_args = {"host": self.conf_mysql.get("host", host),
                             "port": int(self.conf_mysql.get("port", port)),
                             "passwd": self.conf_mysql.get("password", password),
                             "user": user,
                             "ssl": ssl,
                             # resolve the dotted path explicitly instead of eval()
                             "cursorclass": self._resolve_cursorclass(cursorclass)}
        self.cursor = None
        self.client = None

    @staticmethod
    def _resolve_cursorclass(cursorclass):
        """Resolve a dotted path like 'pymysql.cursors.SSCursor' to the class object.

        Non-string values (already a class) pass through unchanged.
        """
        if not isinstance(cursorclass, str):
            return cursorclass
        import importlib
        module_name, _, attr = cursorclass.rpartition(".")
        return getattr(importlib.import_module(module_name), attr)

    def connect(self, **connect_args):
        """Open the connection/cursor and select the configured database.

        :raises Exception: when pymysql cannot connect
        """
        try:
            self.client = pymysql.connect(**connect_args)
            self.cursor = self.client.cursor()
            self.client.select_db(self.db_mysql)
        except Exception as e:
            raise Exception("---Connect MysqlServer Error--- [%s]" % str(e))

    @staticmethod
    def connect_again(func):
        """Decorator: connect lazily before the wrapped method runs."""
        def wrapper(self, *args, **kwargs):
            if self.cursor is None:
                self.connect(**self.connect_args)
            # the old wrapper called func(*args) without self, so any
            # decorated method raised TypeError
            return func(self, *args, **kwargs)

        return wrapper

    def execute(self, sql=None):
        """Execute *sql*, connecting first if needed."""
        if self.cursor is None:
            self.connect(**self.connect_args)
        self.cursor.execute(sql)

    def read_all(self):
        """Return all remaining rows of the current result set."""
        if self.cursor is None:
            self.connect(**self.connect_args)
        return self.cursor.fetchall()

    def read_many(self, size):
        """Yield chunks of up to *size* rows until the result set is exhausted."""
        if self.cursor is None:
            self.connect(**self.connect_args)
        while True:
            result = self.cursor.fetchmany(size=size)
            if not result:
                break
            yield result

    def read_one(self):
        """Yield rows one at a time until the result set is exhausted."""
        if self.cursor is None:
            self.connect(**self.connect_args)
        while True:
            result = self.cursor.fetchone()
            if not result:
                break
            yield result

    def commit(self):
        """Commit the current transaction."""
        self.client.commit()

    def rollback(self):
        """Roll back the current transaction."""
        self.client.rollback()

    def __del__(self):
        try:
            self.close()
        except:
            pass

    def close(self):
        """Close cursor and connection, then reset both so the next call reconnects."""
        try:
            self.cursor.close()
        except Exception:
            pass
        try:
            self.client.close()
        except Exception:
            pass
        self.cursor = None
        self.client = None

    def output(self, arg):
        """Log *arg* with a stack trace."""
        logging.exception(str(arg))

    def create_database(self):
        """Create the configured database if it does not exist (best effort).

        The db name comes from trusted configuration; SQL identifiers cannot
        be bound as parameters, hence the string interpolation.
        """
        if self.cursor is None:
            self.connect(**self.connect_args)
        try:
            self.cursor.execute('CREATE DATABASE IF NOT EXISTS %s' % self.db_mysql)
        except Exception:
            pass


class ESClient(RandomString):
    """
    Batch read/write wrapper around an Elasticsearch client; bulk writes
    keep the load on the cluster low.
    """
    es_cluster = []

    def __init__(self, cluster=None, **kwargs):
        """Build the underlying client for *cluster* (falls back to the class-level es_cluster)."""
        self.cluster = cluster if cluster else self.es_cluster
        self.kwargs = kwargs
        self.client = Elasticsearch(self.cluster, timeout=30, max_retries=10, retry_on_timeout=True, **self.kwargs, )

    def bulk(self, actions):
        """Send *actions* to the cluster in a single bulk request."""
        helpers.bulk(self.client, actions=actions)

    def __del__(self):
        self.close()

    def close(self):
        """Drop the client reference; any teardown error is swallowed."""
        try:
            del self.client
        except:
            pass

    def output(self, arg):
        """Log *arg* with a stack trace."""
        logging.exception(str(arg))
        # print(str(arg))


class ESApi():
    """Tiny REST convenience layer over the requests library."""

    def restful(self, url, method, **kwargs):
        """Perform an HTTP request and hand back the raw Response.

        :param url: target URL
        :param method: HTTP verb, e.g. "GET" or "POST"
        :param kwargs: forwarded verbatim to requests.request
        """
        resp = requests.request(method=method, url=url, **kwargs)
        return resp


class Oracle2Es(RandomString):
    """Stream rows from Oracle and bulk-index them into Elasticsearch.

    NOTE(review): connection credentials are hard-coded below; they should
    come from configuration or a secret store.
    """
    # Oracle connection settings consumed by Oracle(conf=...)
    oracle_conf = {
        "host": "120.55.62.114",
        "port": "61521",
        "service_name": "ORCL",
        "user": "devcq",
        "password": "baIaGbnx9C"
    }
    # module-level cluster list defined at the top of this file
    es_cluster = es_cluster

    def __init__(self):
        # one Oracle connection and one ES client per instance
        self.client = Oracle(conf=self.oracle_conf)
        self.es_client = ESClient(cluster=self.es_cluster)

    def close(self):
        """Close both clients; teardown errors are deliberately swallowed."""
        try:
            self.client.close()
            self.es_client.close()
        except:
            pass

    def __del__(self):
        try:
            self.close()
        except:
            pass

    def _id_func(self, data):
        """Default document-id generator: md5 of current time plus a random int (*data* is ignored)."""
        return self.get_md5(str(time.time()) + str(random.randint(1, 1000)))

    def json_data(self, data):
        """Format datetime values in *data* as strings (mutates *data* in place).

        NOTE(review): unlike Mysql2Es.json_data this returns the dict itself,
        not a JSON string — confirm the asymmetry is intentional.
        """
        for k, v in data.items():
            if isinstance(v, datetime.datetime):
                v = v.strftime("%Y-%m-%d %H:%M:%S")
                data[k] = v

        return data

    def run(self, sql: str, sql_fields: list, index: str, es_fields=None, apply_func=None):
        """Execute *sql* and index every resulting row into *index*.

        :param sql: SELECT statement to run
        :param sql_fields: column names, in the same order as the SELECT list
        :param index: target Elasticsearch index
        :param es_fields: optional mapping {es_field: sql_field}; when given,
            documents contain only the mapped fields plus a DATA copy of the row
        :param apply_func: optional callable(row_dict) -> dict merged into the row
        """
        self.client.execute(sql)
        # data = self.client.fetch(query="fetchall()")
        es_fields = {} if es_fields == None else es_fields
        out = []
        while True:
            # stream one row at a time to keep memory bounded
            i = self.client.fetch(query="fetchone()")
            if not i:
                break
            one = {}
            for j in range(len(i)):
                one[sql_fields[j]] = i[j]
            if apply_func:
                one.update(apply_func(one))

            if es_fields:
                new_one = {}
                for k, v in es_fields.items():
                    k1 = one.get(v, "None")
                    if k1 == "None":
                        # column missing from the row: the *column name* is
                        # stored as a placeholder value.
                        # NOTE(review): this also triggers on a literal "None"
                        # string value — confirm that is acceptable.
                        new_one[k] = v
                    elif k1 == None:
                        new_one[k] = None
                    else:
                        new_one[k] = k1
                    # re-assigned on every loop iteration; harmless but redundant
                    new_one["DATA"] = self.json_data(one)
                es_data = {'_op_type': 'index',
                           '_index': index,
                           '_type': 'doc',
                           '_source': new_one}
            else:
                es_data = {'_op_type': 'index',
                           '_index': index,
                           '_type': 'doc',
                           '_source': one}

            out.append(es_data)
            if len(out) == 10000:
                # flush in batches of 10000 actions
                self.es_client.bulk(actions=out)
                out = []

        if out:
            self.es_client.bulk(actions=out)

    def update(self, sql: str, sql_fields: list, index: str, es_fields=None, _id_func=None):
        """Same as run(), but attaches an explicit _id (via *_id_func*) to each action.

        :param _id_func: callable(document) -> id string; defaults to self._id_func
        """
        self.client.execute(sql)
        # data = self.client.fetch(query="fetchall()")
        if _id_func == None:
            _id_func = self._id_func
        es_fields = {} if es_fields == None else es_fields
        out = []
        while True:
            i = self.client.fetch(query="fetchone()")
            if not i:
                break
            one = {}
            for j in range(len(i)):
                one[sql_fields[j]] = i[j]

            if es_fields:
                new_one = {}
                for k, v in es_fields.items():
                    k1 = one.get(v, "None")
                    if k1 == "None":
                        # missing column: store the column name as placeholder
                        new_one[k] = v
                    elif k1 == None:
                        new_one[k] = None
                    else:
                        new_one[k] = k1
                    new_one["DATA"] = self.json_data(one)
                es_data = {'_op_type': 'index',
                           '_index': index,
                           '_type': 'doc',
                           "_id": _id_func(new_one),
                           '_source': new_one}
            else:
                es_data = {'_op_type': 'index',
                           '_index': index,
                           '_type': 'doc',
                           "_id": _id_func(one),
                           '_source': one}

            out.append(es_data)
            if len(out) == 10000:
                self.es_client.bulk(actions=out)
                out = []

        if out:
            self.es_client.bulk(actions=out)


class Mysql2Es(RandomString):
    """Stream rows out of MySQL and bulk-index them into Elasticsearch.

    NOTE(review): connection credentials are hard-coded below; they should
    come from configuration or a secret store.
    """
    conf_mysql = {
        "host": "rm-bp16a28yud1nxv2q1to.mysql.rds.aliyuncs.com",
        "port": 3306,
        "user": "disp_business_ro",
        "password": "VB+kznV48D",
        "cursorclass": "pymysql.cursors.SSCursor",
        "db": "sys_dispatcher_business"
    }
    es_cluster = es_cluster

    def __init__(self):
        # one streaming MySQL connection and one ES client per instance
        self.client = MySQLDB(**self.conf_mysql)
        self.es_client = ESClient(cluster=self.es_cluster)

    def close(self):
        """Close both clients; teardown errors are deliberately swallowed."""
        try:
            self.client.close()
            self.es_client.close()
        except:
            pass

    def __del__(self):
        try:
            self.close()
        except:
            pass

    def _id_func(self, data):
        """Default document-id generator: md5 of current time plus a random int (*data* is ignored)."""
        return self.get_md5(str(time.time()) + str(random.randint(1, 1000)))

    def json_data(self, data):
        """Stringify datetime values in *data* (in place) and return the dict as a JSON string."""
        for k, v in data.items():
            if isinstance(v, datetime.datetime):
                data[k] = v.strftime("%Y-%m-%d %H:%M:%S")

        return json.dumps(data, ensure_ascii=False)

    def _row_to_dict(self, sql_fields, row):
        """Pair positional *row* values with their *sql_fields* column names."""
        return dict(zip(sql_fields, row))

    def _map_fields(self, one, es_fields):
        """Project row *one* onto the ES mapping *es_fields* ({es_field: sql_field}).

        Missing columns are stored as the column name (historical placeholder
        behavior); the whole row is serialized into the DATA field.
        """
        new_one = {}
        for es_key, sql_key in es_fields.items():
            value = one.get(sql_key, "None")
            if value == "None":
                # column missing from the row (or literally "None"): keep the
                # column name as a placeholder value
                new_one[es_key] = sql_key
            elif value is None:
                new_one[es_key] = None
            else:
                new_one[es_key] = value
        new_one["DATA"] = self.json_data(one)
        return new_one

    def run(self, sql: str, sql_fields: list, index: str, es_fields=None, apply_func=None):
        """Execute *sql* and bulk-index every resulting row into *index*.

        :param sql: SELECT statement to run
        :param sql_fields: column names, in the same order as the SELECT list
        :param index: target Elasticsearch index
        :param es_fields: optional mapping {es_field: sql_field}
        :param apply_func: optional callable(row_dict) -> dict merged into the row
        """
        self.client.execute(sql)
        es_fields = {} if es_fields is None else es_fields
        out = []
        for row in self.client.read_one():
            one = self._row_to_dict(sql_fields, row)
            if apply_func:
                one.update(apply_func(one))
            source = self._map_fields(one, es_fields) if es_fields else one
            out.append({'_op_type': 'index',
                        '_index': index,
                        '_type': 'doc',
                        '_source': source})
            if len(out) == 10000:
                # flush in batches of 10000 actions
                self.es_client.bulk(actions=out)
                out = []

        if out:
            self.es_client.bulk(actions=out)

    def update(self, sql: str, sql_fields: list, index: str, es_fields=None, _id_func=None):
        """Like run(), but attaches an explicit _id (via *_id_func*) to each action.

        NOTE(review): the bulk calls below are commented out, so this method
        currently only prints the final batch — confirm this debug behavior
        is intentional before re-enabling.
        """
        self.client.execute(sql)
        if _id_func is None:
            _id_func = self._id_func
        es_fields = {} if es_fields is None else es_fields
        out = []
        for row in self.client.read_one():
            one = self._row_to_dict(sql_fields, row)
            source = self._map_fields(one, es_fields) if es_fields else one
            out.append({'_op_type': 'index',
                        '_index': index,
                        '_type': 'doc',
                        "_id": _id_func(source),
                        '_source': source})
            if len(out) == 10000:
                # self.es_client.bulk(actions=out)
                out = []

        if out:
            print(out)
            # self.es_client.bulk(actions=out)

    def update_worksheet(self, sql: str, sql_fields: list, index: str, es_fields=None, _id_func=None):
        """Specialized sync for worksheet rows: explodes each row into one
        document per SHEET_MEMO entry.

        :param es_fields: when falsy, rows are indexed as-is without the
            worksheet-specific transformation
        """
        self.client.execute(sql)
        if _id_func is None:
            _id_func = self._id_func
        es_fields = {} if es_fields is None else es_fields
        out = []
        for row in self.client.read_one():
            one = self._row_to_dict(sql_fields, row)
            if es_fields:
                # raises if the columns are absent/NULL — same as before
                sheet_properties = json.loads(one.get("SHEET_PROPERTIES"))
                sheet_memo = json.loads(one.get("SHEET_MEMO"))
                if not sheet_memo:
                    continue
                del one["SHEET_PROPERTIES"]
                del one["SHEET_MEMO"]
                if not one.get("CURRENT_ACCEPTER_NAME"):
                    one["CURRENT_ACCEPTER_NAME"] = "无"
                one["CUSTOMER_HEAD"] = one.get("CURRENT_ACCEPTER_NAME") + str(one.get("CURRENT_ACCEPTER_ID", ""))
                one["SHEET_ID"] = str(one["SHEET_ID"])
                one["LICENSE"] = sheet_properties.get("baseVehicleVo", {}).get("license")
                one["GET_STATION_NAME"] = sheet_properties.get("getStation", {}).get("name")
                # NOTE(review): STATION_NAME also reads getStation (not
                # returnStation) — looks copy-pasted; confirm which is intended
                one["STATION_NAME"] = sheet_properties.get("getStation", {}).get("name")
                if one["STATION_NAME"] is None:
                    one["STATION_NAME"] = "无"
                one["RET_STATION_NAME"] = sheet_properties.get("returnStation", {}).get("name")
                one["DURATION"] = sheet_properties.get("workSheetVo", {}).get("duration")
                one["GET_AREA"] = sheet_properties.get("getStation", {}).get("areaName")
                one["RET_AREA"] = sheet_properties.get("returnStation", {}).get("areaName")

                for sheet in sheet_memo:
                    one["CONTENT"] = sheet.get("organizationName", "") + ": " + sheet.get("operateName",
                                                                                          "") + "," + sheet.get(
                        "message", "")
                    one["ACTION_TIME"] = time.strftime("%Y-%m-%d %H:%M:%S",
                                                       time.localtime(sheet.get("time") / 1000))
                    # BUG FIX: build a fresh document per memo entry. The old
                    # code reused a single new_one dict across this loop, so
                    # every queued action pointed at the same (last) document.
                    new_one = self._map_fields(one, es_fields)
                    out.append({'_op_type': 'index',
                                '_index': index,
                                '_type': 'doc',
                                "_id": _id_func(new_one),
                                '_source': new_one})
                    if len(out) >= 10000:
                        self.es_client.bulk(actions=out)
                        out = []
            else:
                out.append({'_op_type': 'index',
                            '_index': index,
                            '_type': 'doc',
                            "_id": _id_func(one),
                            '_source': one})
                if len(out) >= 10000:
                    self.es_client.bulk(actions=out)
                    out = []

        if out:
            self.es_client.bulk(actions=out)


class Oracle2Csv(Oracle2Es):
    """Dump the result of an Oracle query into a CSV file."""

    def creae_dir(self, dir_):
        """Create *dir_* (best effort; an existing directory is fine).

        Method name kept as-is ("creae") for backward compatibility.
        """
        try:
            os.makedirs(dir_)
        except OSError:
            pass

    def run(self, sql: str, sql_fields: list, index: str, apply_func=None, **kwargs):
        """
        Execute *sql* and write all rows, headed by *sql_fields*, to origin.csv.

        :param sql: SELECT statement to run
        :param sql_fields: header row / column names
        :param index: used only to pre-create ./data/{index}
        :param apply_func: accepted for signature parity; not used here

        NOTE(review): ./data/{index} is created but the CSV is written to the
        working directory — confirm the intended output path.
        """
        self.creae_dir(f"./data/{index}")
        self.client.execute(sql)
        with open("origin.csv", 'w', newline='') as f:
            writer = csv.writer(f)
            writer.writerow(sql_fields)
            # stream row by row instead of buffering the whole result in memory
            while True:
                row = self.client.fetch(query="fetchone()")
                if not row:
                    break
                writer.writerow(row)


class Business(ESClient):
    """Build Elasticsearch query-DSL fragments and complete search bodies."""
    es_cluster = es_cluster

    def __init__(self, **kwargs):
        # always connect to the module-level cluster list
        super(Business, self).__init__(cluster=self.es_cluster, **kwargs)

    def wildcard(self, **kwargs):
        """Build wildcard should-clauses: each value is wrapped as *value*
        and boosted by 2**len(value) (minimum boost 2**1)."""
        out = []
        for k, v in kwargs.items():
            for i in v:
                boost = len(i)
                if boost == 0:
                    boost = 1
                out.append({
                    "wildcard":
                        {
                            k:
                                {"value": f"*{i}*", "boost": 2 ** boost}
                        },
                })
        return out

    def match_phrase(self, **kwargs):
        """Build match_phrase clauses (slop=1), boosted by 2**len(term).

        NOTE(review): the query text is wrapped in literal asterisks
        (f"*{i}*"); match_phrase does not treat '*' as a wildcard, so this
        looks copy-pasted from wildcard() — confirm intent.
        """
        out = []
        for k, v in kwargs.items():
            for i in v:
                boost = len(i)
                if boost == 0:
                    boost = 1
                out.append({
                    "match_phrase":
                        {
                            k:
                                {"query": f"*{i}*", "boost": 2 ** boost, "slop": 1}
                        },
                })
        return out

    def range_time(self, **kwargs):
        """Build a range clause from the first (field, [from, to]) pair.

        NOTE(review): returns inside the loop, so only the first keyword is
        used and `out` is dead code — confirm multi-field ranges are not
        needed. Returns None when called with no kwargs.
        """
        out = []
        for k, v in kwargs.items():
            return {"range": {k: {"from": v[0], "to": v[1]}}}

    def filter(self, **kwargs):
        """Build exact-match term clauses; None values are skipped."""
        out = []
        for k, v in kwargs.items():
            if v == None:
                continue
            out.append({
                "term":
                    {
                        k:
                            {"value": f"{v}"}
                    },
            })
        return out

    def sort(self, **kwargs):
        """Turn {field: direction, ...} into the ES sort list form."""
        out = []
        for k, v in kwargs.items():
            out.append({k: v})

        return out

    def page(self, page_num, page_size):
        """Translate a 1-based page number into ES from/size pagination."""
        out = {"from": (page_num - 1) * page_size,
               "size": page_size,
               }
        return out

    def aggs(self, fields):
        """Build a cardinality (distinct-count) aggregation per field."""
        out = {}
        for i in fields:
            out[f"distinct_{i}S"] = {
                "cardinality": {
                    "field": i
                }
            }
        return out

    def dsl(self, page_num=1, page_size=10, search_condition=None, sort_condition=None, time_condition=None,
            filter_condition=None, aggs_condition=None):
        """
        Assemble a complete Elasticsearch search body.

        :param page_num: 1-based page number, e.g. 1
        :param page_size: page size, e.g. 10
        :param search_condition: {"DATA": ["完成"]} — phrase should-clauses
        :param sort_condition: {"_score": "desc", "ACTION_TIME": "desc"}
        :param time_condition: {"ACTION_TIME": ["2020-01-01", None]}
        :param filter_condition: {"FIELD_A": "完成", "FIELD_B": "傻"} — exact term filters
        :param aggs_condition: ["HEAD"] — fields to count distinct values for
        :return: dict ready to use as an ES search body
        """
        search_condition = {} if search_condition == None else search_condition
        sort_condition = {} if sort_condition == None else sort_condition
        time_condition = {} if time_condition == None else time_condition
        filter_condition = {} if filter_condition == None else filter_condition
        aggs_condition = [] if aggs_condition == None else aggs_condition

        out = {}
        # should_ = self.wildcard(**search_condition)
        should_ = []
        match_phrase_ = self.match_phrase(**search_condition)
        filter_ = self.filter(**filter_condition)

        if time_condition:
            time_ = self.range_time(**time_condition)
            filter_.append(time_)

        # NOTE(review): the should-clauses sit inside a filter context, which
        # does not contribute to _score — confirm scoring/sorting is as intended
        out["query"] = {"bool": {"filter": {"bool": {"should": match_phrase_}}, "must": filter_, }}

        if sort_condition:
            sort_ = self.sort(**sort_condition)
            out["sort"] = sort_
        out["aggs"] = self.aggs(aggs_condition)
        out["min_score"] = 0
        out.update(self.page(page_num, page_size))
        return out

    def dsl_args(self, arguments):
        """Translate a request-style arguments dict into dsl() keyword args.

        Reads KEYWORD/page_num/page_size/STARTTIME/ENDTIME from
        arguments["condition"]; the keyword is jieba-segmented and the full
        keyword itself is appended to the search terms.
        """
        KEYWORD = arguments.get("condition", {}).get("KEYWORD", "")
        page_num = arguments.get("condition", {}).get("page_num", 1)
        page_size = arguments.get("condition", {}).get("page_size", 10)
        STARTTIME = arguments.get("condition", {}).get("STARTTIME", None)
        ENDTIME = arguments.get("condition", {}).get("ENDTIME", None)
        data = self.cut(KEYWORD)
        cut_word = data.get("cut_word")
        cut_word.append(KEYWORD)
        search_condition = {}
        search_condition["DATA"] = cut_word
        time_condition = {"ACTION_TIME": [STARTTIME, ENDTIME]}
        sort_condition = {"_score": "desc", "ACTION_TIME": "desc"}
        ##########################todo
        filter_condition = {}
        aggs_condition = []
        ##########################
        return {"page_num": page_num, "page_size": page_size, "search_condition": search_condition,
                "sort_condition": sort_condition, "time_condition": time_condition,
                "filter_condition": filter_condition, "aggs_condition": aggs_condition}

    def cut(self, text):
        """Segment *text* with jieba.

        :return: {"cut_word": unique tokens longer than one char (plus the
            whole text when it is a single char), "keyword_word": jieba
            TF-IDF top-100 tags}
        """
        seg_list = jieba.cut(text, cut_all=False)
        tags = analyse.extract_tags(text, topK=100)
        res = []
        if len(text) == 1:
            res.append(text)
        for i in list(set(seg_list)):
            if len(i) > 1:
                res.append(i)
        return {"cut_word": res, "keyword_word": list(set(tags))}


class OffLine2Es(RandomString):
    """Index an in-memory sequence of row dicts into Elasticsearch."""
    es_cluster = es_cluster

    def __init__(self, data=None):
        """Keep a handle on *data* (an iterable of dicts) and open the ES client."""
        self.es_client = ESClient(cluster=self.es_cluster)
        self.data = data

    def close(self):
        """Best-effort shutdown of the ES client."""
        try:
            self.es_client.close()
        except:
            pass

    def __del__(self):
        try:
            self.close()
        except:
            pass

    def _id_func(self, data):
        """Random document id: md5 over the current time plus a random int."""
        seed = "%s%s" % (time.time(), random.randint(1, 1000))
        return self.get_md5(seed)

    def json_data(self, data):
        """Stringify datetime values in *data* (in place) and dump it as JSON."""
        for key in data:
            value = data[key]
            if isinstance(value, datetime.datetime):
                data[key] = value.strftime("%Y-%m-%d %H:%M:%S")

        return json.dumps(data, ensure_ascii=False)

    def update(self, sql: str, sql_fields: list, index: str, es_fields=None, _id_func=None):
        """Push every dict in self.data into *index*, flushing in bulks of 10000.

        *sql* and *sql_fields* are accepted only for signature parity with the
        database-backed loaders; they are not used here.
        """
        if _id_func is None:
            _id_func = self._id_func
        if es_fields is None:
            es_fields = {}
        out = []
        for one in self.data:
            if es_fields:
                new_one = {}
                for es_key, sql_key in es_fields.items():
                    value = one.get(sql_key, "None")
                    if value == "None":
                        # missing field: store the field name as a placeholder
                        new_one[es_key] = sql_key
                    elif value is None:
                        new_one[es_key] = None
                    else:
                        new_one[es_key] = value
                    new_one["DATA"] = self.json_data(one)
                doc = new_one
            else:
                doc = one
            out.append({'_op_type': 'index',
                        '_index': index,
                        '_type': 'doc',
                        "_id": _id_func(doc),
                        '_source': doc})
            if len(out) == 10000:
                self.es_client.bulk(actions=out)
                out = []

        if out:
            self.es_client.bulk(actions=out)


class RedisDB():
    """Thin wrapper that owns a redis.Redis client instance."""

    def __init__(self, host='localhost', port=6379,
                 db=0, password=None, socket_timeout=None,
                 socket_connect_timeout=None,
                 socket_keepalive=None, socket_keepalive_options=None,
                 connection_pool=None, unix_socket_path=None,
                 encoding='utf-8', encoding_errors='strict',
                 charset=None, errors=None,
                 decode_responses=False, retry_on_timeout=False,
                 ssl=False, ssl_keyfile=None, ssl_certfile=None,
                 ssl_cert_reqs='required', ssl_ca_certs=None,
                 max_connections=None):
        """Forward every connection option verbatim to redis.Redis."""
        # every parameter mirrors redis.Redis, so hand them over as a mapping
        options = dict(locals())
        options.pop("self")
        self.client = redis.Redis(**options)

    def get_client(self):
        """Expose the underlying redis.Redis instance."""
        return self.client

    def __del__(self):
        self.close()

    def close(self):
        """Release the client reference; any teardown error is swallowed."""
        try:
            del self.client
        except:
            pass
