# -- coding: utf-8 --

import math
import time
from functools import wraps
import logging
from pathlib import Path
import os
import numpy as np
from retrying import retry
from typing import List, Union, Tuple, Iterable
import threading


# Root-logger line format. NOTE: no level is configured here; get_logger()
# further below raises named loggers to DEBUG.
logging.basicConfig(format="[%(asctime)s] %(levelname)s | %(message)s", datefmt='%Y-%m-%d %H:%M:%S')

# Production database URLs, keyed "<name>_db_url" — consumed by DBPool via
# attribute access ("pool.<name>_db").
# NOTE(review): credentials are committed in source; consider moving them to
# environment variables or a secrets manager.
db_config = {
    "mc_db_url": "mysql+pymysql://litb_Sselect:litb_dhh8ETVM@vela.mys.read.tbox.me:3307/merchant_center_vela_v1",
    "crawler_db_url": "mysql+pymysql://bi_crawler:bi@First123!@172.23.8.28:3306/crawler",  # mysql -h172.23.8.28 -ubi_crawler -p'bi@First123!' -A crawler
    #"crawler_db_url": "mysql+pymysql://crawler:crawler@localhost/crawler_db",
    "bi_oracle_db_url": "oracle://waapp:WazzZZ@172.16.0.178:1521/dw01",
    "sprout_db_url": "postgresql://sprout:sprout@10.50.0.209/sprout",
    # "sprout_db_url": "postgresql://sprout:sprout@192.168.160.20/sprout",
    "ez_db_url": "mysql+pymysql://ranking_push:NyF3bxNYHECwOQwI@10.20.30.153/ez_data_port",  # mysql -h10.20.30.153 -uranking_push -pNyF3bxNYHECwOQwI -A ez_data_port
    "aims_db_url": "mysql+pymysql://litbops:9NLYoEM5t5c9@172.19.5.154/aims",  # mysql -h172.19.5.154 -ulitbops -p9NLYoEM5t5c9 -A aims
    "pms_db_url": "mysql+pymysql://pms_sprout:SM9bgx6XKr@172.23.8.248/lingxiao_pms",
    "pc_bps_db_url": "mysql+pymysql://litb_sprout:XtuwEJUBHQ@172.23.8.245:3306/lingxiao_pc_bps",
    "recomm_db_url": "postgresql://recomm:bz8!@m7k8a@10.50.0.217/litb_recomm",
    "recomm_eu_db_url": "postgresql://recomm:bz8!@90!6liGHt@10.242.3.172/litb_recomm",
}

# Overrides used when DBPool runs with use_test_db=True; any key missing here
# falls back to db_config (see DBPool._get_db_url).
test_db_config = {
    "sprout_db_url": "postgresql://sprout:sprout@10.50.0.209/sprout_test",
    "recomm_eu_db_url": "",  # The EU site has no test environment; accessing it raises AttributeError
    "recomm_db_url": "postgresql://recomm:bz8!@m7k8a@10.50.0.217/test_recomm",
}

# Milvus image-feature-engine connection settings.
feature_engine_config = {
    "host": "10.50.0.219",
    "port": "19530",
    "collection_name": "litb_img_vgg16_cos"
}


def get_command_param(k: str, default=None, type=str):
    """Read a single ``--k`` option from the command line.

    Unknown arguments are ignored (``parse_known_args``), so this is safe to
    call from scripts that also parse their own options.

    Parameters
    ----------
    k: option name; registered as ``--<k>``.
    default: value returned when the option is absent.
    type: converter applied to a supplied value (parameter name kept for
        backward compatibility even though it shadows the builtin).
    """
    from argparse import ArgumentParser

    parser = ArgumentParser()
    parser.add_argument(f"--{k}", default=default, type=type)
    known, _ignored = parser.parse_known_args()
    return getattr(known, k)


class DBPool:
    """Lazily-created database pool; currently supports sprout_db.

    Any attribute access of the form ``pool.xxx_db`` creates (once) and
    returns a ``records.Database`` built from the ``xxx_db_url`` entry in
    db_config / test_db_config. Usable as a context manager; on exit all
    cached databases are closed.
    """
    def __init__(self, use_test_db=None, pool_size=5):
        # SQLAlchemy connection-pool size for each created database.
        self.pool_size = pool_size
        # Cache: attribute name ("xxx_db") -> live records.Database instance.
        self._dbs = {}
        # Serializes lazy creation in __getattr__ (double-checked locking).
        self._lock = threading.Lock()

        # If not given explicitly, read the --use_test_db command-line flag.
        if use_test_db is None:
            self.use_test_db = get_command_param("use_test_db") == "true"
        else:
            self.use_test_db = bool(use_test_db)

    def _get_db_url(self, key):
        """Resolve the URL for *key*, preferring test_db_config in test mode."""
        url = None
        if self.use_test_db:
            url = test_db_config.get(key)

        # Fall back to production config when no test URL is configured.
        if url is None:
            url = db_config.get(key)

        return url

    def remove(self, item):
        """Close and forget the cached database registered under *item*."""
        if item in self._dbs:
            self._dbs[item].close()
            del self._dbs[item]

    def _make_close(self, item):
        """Build a decorator that logs before delegating to the db's close()."""
        def wrap_close(fn):
            def wrapper(*args, **kwargs):
                get_logger("DBPool").debug(f"close db: {item}")
                # del self._dbs[item]
                return fn(*args, **kwargs)
            return wrapper
        return wrap_close

    def __getattr__(self, item):
        # Only names ending in "_db" are treated as lazily-created databases;
        # anything else raises AttributeError as usual.
        if isinstance(item, str) and item.endswith("_db"):
            db = self._dbs.get(item)
            # (Re)create the db if it was never made or has been closed.
            if not (db and db.open):
                with self._lock:
                    # Double-check inside the lock: another thread may have
                    # created it while we were waiting.
                    db = self._dbs.get(item)
                    if not (db and db.open):
                        url = self._get_db_url(item + "_url")
                        if url:
                            get_logger("DBPool").debug(f"create db {item} for url: {url}")
                            import records
                            db = records.Database(url, pool_pre_ping=True, max_overflow=10, pool_size=self.pool_size, pool_recycle=7200)
                            # Register a logging close method on the db.
                            db.close = self._make_close(item)(db.close)
                            db.db_name = item
                            self._dbs[item] = db

            # NOTE(review): if no URL is configured but a stale closed db is
            # still cached, the closed db is returned here — confirm whether
            # callers would rather see AttributeError in that case.
            if db:
                return db
            else:
                raise AttributeError("no config for db: " + str(item))
        else:
            raise AttributeError("no attribute: " + str(item))

    def close(self):
        """Close every cached database and clear the cache."""
        for db in self._dbs.values():
            db.close()
        self._dbs.clear()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()


def ceil_split_size(total_size, split_size):
    """Return the per-batch size that spreads *total_size* items evenly over
    the number of batches implied by ceil_split_batch; 0 when there is
    nothing to split."""
    n_batches = ceil_split_batch(total_size, split_size)
    if n_batches <= 0:
        return 0
    return math.ceil(total_size / n_batches)


def ceil_split_list(k_list: list, split_size):
    """Split *k_list* into chunks close to *split_size*, growing the chunk
    size slightly when the remainder would otherwise force one extra chunk.
    Well suited to partitioning work for thread/process pools.

    Raises ValueError when split_size is not positive.
    """
    if split_size <= 0:
        raise ValueError(f"split_size should be greater than 0, but get {split_size}")

    per_batch = ceil_split_size(len(k_list), split_size)
    return [] if per_batch <= 0 else list_split(k_list, per_batch)


def debug_duration(fn):
    """Decorator that logs how long each call to *fn* takes (DEBUG level).

    Timing is only logged on successful return; exceptions propagate
    without a log line, matching the original behavior.
    """
    @wraps(fn)
    def timed(*args, **kwargs):
        started = time.time()
        result = fn(*args, **kwargs)
        elapsed = time.time() - started
        get_logger("debug_duration").debug(f"LogTime: {fn.__name__} took {elapsed: .5f} s")
        return result
    return timed


def list_split(items: list, size):
    """Split *items* into consecutive chunks of at most *size* elements.

    Parameters
    ----------
    items: list to split.
    size: maximum chunk length; must be a positive int for non-empty input.

    Returns a list of list chunks; the last chunk may be shorter. Empty
    input returns [] (kept as an explicit guard so size is never validated
    against an empty list, preserving the original edge behavior).
    """
    # Idiomatic truthiness check instead of len(...) == 0.
    if not items:
        return []
    return [items[i:i + size] for i in range(0, len(items), size)]


def dict_split(d: dict, size):
    """Split the keys of *d* into chunks of at most *size* keys each.

    Values are discarded — only keys are chunked (original contract).
    Returns [] for an empty dict.
    """
    if not d:
        return []
    # list(d) iterates keys directly; the original materialized unused
    # values via `[k for k, v in d.items()]`.
    return list_split(list(d), size)


def ceil_split_batch(total_size, split_size):
    """Number of batches needed to cover *total_size* items at *split_size*
    per batch (ceiling division); 0 when there is nothing to split."""
    if total_size == 0:
        return 0
    quotient, remainder = divmod(total_size, split_size)
    return quotient + 1 if remainder else quotient


@retry(stop_max_attempt_number=2)
def notify_dev(msg: str):
    """Notify the dev team by POSTing *msg* (UTF-8 encoded) to the sprout
    WeChat relay endpoint. Retried once more on failure via @retry."""
    import requests
    requests.post("http://sprout.elitb.com/api/weixin/sendmsg", data=msg.encode("utf-8"))


@retry(stop_max_attempt_number=2)
def notify_sprout(username: Union[str, Iterable[str]], msg: str):
    """
    Send *msg* to the specified user(s) via the sprout WeChat relay.

    Parameters
    ----------
    username: a single username or an iterable of usernames; a falsy value
        makes this a no-op.
    msg: message body, sent UTF-8 encoded. Retried once more on failure.
    """
    if not username:
        return
    import requests
    # An iterable of names is joined into one comma-separated query parameter.
    to_users = username if isinstance(username, str) else ",".join(username)
    requests.post(f"http://sprout.elitb.com/api/weixin/sprout/sendmsg/user?userName={to_users}", data=msg.encode("utf-8"))


class FileCache:
    """Simple pickle-based file cache stored under ``~/tmp/<context>/<fname>``.

    BUG FIX: ``get_with_cache`` used ``isinstance(v, callable)``, which always
    raises TypeError because ``callable`` is a builtin function, not a class.
    It now uses ``callable(v)``.
    """

    def __init__(self, context, fname):
        # Cache file path; parent directories are created eagerly.
        self.p = Path("~/tmp").expanduser() / context / fname
        if not self.p.parent.is_dir():
            self.p.parent.mkdir(parents=True)

    def has_cache(self):
        """True if the cache file exists and is non-empty."""
        return self.p.is_file() and os.path.getsize(self.p) > 0

    def save(self, obj):
        """Pickle *obj* to the cache file; saving None deletes the cache instead."""
        if obj is None:
            if self.p.is_file():
                # `os` is already imported at module level; the original
                # shadowed it with a redundant local import.
                os.remove(str(self.p))
            return

        import pickle
        with open(str(self.p), "wb") as f:
            pickle.dump(obj, f)

    def load(self):
        """Unpickle and return the cached object, or None when no cache file exists.

        NOTE: unpickling executes arbitrary code — only use with files this
        process wrote itself.
        """
        if self.p.is_file():
            import pickle
            return pickle.loads(self.p.read_bytes())
        return None

    def get_with_cache(self, v=None):
        """Return the cached value if present; otherwise obtain it from *v*.

        *v* may be a zero-argument callable (lazily invoked) or a plain
        value. A non-None result is written back to the cache.
        """
        if self.has_cache():
            return self.load()
        r = v() if callable(v) else v
        if r is not None:
            self.save(r)
        return r


def file_cache(context, fname, flag=True):
    """Decorator caching a function's result in ``FileCache(context, fname)``.

    Parameters
    ----------
    context, fname: cache location, see FileCache.
    flag: bool or zero-argument callable; the cache is only read when a
        cache file exists AND flag() is truthy. The result is always saved.

    BUG FIX: the original ``isinstance(flag, callable)`` always raises
    TypeError (``callable`` is a function, not a type); ``callable(flag)``
    is the correct check.
    """
    if not callable(flag):
        flag = (lambda: True) if flag else (lambda: False)

    def wrapper(fn):
        @wraps(fn)
        def do_cache(*args, **kwargs):
            fc = FileCache(context, fname)
            if fc.has_cache() and flag():
                return fc.load()

            r = fn(*args, **kwargs)

            fc.save(r)
            return r
        return do_cache
    return wrapper


def gc_follows(fn):
    """Decorator: run a full garbage collection after every call to *fn*."""
    @wraps(fn)
    def collected(*args, **kwargs):
        result = fn(*args, **kwargs)
        import gc
        gc.collect()
        return result
    return collected


def get_logger(name) -> logging.Logger:
    """Return a DEBUG-level named logger, or the root logger when *name* is falsy.

    Note: this mutates the named logger's level on every call.
    """
    if not name:
        return logging.root
    logger = logging.getLogger(name)
    logger.setLevel(logging.DEBUG)
    return logger


def group_by(data, key_fn) -> dict:
    """Group the elements of *data* into a dict of lists.

    Parameters
    ----------
    data: any iterable (list, tuple, generator, ...).
    key_fn: function extracting the grouping key from each element.

    Elements sharing a key are collected into one list, in input order.
    """
    grouped = {}
    for element in data:
        grouped.setdefault(key_fn(element), []).append(element)
    return grouped


def cos_sim(vector_a, vector_b):
    """
    Compute the cosine similarity between two vectors.

    :param vector_a: vector a (any 1-D array-like of numbers)
    :param vector_b: vector b (same length as vector_a)
    :return: cosine similarity in [-1, 1]; nan/inf (with a runtime warning)
             if either vector has zero norm, matching the original behavior.

    Rewritten without np.mat/np.matrix, which is deprecated and removed in
    NumPy 2.0; plain arrays with np.dot are equivalent for 1-D inputs.
    """
    a = np.asarray(vector_a, dtype=float).ravel()
    b = np.asarray(vector_b, dtype=float).ravel()
    num = float(np.dot(a, b))
    # Keep denom as a NumPy scalar so division by zero warns instead of
    # raising ZeroDivisionError (as the original matrix version did).
    denom = np.linalg.norm(a) * np.linalg.norm(b)
    return num / denom


class CountLog:
    """Progress counter that periodically writes a summary line to a logger.

    Tracks success / fail / unknown event counts and emits a summary every
    ``log_every_time`` events (i.e. whenever the total is a multiple of it).
    """

    def __init__(self, name, log_every_time=100, logger_name=None):
        self.success = 0
        self.fail = 0
        self.unknown = 0
        self.log_every_time = log_every_time
        self.name = name
        self.logger = get_logger(logger_name)

    @property
    def count(self):
        """Total number of events counted so far."""
        return self.success + self.fail + self.unknown

    def count_and_log(self):
        """Record an event with unknown outcome, then maybe emit a summary."""
        self.unknown += 1
        self.try_log()

    def count_success_and_log(self):
        """Record a successful event, then maybe emit a summary."""
        self.success += 1
        self.try_log()

    def count_fail_and_log(self):
        """Record a failed event, then maybe emit a summary."""
        self.fail += 1
        self.try_log()

    def try_log(self):
        """Emit a summary only when the total hits a multiple of log_every_time."""
        if not self.count % self.log_every_time:
            self.log()

    def log(self):
        """Write the current counters to the logger (INFO level)."""
        summary = f"{self.name} processed: {self.count}"
        if self.success > 0 or self.fail > 0:
            summary = f"{summary}, success: {self.success}, fail: {self.fail}, passed: {self.unknown}"
        self.logger.info(summary)


# --- Milvus / image-feature constants (env-overridable) ----------------------
MILVUS_HOST = os.getenv("MILVUS_HOST", "10.51.3.34")
# NOTE(review): default is an int but an env override yields a str — confirm
# which type the Milvus client expects downstream.
MILVUS_PORT = os.getenv("MILVUS_PORT", 19530)
LITB_COLLECTION_NAME = os.getenv("LITB_COLLECTION_NAME", "litb_img_yolo_resnet50")
LITB_COLLECTION_NAME_IP = os.getenv("LITB_COLLECTION_NAME_IP", "litb_img_yolo_resnet50_ip")
# NOTE(review): same int-vs-str caveat as MILVUS_PORT.
VECTOR_DIMENSION = os.getenv("VECTOR_DIMENSION", 2048)
DATA_PATH = os.getenv("DATA_PATH", "data/litb/big_images")
# NOTE(review): the five model-path constants below all read the SAME env var
# "OBJECT_PATH" with different defaults — if OBJECT_PATH is ever set, they all
# collapse to one value. Looks like a copy-paste slip; verify intent.
COCO_MODEL_PATH = os.getenv("OBJECT_PATH", "data/yolov3_darknet")
# OID_MODEL_PATH = os.getenv("OBJECT_PATH", "data/yolov3_darknet")
YOLO_CONFIG_PATH = os.getenv("OBJECT_PATH", "data/yolov3_darknet/yolo.yml")  # yolo.yml
OID_MODEL_PATH = os.getenv("OBJECT_PATH", "data/yolo.cfg")
OID_WEIGHT_PATH = os.getenv("OBJECT_PATH", "data/yolo.weights")
OID_CLASS_PATH = os.getenv("OBJECT_PATH", "data/yolo.names")


