from __future__ import absolute_import, unicode_literals

import logging
import re

import aiopg
import aioredis
import psycopg2
import redis
from celery import Task
from elasticsearch import Elasticsearch
# from motor.motor_asyncio import AsyncIOMotorClient
from pymongo import MongoClient, errors

import conf  # FIX: referenced throughout (conf.DEV_MONGO_HOST, ...) but was never imported
import conf_yunwei


class ConnectionTask(Task):
    """Celery base task that lazily creates and caches shared connections.

    Every cache lives on the class itself, so all task instances using this
    base share one connection per target.  Targets cover MongoDB,
    Elasticsearch, Redis and PostgreSQL across dev/stg/pro environments.
    """
    abstract = True

    # --- MongoDB database handles (one slot per environment / cluster) ---
    _dmdb = None
    _dmdb_ctrip = None
    _static_ctrip = None
    _smdb = None
    _pmdb = None
    # FIX: pmdb_master previously reused the _pmdb slot, so whichever property
    # ran first decided which host *every* later caller got.
    _pmdb_master = None
    _static = None
    _pmdb_cold = None

    # --- Elasticsearch clients, keyed by index name ---
    _static_es = {}
    _static_stg_es = {}

    # --- Redis clients, keyed by db number ---
    _drdb = {}
    _srdb = {}
    _dpprdb = {}
    _prdb_mast = {}
    # FIX: rdb_zabbix and prdb_aliyun previously cached into _prdb_mast, so
    # the same db number could hand back a client for the wrong host.
    _rdb_zabbix = {}
    _prdb_aliyun = {}
    _prdb_paym = {}
    _prdb_activity = {}

    # --- PostgreSQL connections, keyed by database name ---
    _dpgdb = {}
    _spgdb = {}
    _ppgdb = {}
    _rrdb = None

    # ---------------- internal helpers ----------------

    @staticmethod
    def _cached_redis(cache, db, host, port, **extra):
        """Return (creating on first use) a StrictRedis client cached per db number."""
        if cache.get(db) is None:
            cache[db] = redis.StrictRedis(host=host, port=port, db=db,
                                          charset="utf-8",
                                          decode_responses=True, **extra)
        return cache[db]

    @staticmethod
    def _cached_pg(cache, db, dsn_template):
        """Return (creating on first use) a psycopg2 connection cached per db name."""
        if cache.get(db) is None:
            cache[db] = psycopg2.connect(dsn_template.format(db))
        return cache[db]

    # ---------------- MongoDB ----------------

    @property
    def dmdb(self):
        """Dev 'boluome' Mongo database."""
        if ConnectionTask._dmdb is None:
            ConnectionTask._dmdb = MongoClient(conf.DEV_MONGO_HOST)['boluome']
        return ConnectionTask._dmdb

    @property
    def pmdb_cold(self):
        """Production cold-storage 'boluome' Mongo database."""
        if ConnectionTask._pmdb_cold is None:
            ConnectionTask._pmdb_cold = MongoClient(conf.PRO_COLD_MONGO_HOST)['boluome']
        return ConnectionTask._pmdb_cold

    @property
    def dmdb_ctrip(self):
        """'ctrip' Mongo database on the ctrip host."""
        if ConnectionTask._dmdb_ctrip is None:
            ConnectionTask._dmdb_ctrip = MongoClient(conf.CTRIP_MONGO_HOST)['ctrip']
        return ConnectionTask._dmdb_ctrip

    @property
    def smdb(self):
        """Staging 'boluome' Mongo database."""
        if ConnectionTask._smdb is None:
            ConnectionTask._smdb = MongoClient(conf.STG_MONGO_HOST)['boluome']
        return ConnectionTask._smdb

    @property
    def pmdb(self):
        """Production 'boluome' Mongo database (default host)."""
        if ConnectionTask._pmdb is None:
            ConnectionTask._pmdb = MongoClient(conf.PRO_MONGO_HOST)['boluome']
        return ConnectionTask._pmdb

    @property
    def pmdb_master(self):
        """Production 'boluome' Mongo database on the master host.

        FIX: caches in its own ``_pmdb_master`` slot; it previously shared
        ``_pmdb`` with :attr:`pmdb` and could silently return the non-master
        connection (or vice versa) depending on access order.
        """
        if ConnectionTask._pmdb_master is None:
            ConnectionTask._pmdb_master = MongoClient(conf.PRO_MONGO_HOST_MASTER)['boluome']
        return ConnectionTask._pmdb_master

    @property
    def static(self):
        """'boluome' database on the static Mongo host."""
        if ConnectionTask._static is None:
            ConnectionTask._static = MongoClient(conf.STATIC_MONGO_HOST)['boluome']
        return ConnectionTask._static

    @property
    def static_ctrip(self):
        """'ctrip' database on the static Mongo host."""
        if ConnectionTask._static_ctrip is None:
            ConnectionTask._static_ctrip = MongoClient(conf.STATIC_MONGO_HOST)['ctrip']
        return ConnectionTask._static_ctrip

    # ---------------- Elasticsearch ----------------

    def static_es(self, collection, mappings=None):
        """Static ES cluster client for *collection*; ensures the index exists.

        ``ignore=[400, 404]`` swallows IndexAlreadyExists-style responses so a
        pre-existing index is not treated as a failure.
        """
        if ConnectionTask._static_es.get(collection) is None:
            # NOTE(review): hard-coded hosts/credentials; should move to configuration.
            _es = Elasticsearch(["192.168.1.16:9200", "192.168.1.19:9200", "192.168.1.23:9200"],
                                http_auth=('elastic', '1CHYC5IO6TrgZM66kyOw'),
                                # sniff before doing anything
                                sniff_on_start=True,
                                # refresh nodes after a node fails to respond
                                sniff_on_connection_fail=True,
                                # and also every 60 seconds
                                sniffer_timeout=60)
            _es.indices.create(index=collection, body=mappings, ignore=[400, 404])
            ConnectionTask._static_es[collection] = _es
        return ConnectionTask._static_es[collection]

    def stg_es(self, collection, mappings=None):
        """Staging ES cluster client for *collection*; ensures the index exists."""
        if ConnectionTask._static_stg_es.get(collection) is None:
            # NOTE(review): hard-coded hosts/credentials; should move to configuration.
            _es = Elasticsearch(["192.168.0.15:9200", "192.168.0.15:9201", "192.168.0.15:9202"],
                                http_auth=('elastic', '1CHYC5IO6TrgZM66kyOw'),
                                # sniff before doing anything
                                sniff_on_start=True,
                                # refresh nodes after a node fails to respond
                                sniff_on_connection_fail=True,
                                # and also every 60 seconds
                                sniffer_timeout=60)
            _es.indices.create(index=collection, body=mappings, ignore=[400, 404])
            ConnectionTask._static_stg_es[collection] = _es
        return ConnectionTask._static_stg_es[collection]

    # ---------------- Redis ----------------

    def drdb(self, db):
        """Dev Redis client for *db*."""
        return self._cached_redis(ConnectionTask._drdb, db,
                                  conf.DEV_REDIS_NEW_HOST, conf.REDIS_PORT)

    def srdb(self, db):
        """Staging Redis client for *db*."""
        return self._cached_redis(ConnectionTask._srdb, db,
                                  conf.STG_REDIS_NEW_HOST, conf.REDIS_PORT)

    def dpprdb(self, db):
        """DPP Redis client for *db*."""
        return self._cached_redis(ConnectionTask._dpprdb, db,
                                  conf.DPP_REDIS_HOST, conf.REDIS_PORT)

    def prdb_mast(self, db):
        """Production master Redis client for *db*."""
        return self._cached_redis(ConnectionTask._prdb_mast, db,
                                  conf.PRO_REDIS_MAST_HOST, conf.REDIS_PORT)

    def rdb_zabbix(self, db):
        """Zabbix Redis client for *db* (own cache; was colliding with prdb_mast)."""
        return self._cached_redis(ConnectionTask._rdb_zabbix, db,
                                  conf_yunwei.ZABBIX_REDIS_HOST, conf_yunwei.REDIS_PORT)

    def prdb_aliyun(self, db):
        """Aliyun Redis client for *db* (own cache; was colliding with prdb_mast)."""
        # NOTE(review): hard-coded password; should come from configuration.
        return self._cached_redis(ConnectionTask._prdb_aliyun, db,
                                  conf.ALIYUN_REDIS_MAST_HOST, conf.ALIYUN_REDIS_PORT,
                                  password='!QAZ2wsx')

    def prdb_paym(self, db):
        """Production payment Redis client for *db*."""
        return self._cached_redis(ConnectionTask._prdb_paym, db,
                                  conf.PRO_REDIS_PAYM_HOST, conf.REDIS_PORT)

    def prdb_activity(self, db):
        """Production activity Redis client for *db*."""
        return self._cached_redis(ConnectionTask._prdb_activity, db,
                                  conf.PR0_ACTIVITY_REDIS_HOST, conf.REDIS_PORT)

    # ---------------- PostgreSQL ----------------

    def dpgdb(self, db):
        """Dev PostgreSQL connection to database *db*."""
        # NOTE(review): hard-coded DSN credentials; should move to configuration.
        return self._cached_pg(ConnectionTask._dpgdb, db,
                               "host=192.168.0.8 dbname={} user=root password=Boluome123")

    def spgdb(self, db):
        """Staging PostgreSQL connection to database *db*."""
        return self._cached_pg(ConnectionTask._spgdb, db,
                               "host=192.168.2.10 dbname={} user=root password=Boluome123")

    def ppgdb(self, db):
        """Production PostgreSQL connection to database *db*."""
        return self._cached_pg(ConnectionTask._ppgdb, db,
                               'host=pg.localdomain dbname={} user=root password=Boluome123')

    # ---------------- misc ----------------

    @property
    def rrdb(self):
        """Shared Redis client for the RRDB instance (db ``RRDB_DB_10``)."""
        if ConnectionTask._rrdb is None:
            ConnectionTask._rrdb = redis.StrictRedis(host=conf.RRDB_HOST,
                                                     port=conf.RRDB_PORT,
                                                     db=conf.RRDB_DB_10,
                                                     encoding='utf-8',
                                                     decode_responses=True)
        return ConnectionTask._rrdb

    def __getitem__(self, key):
        """Dict-style access to the common Mongo handles, e.g. ``task['pmdb']``.

        FIX: previously returned ``ConnectionTask.dmdb`` etc. — the property
        object itself, not the connection, because properties only resolve on
        instances.
        """
        if key == "dmdb":
            return self.dmdb
        elif key == "smdb":
            return self.smdb
        elif key == "pmdb":
            return self.pmdb
        elif key == "static":
            return self.static
        raise KeyError(key)


def mongo_upsert_operation(mdb, database, query, update, upsert=False):
    """Wrapper around Mongo ``update_one`` that retries on duplicate-key races.

    Args:
        mdb(Object): Mongo database handle (``mdb[database]`` is a collection).
        database(str): collection name.
        query(dict): filter document.
        update(dict): update document.
        upsert(bool): passed through to ``update_one``.
    """
    # FIX: the original recursed without limit on DuplicateKeyError, which
    # could overflow the stack if the conflict never resolved.  A concurrent
    # upsert race normally succeeds on retry, so a small bounded loop is enough.
    for _attempt in range(5):
        try:
            mdb[database].update_one(query, update, upsert=upsert)
            return
        except errors.DuplicateKeyError:
            continue
        except errors.DocumentTooLarge:
            logging.warning("%s", ["errors.DocumentTooLarge", database, query])
            return
    logging.warning("%s", ["DuplicateKeyError retries exhausted", database, query])


def insert_sql_parse(sql):
    """Extract the column names from ``INSERT INTO t (cols) VALUES (vals)``.

    Args:
        sql(str): INSERT statement whose first parenthesised group lists the
            column names and whose second lists the value placeholders.
    Returns:
        list[str]: column names in declaration order, whitespace stripped.
    Raises:
        AssertionError: if column and placeholder counts differ.
        IndexError: if *sql* does not contain two parenthesised groups.
    """
    # FIX: raw string — "\(" in a normal string is an invalid escape
    # (SyntaxWarning on Python 3.12+).
    groups = re.findall(r"\(.+?\)", sql)
    columns = groups[0].replace('(', '').replace(')', '').replace(' ', '')
    keys = columns.split(',')
    # sanity check: exactly one placeholder per column
    assert len(keys) == len(groups[1].split(','))
    return keys


def pg_insert(sql, data, conn):
    """Batch-execute a parameterised INSERT for every row in *data*.

    Args:
        sql(str): INSERT statement ending with ';', using '?' or '%s'
            placeholders; its column list must match the dict keys.
        data(list): list of dicts, one per row.
        conn: open psycopg2 connection.
    """
    assert isinstance(sql, str)
    assert isinstance(data, list)
    assert ";" in sql
    # FIX: guard against empty input — the original built an empty SQL string
    # and curs.execute("") raises ProgrammingError.
    if not data:
        return
    sql = sql.replace('?', '%s')
    keys = insert_sql_parse(sql)
    # Clear any aborted transaction state before starting a fresh one.
    conn.rollback()
    with conn.cursor() as curs:
        statements = []
        for row in data:
            values = [row.get(k) for k in keys]
            # mogrify renders the statement client-side; each ends with ';'
            # so plain concatenation yields a valid multi-statement string.
            statements.append(curs.mogrify(sql, values).decode())
        curs.execute("".join(statements))
        conn.commit()


def update_sql_parse(sql):
    """Extract the column names bound to ``%s`` placeholders in an UPDATE.

    Works on statements shaped like ``UPDATE t SET a=%s, b=%s WHERE id=%s;``
    and returns ``['a', 'b', 'id']``.
    """
    # Keep only the whitespace-separated tokens that carry a placeholder.
    placeholder_tokens = [tok for tok in sql.split(' ') if '%s' in tok]
    # Collapse them, drop the trailing ';', and turn every '=%s' into a
    # separator so the remaining fragments are the column names.
    collapsed = ''.join(placeholder_tokens).replace(';', '').replace('=%s', ',')
    keys = [name for name in collapsed.split(',') if name]
    assert len(keys) > 1
    return keys


def pg_update(sql, data, conn):
    """Batch-execute a parameterised UPDATE for every row in *data*.

    Args:
        sql(str): UPDATE statement ending with ';', using '?' or '%s'
            placeholders; the SET/WHERE columns must match the dict keys.
        data(list): list of dicts, one per row.
        conn: open psycopg2 connection.
    """
    assert isinstance(sql, str)
    assert isinstance(data, list)
    assert ";" in sql
    # FIX: guard against empty input — the original built an empty SQL string
    # and curs.execute("") raises ProgrammingError.
    if not data:
        return
    sql = sql.replace('?', '%s')
    keys = update_sql_parse(sql)
    # Clear any aborted transaction state before starting a fresh one.
    conn.rollback()
    with conn.cursor() as curs:
        statements = []
        for row in data:
            values = [row.get(k) for k in keys]
            # mogrify renders the statement client-side; each ends with ';'
            # so plain concatenation yields a valid multi-statement string.
            statements.append(curs.mogrify(sql, values).decode())
        curs.execute("".join(statements))
        conn.commit()


def pg_select(sql, conn, field=None):
    """Run a SELECT and return the rows as dicts keyed by column name.

    Args:
        sql(str): SELECT statement ending with ';'.
        conn: open psycopg2 connection.
        field: when truthy, also return the column-name list.
    Returns:
        list[dict], or ``(list[dict], list[str])`` when *field* is truthy.

    Example::

        rows = pg_select('SELECT key FROM boluome_settlement;', conn)
    """
    assert isinstance(sql, str)
    assert ";" in sql
    # Clear any aborted transaction state before querying.
    conn.rollback()
    with conn.cursor() as curs:
        curs.execute(sql)
        columns = curs.description
        rows = curs.fetchall()
        conn.commit()
    names = [col[0] for col in columns]
    data = [dict(zip(names, row)) for row in rows]
    if field:
        return data, [col.name for col in columns]
    return data


def get_in(coll, path=None, default=None):
    """Return the value at a dotted *path* inside nested dicts/lists.

    Args:
        coll(object): nested structure of dicts and lists.
        path(str): dot-separated keys/indices, e.g. ``'a.0.b.c'``; when None,
            *coll* itself is returned.
        default: value returned when the path cannot be resolved.
    """
    if path is None:
        return coll

    node = coll
    for segment in path.split('.'):
        if isinstance(node, dict):
            try:
                node = node[segment]
            except KeyError:
                return default
        elif isinstance(node, list):
            try:
                node = node[int(segment)]
            except (IndexError, ValueError):
                return default
        else:
            # Neither mapping nor list: path cannot continue.
            return default
    return node


def iteritems(coll):
    """Return key/value pairs: ``.items()`` for mappings, *coll* itself otherwise."""
    if hasattr(coll, 'items'):
        return coll.items()
    return coll


def merge_with(*dicts):
    """Merge mappings left-to-right into a new dict; later values win.

    Each argument may be a mapping or an iterable of ``(key, value)`` pairs.

    FIX: always returns a *new* dict.  The original returned the lone
    argument itself (no copy) when exactly one was given, so mutating the
    result silently mutated the caller's dict.
    """
    merged = {}
    for coll in dicts:
        # Accept either a mapping or an iterable of pairs (same contract
        # as the module's iteritems helper).
        pairs = coll.items() if hasattr(coll, 'items') else coll
        for key, value in pairs:
            merged[key] = value
    return merged


# async def mdb_setup(env):
#     if env == 'dev':
#         client = AsyncIOMotorClient(conf.DEV_MONGO_HOST)
#     elif env == 'stg':
#         client = AsyncIOMotorClient(conf.STG_MONGO_HOST)
#     elif env == 'pro':
#         client = AsyncIOMotorClient(conf.PRO_MONGO_HOST)
#     return client.boluome


async def rrdb_setup(loop, env, db):
    """Create an aioredis pool for the RRDB host of the given environment.

    Args:
        loop: asyncio event loop passed through to aioredis.
        env(str): one of 'dev', 'stg', 'pro'; anything else yields None.
        db(int): redis database number.
    """
    host_attr = {
        'dev': 'DEV_RRDB_HOST',
        'stg': 'STG_RRDB_HOST',
        'pro': 'PRO_RRDB_HOST',
    }.get(env)
    if host_attr is None:
        # Unknown environment: preserve the original implicit-None behavior.
        return None
    return await aioredis.create_redis_pool(
        (getattr(conf, host_attr), conf.RRDB_PORT),
        db=db,
        encoding='utf8',
        loop=loop)


async def rdb_setup(loop, env, db):
    """Create an aioredis pool for the main redis of the given environment.

    Args:
        loop: asyncio event loop passed through to aioredis.
        env: 'dev'/0, 'stg'/1, 'pro'/2 or 'pro_master'/3; anything else
            yields None.
        db(int): redis database number.
    """
    routes = (
        (('dev', 0), 'DEV_REDIS_HOST'),
        (('stg', 1), 'STG_REDIS_HOST'),
        (('pro', 2), 'PR0_PAYMENT_REDIS_HOST'),
        (('pro_master', 3), 'PRO_REDIS_HOST'),
    )
    for aliases, host_attr in routes:
        if env in aliases:
            return await aioredis.create_redis_pool(
                (getattr(conf, host_attr), conf.REDIS_PORT),
                db=db,
                encoding='utf8',
                loop=loop)
    # Unknown environment: preserve the original implicit-None behavior.
    return None


async def rdb_dpp_setup(loop, db):
    """Create an aioredis pool for the DPP redis host.

    Args:
        loop: asyncio event loop passed through to aioredis.
        db(int): redis database number.
    """
    address = (conf.DPP_REDIS_HOST, conf.REDIS_PORT)
    pool = await aioredis.create_redis_pool(address,
                                            db=db,
                                            encoding='utf8',
                                            loop=loop)
    return pool


async def rdb_activity_setup(loop, env, db):
    """Create an aioredis pool for the activity redis of the environment.

    Args:
        loop: asyncio event loop passed through to aioredis.
        env: 'dev'/0, 'stg'/1 or 'pro'/2; anything else yields None.
        db(int): redis database number.
    """
    routes = (
        (('dev', 0), 'DEV_REDIS_HOST'),
        (('stg', 1), 'STG_REDIS_HOST'),
        (('pro', 2), 'PR0_ACTIVITY_REDIS_HOST'),
    )
    for aliases, host_attr in routes:
        if env in aliases:
            return await aioredis.create_redis_pool(
                (getattr(conf, host_attr), conf.REDIS_PORT),
                db=db,
                encoding='utf8',
                loop=loop)
    # Unknown environment: preserve the original implicit-None behavior.
    return None


async def pg_setup(env, db="account"):
    """Create an aiopg pool for the account PostgreSQL of the environment.

    Args:
        env: 'dev'/0, 'stg'/1 or 'pro'/2; anything else yields None.
        db(str): database name substituted into the environment's DSN template.
    """
    routes = (
        (('dev', 0), 'DEV_PG_ACCOUNT'),
        (('stg', 1), 'STG_PG_ACCOUNT'),
        (('pro', 2), 'PRO_PG_ACCOUNT'),
    )
    for aliases, dsn_attr in routes:
        if env in aliases:
            return await aiopg.create_pool(getattr(conf, dsn_attr).format(db))
    # Unknown environment: preserve the original implicit-None behavior.
    return None