# -*- coding: utf-8 -*-
# standard
import os
import sys
import platform
import warnings
import inspect
from enum import Enum
from functools import partial
from datetime import datetime
from datetime import date
from datetime import timedelta
from typing import Union
from typing import Callable
from typing import List
from typing import Dict
from typing import Tuple
from typing import Optional
from typing import Any
from decimal import Decimal
from urllib.parse import quote_plus

# third
import requests
from cachelib import FileSystemCache
from cachelib import RedisCache
# 导入 pydantic
from pydantic import validate_arguments
from pydantic import BaseModel
from pydantic import Field
from pydantic import constr
from pydantic_core import PydanticUndefined
# 导入 sqlalchemy
import sqlalchemy
from sqlalchemy import exc
from sqlalchemy import schema
from sqlalchemy import create_engine
from sqlalchemy import or_
from sqlalchemy import and_
from sqlalchemy import text
from sqlalchemy.sql import operators
from sqlalchemy.orm import Query
from sqlalchemy.orm import relationship
from sqlalchemy.orm import Session
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import scoped_session
from sqlalchemy.orm import DeclarativeBase
from sqlalchemy.orm import relationship
from sqlalchemy.orm import load_only
from sqlalchemy.orm import joinedload
from sqlalchemy.orm import selectinload
from sqlalchemy.orm.collections import InstrumentedList
from sqlalchemy.orm.attributes import AttributeEventToken
from sqlalchemy.orm.attributes import InstrumentedAttribute
from sqlalchemy import event
from sqlalchemy import select
from sqlalchemy import update
from sqlalchemy import delete
from sqlalchemy import Column
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy import DECIMAL
from sqlalchemy import Date
from sqlalchemy import SmallInteger
from sqlalchemy import String
from sqlalchemy import Text
from sqlalchemy import Numeric
from sqlalchemy import DateTime
from sqlalchemy import JSON
from sqlalchemy import ARRAY
from sqlalchemy import func
from sqlalchemy import UniqueConstraint
from sqlalchemy.ext.asyncio import create_async_engine
from sqlalchemy.ext.asyncio import async_sessionmaker
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.ext.asyncio import AsyncAttrs  # 用于提供属性，特别是关系数据的异步特性

# local
_P_PATH =  os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
if _P_PATH not in sys.path:
    sys.path.append(_P_PATH)
from toolbox.common_tools import R_P_PATH
from toolbox.common_tools import logger
from toolbox.common_tools import paging_data
from toolbox.common_tools import check_env


"""
@Title:   
@File: alchemy_db.py
@Author: walle 2023年02日22日 07时08分51秒
@Version: 1.0.0
@Desc: 
"""

# Enforce the minimum interpreter version up front.  The original test
# (`major <= 3 and minor < 10`) is the fragile per-field form; tuple
# comparison is the canonical, always-correct way to compare versions.
if sys.version_info < (3, 10):
    raise ValueError("python 版本必须 3.10 以上")
ENV = check_env()
logger.debug(f"当前运行环境： {ENV}")
logger.debug(f"当前 sqlalchemy 版本号： {sqlalchemy.__version__}")
__LOCAL_IOT_SERVICE = True  # point the IoT micro-service calls at the local instance
# NOTE(review): auth tokens and DB credentials are hard-coded in source;
# consider moving them to environment variables or a secrets manager.
IOT_AUTH = 'Bearer 85b5a2s5479dr3n4f6t31b4fb541739d'  # auth token for the IoT micro-service
LAN_AUTH = 'Bearer 94d4af8847f7c953ef2b89634e45693a'  # auth token for the gateway-script micro-service
GATEWAY_AUTH = 'Bearer 10cb3e502ef52a584c92c06c161ce87b'  # auth token for the Zabbix micro-service
if ENV == "dev" and __LOCAL_IOT_SERVICE:
    IOT_URL = 'http://127.0.0.1:8085'        # IoT micro-service
    LAN_URL = 'http://127.0.0.1:12000'   # gateway-script micro-service
    GATEWAY_URL = 'http://127.0.0.1:12002'  # Zabbix micro-service
else:
    IOT_URL = 'http://iot.ihcc.com.cn:8085'
    LAN_URL = 'http://61.169.129.194:12000'
    GATEWAY_URL = 'http://61.169.129.194:12002'

__LOCAL_DB = False  # use the local database instead of the hosted one
DB_PORT = 5432
DB_NAME = "task-center"
if platform.system() == "Windows" and __LOCAL_DB:
    DB_USER = "postgres"
    DB_HOST = "127.0.0.1"
    DB_PW = "123456"
else:
    DB_USER = "yhjk"
    DB_HOST = "pgm-uf69o3rbz151899ngo.pg.rds.aliyuncs.com"
    DB_PW = "Yhjk@240914"
sync_db_url = f'postgresql+psycopg2://{DB_USER}:{quote_plus(DB_PW)}@{DB_HOST}:{DB_PORT}/{DB_NAME}'
sync_engine = create_engine(url=sync_db_url,
                    pool_recycle=86400,  # recycle connections older than one day
                    pool_size=30,  # connection-pool size
                    max_overflow=30,  # extra connections allowed beyond pool_size
                    echo=False,  # when True, log every statement and its parameters
                    echo_pool=False,  # when True, log connection-pool activity
                    )
async_db_url = f'postgresql+asyncpg://{DB_USER}:{quote_plus(DB_PW)}@{DB_HOST}:{DB_PORT}/{DB_NAME}'
async_engine = create_async_engine(url=async_db_url,
                pool_recycle=86400,  # recycle connections older than one day
                pool_size=10,  # connection-pool size
                max_overflow=30,  # extra connections allowed beyond pool_size
                echo=False,  # when True, log every statement and its parameters
                echo_pool=False,  # when True, log connection-pool activity
                )
# NOTE(review): these lines log the full DSNs, which include the DB password —
# consider redacting before this reaches shared log storage.
logger.info(f"psycopg2: {sync_db_url}")
logger.info(f"asyncpg: {async_db_url}")
SYNC_SESSION_FACTORY = sessionmaker(bind=sync_engine, autoflush=False, autocommit=False)
ASYNC_SESSION_FACTORY = async_sessionmaker(bind=async_engine,
                                           class_=AsyncSession,  # must be specified for async sessions
                                           expire_on_commit=False,  # keep attributes readable after commit
                                           autoflush=False, autocommit=False)


def new_db_session(sync: bool = False) -> Union[AsyncSession, Session]:
    """
    Create a brand-new session (intended for multi-threaded / per-task use).

    :param sync: when True return a synchronous ``Session``,
                 otherwise an ``AsyncSession``
    """
    factory = SYNC_SESSION_FACTORY if sync else ASYNC_SESSION_FACTORY
    return factory()
    

class Base(AsyncAttrs, DeclarativeBase):  # AsyncAttrs enables awaitable access to lazy attributes (e.g. relationships)
    # Declarative base class.  DeclarativeBase fully replaces the legacy 1.x
    # declarative_base() factory and is the recommended 2.0 form.
    pass


async def init_db_by_async():
    """
    Initialize the database schema asynchronously.

    Runs ``Base.metadata.create_all`` through the async engine inside a
    single transactional connection.
    """
    async with async_engine.begin() as connection:
        await connection.run_sync(Base.metadata.create_all)
    logger.info("数据库异步方式初始化完成")


def init_db_by_sync():
    """
    Initialize the database schema synchronously.

    Either this or the async initializer may be used; initializing
    synchronously does not prevent using async sessions afterwards.
    """
    Base.metadata.create_all(bind=sync_engine)
    logger.info("同步数据库初始化完成")


# Base.metadata.create_all(engine)  # 放在最后

# 生产环境用


JSON_ENCODERS = { # custom JSON encoders for pydantic BaseModel subclasses
            datetime: lambda v: v.strftime("%Y-%m-%d %H:%M:%S.%f"),
            date: lambda v: v.strftime("%Y-%m-%d"),
            Decimal: lambda v: float(v),
        }


class Resp(BaseModel):
    """
    Unified response envelope.

    Can be returned directly from fastapi as JSON (internally that still
    goes through Resp.json).
    """
    message: str = Field("success", title="消息")
    data: Any = Field(None, title="数据载荷")

    class Config:
        json_encoders = JSON_ENCODERS  # NOTE: this setting has no effect under pydantic v2

    def __bool__(self):
        # Truthy exactly when the message is the literal "success"
        # (case-insensitive).
        message = self.message
        return isinstance(message, str) and message.lower() == "success"

    def error(self, e_or_message: Union[Exception, str], write_log: bool = True, **kwargs) -> None:
        """
        Wrap an error/exception into this response.

        :param e_or_message: an Exception instance or a plain message string
        :param write_log: whether to also write the error to the log
        :return:
        """
        if isinstance(e_or_message, Exception):
            if write_log:
                logger.exception(e_or_message)
            self.message = f"{e_or_message.__class__.__name__}: {e_or_message}"
        elif isinstance(e_or_message, str):
            if write_log:
                logger.error(e_or_message)
            self.message = e_or_message
        else:
            raise ValueError(
                "参数 e_or_message 必须是一个异常类的实例或者字符串对象: e_or_message = {}".format(e_or_message)
            )
        self.data = kwargs


def common_send_request(method: str, status_code: int = 200, repackage_response: bool = True, **kwargs) -> Resp:
    """
    Generic HTTP request helper that wraps the outcome in a ``Resp``.

    Fixes vs. original: no ``return`` inside ``finally`` (which silently
    suppresses in-flight exceptions, flake8 B012), and the if/elif verb
    dispatch is replaced by ``requests.request``.

    :param method: HTTP verb — one of get/post/put/delete (case-insensitive)
    :param status_code: the status code regarded as success, defaults to 200
    :param repackage_response: when True the body is assumed to already be a
        serialized ``Resp`` and is re-hydrated directly, avoiding a nested
        Resp-inside-Resp; otherwise the JSON body is stored in ``resp.data``
    :param kwargs: forwarded verbatim to ``requests`` (url, json, headers, ...)
    :return: a ``Resp`` describing success or failure — network errors are
        captured, never raised
    """
    resp = Resp()
    verb = method.lower()
    if verb not in ("get", "post", "put", "delete"):
        # Same message the original produced by raising/catching its own Exception.
        logger.error(f"不支持的请求方法 {method}")
        resp.message = f"请求发生异常: 不支持的请求方法 {method}"
        return resp
    try:
        response = requests.request(verb, **kwargs)
    except Exception as e:
        logger.error(e)
        resp.message = f"请求发生异常: {e}"
        return resp
    if response.status_code == status_code:
        if repackage_response:
            resp = Resp(**response.json())
        else:
            resp.data = response.json()
    else:
        logger.error(f"请求发生{response.status_code}异常: kwargs = {kwargs}")
        resp.message = f"服务端返回了错误的状态码:{response.status_code},错误信息: {response.text}"
    return resp


@validate_arguments(config={"arbitrary_types_allowed": True})
def page(query: Query, page_num: int = 1, page_size: int = 10,
         only: List[str] = None, ignore: List[str] = None, **kwargs) -> Resp:
    """
    Paging helper for a sqlalchemy ``Query`` — demo/reference code only.

    Note: sqlalchemy and pony interpret ``limit`` differently.

    :param query: the query to paginate
    :param page_num: page number, defaults to 1
    :param page_size: rows per page, defaults to 10
    :param only: allowed field names — takes precedence over ``ignore``
    :param ignore: field names to drop
    :return: a ``Resp`` whose data is the paged payload
    """
    total = query.count()
    window = query.offset((page_num - 1) * page_size).limit(page_size)
    rows = [row.to_dict(only=only, ignore=ignore) for row in window]
    payload = paging_data(data=rows, total=total, page_num=page_num, page_size=page_size)
    return Resp(data=payload)


class OrmTools:
    """
    Helper mixin for sqlalchemy declarative models.

    Relies on the model's ``__mapper__`` and (for ``delete``) an ``id``
    column on the model class.
    """

    @classmethod
    def get_columns(cls, only_name: bool = False) -> List[Union[Column, str]]:
        """
        Return the columns declared on the model.

        :param only_name: when True return just the column names
        :return: list of ``Column`` objects, or of name strings
        """
        data = cls.__mapper__.columns._all_columns
        if only_name:
            data = [x.name for x in data]
        return data

    @classmethod
    def add(cls, db_session: Session = None, **kwargs) -> Resp:
        """
        Insert a new row built from ``kwargs``.

        :param db_session: optional session; when omitted a private sync
            session is created here and closed before returning
        """
        db_session, need_close = (new_db_session(sync=True), True) if db_session is None else (db_session, False)
        resp = Resp()
        try:
            db_session.add(cls(**kwargs))
            db_session.commit()
        except Exception as e:
            resp.error(e)
            db_session.rollback()
        finally:
            if need_close:
                db_session.close()
        return resp  # moved out of finally: return-in-finally suppresses exceptions (B012)

    @classmethod
    def edit(cls, id: int, db_session: Session = None, **kwargs) -> Resp:
        """
        Update the row with primary key ``id``.

        ``None`` values in ``kwargs`` are treated as "leave unchanged".
        """
        db_session, need_close = (new_db_session(sync=True), True) if db_session is None else (db_session, False)
        resp = Resp()
        kwargs = {k: v for k, v in kwargs.items() if v is not None}
        try:
            # Session.get replaces legacy Query.get (removed in SQLAlchemy 2.0).
            one = db_session.get(cls, id)
            if one is None:
                raise Exception(f"对象(id: {id})不存在")
            for k, v in kwargs.items():
                setattr(one, k, v)
            db_session.commit()
        except Exception as e:
            resp.error(e)
            db_session.rollback()
        finally:
            if need_close:
                db_session.close()
        return resp

    @classmethod
    def delete(cls, ids: List[int], db_session: Session = None) -> Resp:
        """
        Bulk-delete rows whose ``id`` is in ``ids``.
        """
        db_session, need_close = (new_db_session(sync=True), True) if db_session is None else (db_session, False)
        resp = Resp()
        try:
            # Wrapped in try/rollback for consistency with add/edit — the
            # original let DB errors propagate and leaked the session.
            result = db_session.query(cls).filter(cls.id.in_(ids)).delete()
            logger.debug(f"删除了 {result} 个 {cls.__name__} 对象")
            db_session.commit()
        except Exception as e:
            resp.error(e)
            db_session.rollback()
        finally:
            if need_close:
                db_session.close()
        return resp

    @classmethod
    def get_by_id(cls, id: int, to_plain: bool = True, db_session: Session = None) -> Resp:
        """
        Fetch one row by primary key; ``resp.data`` holds its dict form.

        :param to_plain: passed through to ``to_dict`` (stringify
            datetime/date, float-ify Decimal)
        """
        db_session, need_close = (new_db_session(sync=True), True) if db_session is None else (db_session, False)
        resp = Resp()
        try:
            one = db_session.get(cls, id)
            if one is None:
                # BUG FIX: the original called resp.message(...) — a str is
                # not callable, which raised TypeError at runtime.
                resp.message = f"实例 (id: {id})不存在"
            else:
                resp.data = one.to_dict(to_plain=to_plain)
        finally:
            if need_close:
                db_session.close()
        return resp

    def to_dict(self, to_plain: bool = False, only: List[str] = None, ignore: List[str] = None) -> dict:
        """
        Convert this instance to a plain dict.

        Relationship attributes (one-to-one / one-to-many / many-to-many)
        are NOT traversed; only mapped columns are emitted.  If you hand the
        result straight to fastapi, un-serializable relationship values are
        your problem to handle.

        :param to_plain: convert datetime/date to strings and Decimal to float
        :param only: field names to include — takes precedence over ``ignore``
        :param ignore: field names to drop, defaults to None
        :return: dict of column name -> value
        """
        only = only or []
        ignore = ignore or []
        data = {}
        for column in self.__mapper__.columns._all_columns:
            name = column.name
            # only (when non-empty) wins over ignore — same rule as original.
            if only:
                include = name in only
            else:
                include = name not in ignore
            if not include:
                continue
            v = getattr(self, name)
            if to_plain:
                if isinstance(v, datetime):
                    v = v.strftime('%Y-%m-%d %H:%M:%S')
                elif isinstance(v, date):
                    v = v.strftime('%Y-%m-%d')
                elif isinstance(v, Decimal):
                    v = float(v)
            data[name] = v
        return data



class MyBaseModel(BaseModel):
    """
    pydantic.BaseModel pre-configured for ORM interop.
    """

    class Config:
        str_strip_whitespace = True  # strip leading/trailing whitespace from string fields
        arbitrary_types_allowed = True  # allow arbitrary (non-pydantic) types
        populate_by_name = True  # allow populating aliased fields by attribute name
        from_attributes = True  # allow building models from object attributes (ORM mode)
        json_encoders = JSON_ENCODERS  # NOTE(review): has no effect under pydantic v2



# Shared Redis-backed cache (cachelib); entries expire after 3600 s by default.
# NOTE(review): host/password are hard-coded in source — consider env vars.
REDIS_CACHE = RedisCache(host="61.169.129.194",
                         port=4999, 
                         password="8gOfXZqXGP2uczte4hRpKw4eMEhqIu7Id8ou416OtV8",
                         db=3,
                         default_timeout=3600)


class FileCacheManager:
    """
    File-system-backed cache (cachelib.FileSystemCache).

    Cache files live under ``<R_P_PATH>/cache``.  ``timeout=None`` means the
    cache's default timeout.  The static methods delegate to the module-level
    singleton ``__FILE_CACHE__`` created right after this class definition.

    Fixes vs. original: the exists/makedirs race is removed via
    ``exist_ok=True`` and the mixed 2/3-space indentation is normalized to
    the file's 4-space convention.
    """
    cache: FileSystemCache  # the underlying cachelib cache

    def __init__(self) -> None:
        cache_dir = os.path.join(R_P_PATH, "cache")
        # exist_ok avoids the check-then-create race of the original
        # os.path.exists / os.makedirs pair.
        os.makedirs(cache_dir, exist_ok=True)
        self.cache = FileSystemCache(cache_dir=cache_dir, threshold=50)

    @staticmethod
    def set(key: str, value: dict, timeout=300):
        return __FILE_CACHE__.cache.set(key, value, timeout)

    @staticmethod
    def set_many(mapping: Dict[str, Any], timeout=300):
        return __FILE_CACHE__.cache.set_many(mapping=mapping, timeout=timeout)

    @staticmethod
    def delete(key: str):
        return __FILE_CACHE__.cache.delete(key)

    @staticmethod
    def delete_many(*keys: str):
        return __FILE_CACHE__.cache.delete_many(*keys)

    @staticmethod
    def get(key: str):
        return __FILE_CACHE__.cache.get(key)

    @staticmethod
    def get_many(*keys: str):
        return __FILE_CACHE__.cache.get_many(*keys)
  

# Module-level singleton that the FileCacheManager static methods delegate to.
__FILE_CACHE__ = FileCacheManager()


if __name__ == '__main__':
    # Manual entry point: create all tables synchronously.
    init_db_by_sync()
    # Put ad-hoc test code here.
    pass