"""
基于 SQLAlchemy 封装/总结 一些常用的访问数据相关工具
"""
from loguru import logger
from sqlalchemy import create_engine, text
from sqlalchemy.pool import QueuePool,NullPool
from urllib.parse import quote_plus as urlquote
from importlib.metadata import version


class DbHelper():
  """
  SQLAlchemy-based helper wrapping common database-access utilities.

  Supports MySQL, SQLite and Impala. The connection configuration is a
  dict; see ``init_config`` for the two accepted key layouts.
  """
  # Keys used inside the normalized config dict.
  C_user = 'user'
  C_host = 'host'
  C_port = 'port'
  C_password = 'password'
  C_db = 'db'
  C_db_file = 'dbfile'

  # Supported database types.
  DB_TYPE_SQLITE = "sqlite"
  DB_TYPE_MYSQL = "mysql"
  DB_TYPE_IMPALA = "impala"

  def __init__(self, configFile0=None, db_type=None) -> None:
    """
    :param configFile0: dict-like connection configuration (see init_config)
    :param db_type: one of DB_TYPE_SQLITE / DB_TYPE_MYSQL / DB_TYPE_IMPALA
    """
    super().__init__()
    self.configFile = self.init_config(configFile0)
    self.enginee = self.init_enginee(db_type)

    self.sqlalchemy_version = version('sqlalchemy')
    # Fix: compare numeric version tuples instead of raw strings — a plain
    # string comparison would wrongly report e.g. '10.0' >= '2.0' as False.
    self.is_sqlalchemy_v2 = self._parse_version(self.sqlalchemy_version) >= (2, 0)
    logger.info(f"self.sqlalchemy_version={self.sqlalchemy_version } ，  is_sqlalchemy_v2 = {self.is_sqlalchemy_v2}")

  @staticmethod
  def _parse_version(version_str):
    """Return the leading numeric dot-separated components as an int tuple,
    e.g. '2.0.23' -> (2, 0, 23), '1.4.49rc1' -> (1, 4)."""
    nums = []
    for part in version_str.split('.'):
      if not part.isdigit():
        break
      nums.append(int(part))
    return tuple(nums)

  def init_config(self, configFile0):
    """Normalize the raw config into the key layout used by init_enginee.

    Accepts either 'datasource.*' style keys or the plain 'host'/'dbfile'
    layout; returns None (with a warning) when neither layout matches.
    """
    if 'datasource.host' in configFile0:
      config = {
        DbHelper.C_host: configFile0['datasource.host'],
        DbHelper.C_port: int(configFile0['datasource.port']),
        DbHelper.C_user: configFile0['datasource.username'],
        DbHelper.C_password: configFile0['datasource.password'],
        DbHelper.C_db: configFile0['datasource.db']
      }
      return config
    elif 'host' in configFile0 or DbHelper.C_db_file in configFile0:
      return configFile0
    else:
      logger.warning("数据库连接配置内容不正确!")
      return None

  def init_enginee(self, db_type):
    """Build and return the SQLAlchemy engine for the given db_type."""
    conf = self.configFile

    def build_conn_str():
      # Fix: reference the class attributes instead of the bare module-level
      # aliases, which are only bound after the class body has executed.
      if db_type == DbHelper.DB_TYPE_MYSQL:
        return "mysql+pymysql://{}:{}@{}:{}/{}".format(conf[DbHelper.C_user], urlquote(conf[DbHelper.C_password]),
                                                       conf[DbHelper.C_host], conf[DbHelper.C_port],
                                                       conf[DbHelper.C_db])
      elif db_type == DbHelper.DB_TYPE_SQLITE:
        return f'sqlite:///{conf[DbHelper.C_db_file]}?timezone=Asia/Shanghai'
      elif db_type == DbHelper.DB_TYPE_IMPALA:
        return f'impala://{conf[DbHelper.C_host]}:{conf[DbHelper.C_port]}/{conf[DbHelper.C_db]}'
      # Fix: fail fast with a clear message instead of implicitly returning
      # None and letting create_engine raise a confusing error.
      raise ValueError(f"unsupported db_type: {db_type}")

    conn_str = build_conn_str()
    logger.info(f'conn_str={conn_str} , db_type = {db_type}')
    # pool_size: size of the connection pool (default 5)
    # max_overflow: extra connections allowed beyond pool_size (default 10)
    # pool_timeout: seconds to wait for a pooled connection (default 30)
    # pool_pre_ping: validate each connection before use so a stale/dropped
    #   connection does not break the query
    # pool_recycle: recycle connections after this many seconds; keep it
    #   below MySQL's wait_timeout (8h) to avoid server-side disconnects
    return create_engine(conn_str, poolclass=QueuePool, pool_size=5, max_overflow=10, pool_timeout=30,
                         pool_pre_ping=True,
                         pool_recycle=3600 * 6)

  def get_enginee(self):
    """Return the underlying SQLAlchemy engine."""
    return self.enginee

  def get_connection(self):
    """Open and return a new connection from the engine pool."""
    return self.get_enginee().connect()

  def runSql(self, sql, log=None, auto_commit=True):
    """Execute a single statement; on SQLAlchemy >= 2.0 an explicit commit
    is required ("commit as you go"), so issue it when auto_commit is set."""
    with self.get_connection() as conn:
      conn.execute(text(sql))
      if auto_commit and self.is_sqlalchemy_v2:
        conn.commit()
      if log:
        log.info("run sql success : %s " % sql)

  def runSqlWithParas(self, sqlTmpl, datas, auto_commit=True, row_to_dict=False, rs_to_list=False):
    """Execute a parameterized statement, e.g.
    conn.execute(text("SELECT x, y FROM some_table WHERE y > :y"), {"y": 2}).

    NOTE(review): unless rs_to_list=True, the returned result/generator is
    consumed after the connection has been closed — confirm the driver in
    use buffers rows, otherwise this fails on SQLAlchemy 2.x.
    """
    with self.get_connection() as conn:
      result = conn.execute(text(sqlTmpl), datas)
      if self.is_sqlalchemy_v2 and auto_commit:
        conn.commit()
      if row_to_dict and result:
        result = self.conv_rs_to_dict_generator(result)
      if rs_to_list and result:
        result = list(result)
      return result

  # runSql4All returns results shaped like [row1, row2] where each row is a
  # tuple. To fetch a field by name:
  #   column_names = result.keys()
  #   col_idx = column_names._keys.index('id')
  #   logger.info(item[col_idx])

  def runSql4All_old(self, sql, log=None):
    """Legacy variant: returns the raw CursorResult (note the connection is
    already closed when this returns)."""
    with self.get_connection() as conn:
      result = conn.execute(text(sql))
      if log:
        log.info("run sql success : %s " % sql)
      return result

  def conv_rs_to_dict_generator(self, rs):
    """Wrap a result set into a generator yielding {column_name: value} dicts."""
    # Fix: build the name->index map with enumerate instead of poking at the
    # private RMKeyView._keys attribute (also avoids the O(n^2) index scan).
    m_colname_idx = {col: idx for idx, col in enumerate(rs.keys())}

    def my_row_to_dict(row):
      return {col_name: row[col_idx] for col_name, col_idx in m_colname_idx.items()}

    return (my_row_to_dict(row) for row in rs)  # a lazy data generator

  def runSql4All(self, sql, row_to_dict=False, rs_to_list=False, log=None):
    """Run a query and return its result, optionally converted to dicts
    and/or materialized into a list.

    NOTE(review): with rs_to_list=False the result/generator is consumed
    after the connection has been closed — verify this works with the
    driver in use.
    """
    with self.get_connection() as conn:
      result = conn.execute(text(sql))
      if log:
        log.info("run sql success : %s " % sql)
      if row_to_dict:
        result = self.conv_rs_to_dict_generator(result)
      if rs_to_list:
        result = list(result)
      return result

  run_sql_4result = runSql4All

  # NOTE: when executing multiple statements against Impala,
  # run_sql_list_4result may not keep them in the same session, so e.g.
  #   set compression_codec=gzip
  #   INSERT INTO dzy_test.smssendlog_gzip SELECT * FROM dzy_test.smssendlog
  # may not behave as expected.
  def run_sql_list_4result(self, sql_list, row_to_dict=False, log=None, rs_to_list=False):
    """Run several statements on one connection: roll back on error (and
    return None), otherwise auto-commit when the connection closes. SELECT
    statements contribute their result to the returned list; any other
    statement contributes None."""
    # Fix: pre-bind sql so the except-handler cannot hit a NameError when
    # the failure happens before the loop starts (e.g. while connecting).
    sql = None
    try:
      rtns = []
      with self.get_connection() as conn:
        for sql in sql_list:
          result = conn.execute(text(sql))
          if sql.strip().lower().startswith("select"):
            if row_to_dict:
              result = self.conv_rs_to_dict_generator(result)
            if rs_to_list:
              result = list(result)
            rtns.append(result)
          else:
            rtns.append(None)
          if log:
            log.info("run sql success : %s " % sql)
          else:
            print("run sql success : %s " % sql)
        return rtns
    except Exception as e:
      # Fix: do not drop the exception — report the cause alongside the
      # failing statement (str() also guards against sql being None).
      print("执行语句出错：" + str(sql))
      logger.exception(e)
    return None


# Module-level aliases of the supported database types, kept for callers
# that import these names directly instead of going through the class.
DB_TYPE_SQLITE = DbHelper.DB_TYPE_SQLITE
DB_TYPE_MYSQL = DbHelper.DB_TYPE_MYSQL
DB_TYPE_IMPALA = DbHelper.DB_TYPE_IMPALA

# test code
def test_sqlite():
  """Manual smoke test for the SQLite path.

  NOTE(review): this is an unfinished stub — the config dict is empty and
  DbHelper is never constructed. TODO: add a 'dbfile' entry and exercise
  DbHelper(db_conf, DB_TYPE_SQLITE) once a fixture database is available.
  """
  db_conf = {

  }

# Script entry point: run the manual smoke test when executed directly.
if __name__ == '__main__':
  test_sqlite()

"""
sqlite:
得到本地的当前时间 ： select datetime('now', 'localtime');

"""
