# _*_ coding: utf-8 _*_
"""
Time:     2024/9/20
Author:   2051261
File:     cninfo_increment_where.py
"""
# Longbai (cninfo) data query; each incremental run covers the previous day.

from __future__ import annotations
import logging
import typing as t
from datetime import datetime
from datetime import timedelta
from sqlalchemy.exc import OperationalError
import pymysql

from schematics import Model
from schematics.types import StringType
from schematics.types import ListType
from schematics.types import BooleanType
from schematics.types import IntType
from mine.db.db_operate import db_operate
from extractor.utils.common_func import dict_keys_to_lowercase
from extractor.flow.base_node_flow import BaseNodeFlow
from extractor.model.factory import SQLBuilderFactory
from extractor.const import ContextKey
from extractor.flow.node_model.child_node_method.modify import strip_space


logger = logging.getLogger(__name__)


if t.TYPE_CHECKING:
    from extractor.flow.task_entry import Cache
    from extractor.flow.node_model.log_node import RecordLog


class CninfoBaseSrcModel(Model):
    """Input-parameter schema for the cninfo incremental source node.

    Validated by schematics from the ``kwargs`` dict passed to
    ``CninfoBaseSrcNode.__call__``.
    """

    # Column holding each row's last-modification time (required).
    mod_time_column: str = StringType(required=True)
    # Column holding each row's record/insertion time (required).
    rec_time_column: str = StringType(required=True)
    # Source table to query (required).
    table_name: str = StringType(required=True)
    # Logical connection name used to resolve the target database (required).
    conn_name: str = StringType(required=True)
    # Explicit increment start time ('%Y-%m-%d'); when omitted it defaults
    # to now minus ``delta_day`` days (see the node's __call__).
    inc_time: str = StringType()

    # Optional ORDER BY expression forwarded to the SQL builder.
    order_by: str = StringType()
    # Columns to select; all columns ("*") when omitted.
    query_columns: list[str] = ListType(StringType())
    # Comparison operator applied to the time filter (default ">").
    operator: str = StringType(default=">")
    # When True, whitespace is stripped from each fetched row's values.
    strip_space: bool = BooleanType(default=False)
    # Days subtracted from "now" to build the default increment window.
    delta_day: int = IntType(default=1)


class CninfoBaseSrcNode(BaseNodeFlow):
    """Source node streaming incremental rows from a cninfo (Longbai) table.

    Builds a time-filtered query from the validated ``kwargs`` and yields one
    context dict per fetched row, so downstream nodes receive rows lazily.
    """

    def __call__(self, record_log: RecordLog, context: t.Dict[str, t.Any],
                 cache: Cache, record: t.DefaultDict, kwargs=None) ->\
            (t.Dict[str, t.Any] | t.Iterator[t.Dict[str, t.Any]] | None):
        """Yield a copy of *context* per row, with the row under ContextKey.DATA.

        :param record_log: task log sink; receives the increment start time.
        :param context: flow context; copied (shallow) for every yielded row.
        :param cache: checkpoint cache; when it holds a value, that key/value
            pair is added to the query parameters for resumption.
        :param record: unused here; part of the node interface.
        :param kwargs: raw parameters, validated via CninfoBaseSrcModel.
        :raises sqlalchemy.exc.OperationalError: re-raised unless it wraps
            MySQL error 2013 (lost connection), which is logged and skipped.
        """
        model = CninfoBaseSrcModel(kwargs)
        # Default increment window: `delta_day` days before now, date-only.
        inc_time = model.inc_time
        if inc_time is None:
            inc_time = (datetime.now() - timedelta(days=model.delta_day)).strftime('%Y-%m-%d')
        mod_time = model.mod_time_column
        rec_time = model.rec_time_column
        table_name = model.table_name
        conn_name = model.conn_name
        query_columns = model.query_columns if model.query_columns else "*"

        db_type = db_operate.get_db_type(conn_name)
        sql_builder = SQLBuilderFactory().get_query_builder(db_type)
        # Both time columns are filtered against the same increment boundary.
        params = {
            mod_time: inc_time,
            rec_time: inc_time
        }
        if cache.get_value() is not None:
            params[cache.get_key()] = cache.get_value()
        sql = sql_builder.cninfo_query(table_name, mod_time, rec_time, model.order_by,
                                       query_columns, model.operator, cache.get_key())
        record_log.start({"start_time": inc_time})
        result = db_operate.fetch_all(conn_name, sql, params)
        try:
            for item in result:
                tmp = context.copy()
                if model.strip_space is True:
                    strip_space(item)
                # BUG FIX: the original wrote `await dict_keys_to_lowercase(item)`
                # inside this synchronous generator — `await` outside an
                # `async def` is a SyntaxError, so the module could not even
                # be imported. NOTE(review): this assumes the helper is a
                # plain function; if it is actually a coroutine, this node
                # must become an async generator instead — confirm upstream.
                item = dict_keys_to_lowercase(item)
                tmp[ContextKey.DATA] = item
                yield tmp
        except OperationalError as e:
            if isinstance(e.orig, pymysql.err.OperationalError) and e.orig.args[0] == 2013:
                # MySQL error 2013 "Lost connection": when the source is large
                # the extraction may outlive the connection; give up quietly
                # and wait for the next scheduled trigger.
                logger.warning("connection lost, e:%s", e)
                return {}
            # Bare raise preserves the original traceback.
            raise
