# -*- coding: utf-8 -*-            
# @Time : 2025/6/30 17:12
# @FileName: utils.py
# @Target:
import json, codecs, time, os, logging, datetime, threading, requests
from pprint import pprint
from tqdm import tqdm
from logging.handlers import RotatingFileHandler

import psycopg2
from psycopg2 import sql

from llm import quality_analyze
from llm import cuzu
from llm import tool

from obs import ObsClient

def makedirs():
    """Create the working directories the service needs and empty ./tmp.

    Directories: the data folder (env MAIXUN_DATA_SAVED_PATH, default
    'data_saved'), 'logs' for log files, the hidden task-scheduling folder
    './tmp' (stale entries are deleted), and 'configs'.
    """
    def ensure_dir(path):
        # exist_ok keeps an existing directory untouched; a non-directory
        # at the same path still raises, exactly as before.
        os.makedirs(path, exist_ok=True)

    ensure_dir(os.getenv('MAIXUN_DATA_SAVED_PATH', 'data_saved'))  # data folder
    ensure_dir('logs')     # log folder
    ensure_dir('./tmp')    # task-scheduling lock folder
    ensure_dir('configs')  # configuration folder

    # Clear leftovers (e.g. lock files) from a previous run.
    for entry in os.listdir('./tmp'):
        os.remove(os.path.join('./tmp', entry))

def set_logger(filename):
    """Attach a size-rotating file handler to the root logger.

    The handler rotates at ~1 MB and keeps 5 backups; the root logger level
    is set to INFO.

    :param filename: path of the log file to write
    """
    fmt = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    handler = RotatingFileHandler(filename, encoding='UTF-8', maxBytes=1000000, backupCount=5)
    handler.setFormatter(fmt)
    root = logging.getLogger()
    root.setLevel(logging.INFO)
    root.addHandler(handler)


# Import-time side effects: make sure the working directories exist and the
# root logger writes to logs/fetch_preprocess.log before anything else runs.
makedirs()
set_logger('logs/fetch_preprocess.log')


class OutterConfig:
    """Runtime configuration: fid/model mapping from local config files plus
    label trees and domain mappings fetched from the remote service.

    NOTE: instantiating this class performs HTTP requests and file I/O, and
    a module-level instance is created at import time.
    """

    def __init__(self):
        # fid (int) -> model info, loaded from configs/preprocessed_fid2info.json
        self.FID2INFO = self.get_fidInfo()
        # Quality / customer-feeling label trees from the label service.
        self.quality_labels = self.get_quality_labels()
        self.cuzu_labels = self.get_cuzu_labels()
        self.write_fid_locally()
        # mx domain -> website mapping from the remote service.
        self.mx_domain_settings = self.get_mx_domains()

    def write_fid_locally(self):
        """Persist the fid mapping to configs/fid2info.json (UTF-8, pretty)."""
        with codecs.open(filename='configs/fid2info.json', mode='w',
                         encoding='utf-8') as fw:
            json.dump(obj=self.FID2INFO, fp=fw, ensure_ascii=False, indent=4)

    def get_need_preprocess_fids(self):
        """Return the fids that should be (selectively) preprocessed.

        Used when the full data set is more than the available GPUs can
        handle, so processing can be restricted to a subset of fids.

        NOTE(review): returns the raw JSON keys, i.e. *strings* — callers
        comparing against integer fids (e.g. the DB fid column) must convert
        first.
        """
        with codecs.open(filename='configs/preprocessed_fid2info.json', mode='r',
                         encoding='utf-8') as fr:
            mapping = json.load(fr)
        return list(mapping.keys())

    def get_fidInfo(self):
        """Load the fid -> model-info mapping from the local config file,
        converting the JSON string keys to ints.

        (Previously fetched from the remote Maixun_fid_info endpoint.)
        """
        with codecs.open(filename='configs/preprocessed_fid2info.json', mode='r',
                         encoding='utf-8') as fr:
            raw = json.load(fr)
        return {int(key): value for key, value in raw.items()}

    def get_mx_domains(self):
        """Fetch the mx-domain -> website mapping from the remote service."""
        res = requests.get('http://106.75.44.18:7800/maixun_domain_mapping')
        return res.json()

    def fid_list(self):
        """Return all known fids (ints)."""
        return list(self.FID2INFO.keys())

    def fid2ModelName(self, fid):
        """Look up the model info for ``fid``.

        Accepts an int or anything convertible to int. Returns '' (and logs
        an error) when the fid is unknown or not convertible.

        BUG FIX: unknown *int* fids previously raised KeyError while unknown
        string fids returned '' — the two paths are now consistent.
        """
        try:
            return self.FID2INFO[int(fid)]
        except (KeyError, TypeError, ValueError):
            logging.error(f'当前的 {fid} 不存在')
            return ''

    def get_quality_labels(self):
        """Fetch the quality-label tree from the label service."""
        res = requests.get('http://106.75.44.18:7800/quality_labels')
        return res.json()

    def get_cuzu_labels(self):
        """Fetch the customer-feeling (cuzu) label tree from the label service."""
        res = requests.get('http://106.75.44.18:7800/feeling_labels')
        return res.json()

    def get_yesterday_list(self):
        """Return yesterday's date as a single-element ['YYYY-MM-DD'] list."""
        yesterday = datetime.date.today() - datetime.timedelta(days=1)
        return [yesterday.strftime("%Y-%m-%d")]

    def generate_month_dates(self, year, month):
        """Return every date of the given month as 'YYYY-MM-DD' strings,
        excluding today and any future date.

        :param year: calendar year (e.g. 2025)
        :param month: calendar month, 1-12
        """
        start_date = datetime.datetime(year, month, 1)

        # First day of the following month (handles December -> January).
        if month == 12:
            next_month = datetime.datetime(year + 1, 1, 1)
        else:
            next_month = datetime.datetime(year, month + 1, 1)

        dates = []
        current = start_date
        now = datetime.datetime.now()  # hoisted out of the loop
        while current < next_month and current < now:
            dates.append(current.strftime("%Y-%m-%d"))
            current += datetime.timedelta(days=1)
        return dates

    # --- database connection settings --------------------------------------
    # SECURITY(review): credentials are hard-coded in source; move them to
    # environment variables or a secrets store.
    @property
    def db_name(self):
        return 'dev'

    @property
    def db_user(self):
        return 'liuyang'

    @property
    def db_pw(self):
        return 'liuyang177'

    @property
    def db_host(self):
        # return '165.154.125.144'
        return '10.7.19.75'

    @property
    def db_port(self):
        return 5432

    # --- Huawei OBS settings ------------------------------------------------
    # SECURITY(review): access keys are hard-coded in source.
    @property
    def obs_ak(self):
        return "HPUA1GZV2JCGWS83QGK3"

    @property
    def obs_sk(self):
        return "tnEoaYeIQcPOtFO6pVe9g6PlD4sp7w3zWXOIyNTM"

    @property
    def obs_endpoint(self):
        return "https://obs.cn-north-4.myhuaweicloud.com/"

    @property
    def prefix_path(self):
        """Object-key prefix inside the OBS bucket."""
        return "processed_data_downstream_folder"

    @property
    def bucket_name(self):
        return "obs-voc-cssl-post-data-qa"

    @property
    def translate_url(self):
        return 'http://152.32.135.145:17300/translate'

    @property
    def translate_sk(self):
        return 'liuyang177'

    @property
    def data_process_url(self):
        return 'http://106.75.44.18:7900'

    @property
    def data_saved_url(self):
        return 'http://0.0.0.0:8000'

    @property
    def num_threads(self):
        """Worker-thread count for parallel processing."""
        return 25

    def str2int(self, fid):
        """Convert ``fid`` to int, returning 0 (and logging) when impossible."""
        if isinstance(fid, int):
            return fid
        try:
            return int(fid)
        except (TypeError, ValueError):
            logging.info(f'当前的fid无法转换 - {fid}')
            return 0

    @property
    def fetch_lock_file(self):
        """Lock file guarding the OBS fetch task."""
        return "./tmp/obs_task.lock"

    @property
    def process_lock_file(self):
        """Lock file guarding the preprocessing task."""
        return "./tmp/preprocess_task.lock"

    def mxUrlSplit(self, url: str, if_comment: int) -> str:
        """Strip the '?mx_comment_id...' suffix from comment URLs.

        Non-comment URLs (or URLs without the marker) are returned unchanged.
        """
        if if_comment and '?mx_comment_id' in url:
            return url.split('?mx_comment_id')[0]
        return url

    def mx_domain2websites(self, mx_domain):
        """Map an mx domain to its website name, falling back to the domain
        itself when no mapping exists.

        BUG FIX: previously referenced the module-level ``outter_config``
        instead of ``self``.
        """
        return self.mx_domain_settings.get(mx_domain, mx_domain)


outter_config = OutterConfig()


class PG:
    '''
    Thin wrapper around the PostgreSQL "files" bookkeeping table.

    A fresh connection is opened (and closed) per operation.
    '''

    def __init__(self):
        # Connection settings come from the module-level configuration.
        self.host = outter_config.db_host
        self.port = outter_config.db_port
        self.user = outter_config.db_user
        self.passwd = outter_config.db_pw
        self.db = outter_config.db_name

    def create_files_table(self):
        """Create the file-processing-status table if it does not exist."""
        conn = self.get_db_connection()
        try:
            with conn.cursor() as cur:
                cur.execute("""
                    CREATE TABLE IF NOT EXISTS files (
                        id SERIAL PRIMARY KEY,
                        filename TEXT UNIQUE NOT NULL,
                        is_processed BOOLEAN DEFAULT FALSE,
                        OEM VARCHAR(50) DEFAULT '',
                        ModelName VARCHAR(50) DEFAULT '',
                        fid INT DEFAULT 0,
                        fid_date VARCHAR(20) DEFAULT ''
                    )
                """)
                conn.commit()
        finally:
            # BUG FIX: the connection previously leaked if the DDL raised.
            conn.close()
        logging.info("文件表创建完成")

    def get_db_connection(self):
        """Open and return a new database connection.

        :raises psycopg2.OperationalError: when the server is unreachable.
        """
        try:
            conn = psycopg2.connect(
                host=self.host,
                port=self.port,
                user=self.user,
                password=self.passwd,
                database=self.db
            )
            return conn
        except psycopg2.OperationalError as e:
            logging.error(f"数据库连接失败: {e}")
            raise

    def check_and_insert_file(self, filename, fid, day, oem, modelname):
        """Insert a new file record unless one with the same filename exists.

        Failures are logged and rolled back; the method never raises.
        """
        conn = self.get_db_connection()
        try:
            with conn.cursor() as cur:
                cur.execute("SELECT is_processed FROM files WHERE filename = %s", (filename,))
                result = cur.fetchone()
                if result is None:
                    cur.execute(
                        "INSERT INTO files (filename, oem, modelname, fid, fid_date) VALUES (%s, %s, %s, %s, %s)",
                        (filename, oem, modelname, fid, day))
                    conn.commit()
                    # BUG FIX: the log messages were f-strings with no
                    # placeholder and never recorded which file was touched.
                    logging.info("新增文件记录: %s", filename)
                else:
                    logging.debug("文件已存在: %s", filename)
        except Exception as e:
            logging.error(f"处理文件记录失败: {e}")
            conn.rollback()
        finally:
            conn.close()

    def get_unprocessed_files(self):
        """Return up to 200 unprocessed filenames, skipping rows whose fid is
        listed by ``get_need_preprocess_fids``.

        BUG FIX: the config returns fids as JSON *string* keys while the DB
        ``fid`` column is INT, so the previous ``row_fid not in fids`` test
        always succeeded; the fids are converted to ints before comparing.

        NOTE(review): the skip-when-listed direction is kept as-is — confirm
        whether the filter was meant to *select* the listed fids instead.
        """
        done_fids = {outter_config.str2int(f) for f in outter_config.get_need_preprocess_fids()}

        conn = self.get_db_connection()
        try:
            files = []
            with conn.cursor() as cur:
                query = "SELECT filename, fid FROM files WHERE is_processed = FALSE   LIMIT %s"
                # cur.execute(query, (outter_config.num_threads,))
                cur.execute(query, (200,))
                for filename, row_fid in cur.fetchall():
                    if row_fid not in done_fids:
                        files.append(filename)
            return files
        except Exception as e:
            logging.error(f"获取未处理文件失败: {e}")
            return []
        finally:
            conn.close()

    def mark_as_processed(self, filename):
        """Flag ``filename`` as processed (downloaded).

        Failures are logged and rolled back; the method never raises.
        """
        conn = self.get_db_connection()
        try:
            with conn.cursor() as cur:
                cur.execute("UPDATE files SET is_processed = TRUE WHERE filename = %s", (filename,))
                conn.commit()
                logging.info("文件标记为已下载: %s", filename)
        except Exception as e:
            logging.error("下载状态失败: %s, 错误: %s", filename, e)
            conn.rollback()
        finally:
            conn.close()


pg = PG()


class ObsClientWrapper:
    """Huawei OBS client with automatic reconnect-and-retry around calls."""

    def __init__(self):
        self.obs_ak = outter_config.obs_ak
        self.obs_sk = outter_config.obs_sk
        self.obs_endpoint = outter_config.obs_endpoint
        self.max_retries = 3
        self.retry_interval = 5  # seconds between retries
        # The client is created lazily: previously it stayed None until an
        # error-triggered reconnect; _ensure_client now creates it up front.
        self._obs_client = None
        self._lock = threading.Lock()

    def reconnect(self):
        """(Re)build the OBS client connection, closing any previous client."""
        with self._lock:
            try:
                if self._obs_client:
                    self._obs_client.close()
                self._obs_client = ObsClient(
                    access_key_id=self.obs_ak,
                    secret_access_key=self.obs_sk,
                    server=self.obs_endpoint
                )
                logging.info("OBS连接重建成功")
            except Exception as e:
                logging.error(f"OBS连接异常: {e}")
                raise

    def _ensure_client(self):
        """Create the client on first use (it starts out as None)."""
        if self._obs_client is None:
            self.reconnect()

    def list_objects(self, bucket_name, prefix=None, tail_filename='.new.jsonl'):
        """List object keys in ``bucket_name`` ending with ``tail_filename``,
        with automatic retry and reconnect.

        BUG FIXES:
        - the suffix check used a hard-coded 10-character slice and only
          worked for the default tail; ``endswith`` handles any tail;
        - after a 5xx reconnect the code fell through and returned an empty
          list instead of retrying.

        NOTE(review): OBS listObjects responses are paginated; truncation is
        not followed here — confirm result sets stay within one page.

        :return: list of matching object keys (order unspecified)
        """
        for attempt in range(self.max_retries + 1):
            try:
                self._ensure_client()
                response = self._obs_client.listObjects(
                    bucket_name, prefix=prefix
                )
                if response.status < 300:
                    return list({
                        content.key
                        for content in response.body.contents
                        if content.key.endswith(tail_filename)
                    })
                logging.warning(f"OBS listObjects 响应状态码: {response.status}, 重试第 {attempt+1} 次")
                if response.status >= 500 and attempt < self.max_retries:
                    self.reconnect()
                    time.sleep(self.retry_interval)
                    continue
                raise Exception(f"listObjects 调用失败: {response.status}")
            except Exception as e:
                logging.error(f"未知异常: {e}, 重试第 {attempt+1} 次")
                if attempt < self.max_retries:
                    self.reconnect()
                    time.sleep(self.retry_interval)
                else:
                    raise

    def get_object(self, bucket_name, object_key, load_stream_in_memory=True):
        """Fetch one object, with automatic retry and reconnect.

        :return: the successful OBS response object
        """
        for attempt in range(self.max_retries + 1):
            try:
                self._ensure_client()
                response = self._obs_client.getObject(
                    bucket_name,
                    object_key,
                    loadStreamInMemory=load_stream_in_memory
                )
                if response.status < 300:
                    return response
                logging.warning(f"OBS getObject 响应状态码: {response.status}, 重试第 {attempt+1} 次")
                if response.status >= 500 and attempt < self.max_retries:
                    self.reconnect()
                    time.sleep(self.retry_interval)
                    continue
                raise Exception(f"getObject 调用失败: {response.status}")
            except Exception as e:
                logging.error(f"未知异常: {e}, 重试第 {attempt+1} 次")
                if attempt < self.max_retries:
                    self.reconnect()
                    time.sleep(self.retry_interval)
                else:
                    raise

obs_wrapper = ObsClientWrapper()




def if_complaints_exists(title_zh, text_zh):
    '''
    Return True when the combined title+body text is judged to contain a
    customer quality complaint by the LLM classifier, False otherwise
    (including when the combined text is empty).
    '''
    customer_voice = title_zh + text_zh
    if not customer_voice:
        return False
    verdict = quality_analyze.if_quality_complaint(
        customer_voice=customer_voice
    )
    return verdict == '是'


def purchase_year_analyze(
        title_zh,
        text_zh,
        current_date=''
):
    """Infer the vehicle purchase year from the customer text via the LLM tool.

    :param title_zh: post title (Chinese)
    :param text_zh: post body (Chinese)
    :param current_date: reference date string passed through to the tool
    """
    return tool.purchase_year(
        customer_voice=title_zh + text_zh,
        content=current_date
    )


class QualityLabel2IntValue:
    """Flatten the three-level quality-label tree from ``outter_config`` into
    per-level name -> integer-code lookup tables."""

    def __init__(self):
        level1, level2, level3 = {}, {}, {}
        for node1 in outter_config.quality_labels:
            level1[node1['quality_label_1']] = node1['int_value']
            for node2 in node1['items']:
                level2[node2['quality_label_2']] = node2['int_value']
                for node3 in node2['items']:
                    level3[node3['quality_label_3']] = node3['int_value']
        self._quality_labels_1_grade = level1
        self._quality_labels_2_grade = level2
        self._quality_labels_3_grade = level3

    @property
    def quality_labels_1_grade(self):
        """name -> int code for level-1 quality labels."""
        return self._quality_labels_1_grade

    @property
    def quality_labels_2_grade(self):
        """name -> int code for level-2 quality labels."""
        return self._quality_labels_2_grade

    @property
    def quality_labels_3_grade(self):
        """name -> int code for level-3 quality labels."""
        return self._quality_labels_3_grade


quality_label2int_value = QualityLabel2IntValue()


def sentiment2intvalue(sentiment) -> int:
    """Map a sentiment string to an integer score.

    The score is the string's last character parsed as an int; anything that
    is not a string, is empty, or does not end in a digit falls back to the
    neutral value 3.
    """
    if not isinstance(sentiment, str):
        return 3
    try:
        return int(sentiment[-1])
    except (IndexError, ValueError):
        return 3


def quality_label2int_label(_complaint_tags):
    '''
    Convert textual quality labels into their integer codes.

    Each tag is enriched with the level-1/2/3 integer codes, English
    translations of its content snippets, and a numeric sentiment.
    '''
    converted = []
    for raw in _complaint_tags:
        q1 = raw['quality_1']
        q2 = raw['quality_2']
        q3 = raw['quality_3']

        zh_contents = raw['content_list']
        en_contents = [tool.translate(snippet) for snippet in zh_contents]

        converted.append({
            'quality_1': q1,
            'quality_2': q2,
            'quality_3': q3,
            'quality_1_int_value': quality_label2int_value.quality_labels_1_grade[q1],
            'quality_2_int_value': quality_label2int_value.quality_labels_2_grade[q2],
            'quality_3_int_value': quality_label2int_value.quality_labels_3_grade[q3],
            'content_list_zh': zh_contents,
            'content_list_en': en_contents,
            'sentiment': sentiment2intvalue(raw['sentiment']),
        })
    return converted


def quality_preprocessing(title_zh, text_zh):
    """Run the LLM complaint analysis on the title+body text and return the
    tags with their quality labels converted to integer codes."""
    raw_tags = quality_analyze.complaint_analysis(
        customer_voice=title_zh + text_zh
    )
    return quality_label2int_label(raw_tags)


#######################
# Customer Satisfaction (cuzu / customer-feeling) related content follows.
class CuZuLabel2IntValue:
    """Flatten the two-level cuzu (customer-feeling) label tree from
    ``outter_config`` into per-level name -> integer-code lookup tables."""

    def __init__(self):
        level1, level2 = {}, {}
        for node1 in outter_config.cuzu_labels:
            level1[node1['cuzu_label_1']] = node1['int_value']
            for node2 in node1['items']:
                level2[node2['cuzu_label_2']] = node2['int_value']
        self._cuzu_labels_1_grade = level1
        self._cuzu_labels_2_grade = level2

    @property
    def cuzu_labels_1_grade(self):
        """name -> int code for level-1 cuzu labels."""
        return self._cuzu_labels_1_grade

    @property
    def cuzu_labels_2_grade(self):
        """name -> int code for level-2 cuzu labels."""
        return self._cuzu_labels_2_grade


cuzulabel2intvalue = CuZuLabel2IntValue()


def cuzu_label2int_label(_cuzu_tags):
    '''
    Convert textual cuzu (customer-feeling) labels into integer codes.

    Each tag is enriched with the level-1/2 integer codes, English
    translations of its content snippets, and a numeric sentiment.
    '''
    converted = []
    for raw in _cuzu_tags:
        feel_1 = raw['feel_1']
        feel_2 = raw['feel_2']

        zh_contents = raw['content_list']
        en_contents = [tool.translate(snippet) for snippet in zh_contents]

        # NOTE(review): only the first character/element of 'sentiment' is
        # passed here, unlike quality_label2int_label which passes the whole
        # value — confirm this asymmetry is intended.
        sentiment = sentiment2intvalue(raw['sentiment'][0])

        converted.append({
            'feel_1': feel_1,
            'feel_2': feel_2,
            'feel_1_int_value': cuzulabel2intvalue.cuzu_labels_1_grade[feel_1],
            'feel_2_int_value': cuzulabel2intvalue.cuzu_labels_2_grade[feel_2],
            'content_list_zh': zh_contents,
            'content_list_en': en_contents,
            'sentiment': sentiment,
        })
    return converted


def cuzu_preprocessing(title_zh, text_zh, vehicle):
    """Run the vehicle-aware LLM feeling analysis on the title+body text and
    return the tags with their cuzu labels converted to integer codes."""
    raw_tags = cuzu.feel_analysis_withvehicle(
        customer_voice=title_zh + text_zh,
        vehicle=vehicle
    )
    return cuzu_label2int_label(raw_tags)


import os
import time
from typing import Optional, TextIO


class RotatingFileWriter:
    def __init__(self, base_name: str = "data", max_size_mb: int = 1, dir_path: str = "."):
        """
        Size-rotating plain-text writer.

        Writes to ``{dir_path}/{base_name}_{NNN}.txt`` and switches to the
        next numbered file once the current one would reach the size cap.
        Usable as a context manager.

        :param base_name: file-name prefix (e.g. "data")
        :param max_size_mb: maximum size of a single file, in MB
        :param dir_path: directory the files are written into
        """
        self.base_name = base_name
        self.max_size_bytes = max_size_mb * 1024 * 1024  # cap in bytes
        self.dir_path = dir_path
        self.current_file_index = 0
        self.current_file: Optional[TextIO] = None
        self.current_file_size = 0

        # Create the target directory if it does not exist yet.
        os.makedirs(self.dir_path, exist_ok=True)

        # Open the first file up front.
        self._create_new_file()

    def _create_new_file(self):
        """Close the current file (if any) and open the next numbered one."""
        if self.current_file:
            self.current_file.close()

        file_name = f"{self.base_name}_{self.current_file_index:03d}.txt"
        self.current_file_path = os.path.join(self.dir_path, file_name)
        # Append mode: an existing file keeps its content, and its current
        # size is counted toward the rotation threshold.
        self.current_file = open(self.current_file_path, "a", encoding="utf-8")
        self.current_file_index += 1
        self.current_file_size = os.path.getsize(self.current_file_path) if os.path.exists(
            self.current_file_path) else 0

    def write(self, data: str):
        """
        Write ``data`` to the current file, rotating first if the cap would
        be reached.

        :param data: text to append
        :raises ValueError: if the writer has already been closed
        """
        if self.current_file is None:
            # BUG FIX: writing after close() used to fail with an opaque
            # "I/O operation on closed file"; fail with a clear message.
            raise ValueError("write() called on a closed RotatingFileWriter")

        data_size = len(data.encode("utf-8"))  # size in bytes, not characters
        if self.current_file_size + data_size >= self.max_size_bytes:
            self._create_new_file()  # cap reached: switch to a new file

        self.current_file.write(data)
        self.current_file.flush()  # push to disk immediately
        self.current_file_size += data_size

    def close(self):
        """Close the underlying file; safe to call more than once."""
        if self.current_file:
            self.current_file.close()
            self.current_file = None  # makes close() idempotent

    def __enter__(self):
        """Context-manager support: ``with RotatingFileWriter(...) as w:``."""
        return self

    def __exit__(self, exc_type, exc, tb):
        self.close()
        return False


class DailyRotatingFileWriter:
    def __init__(self, base_name: str = "maixun_data", max_size_mb: int = 5, root_dir: str = "./data_saved"):
        """
        Writer that groups output by calendar day.

        Each distinct date gets its own sub-directory under ``root_dir`` and
        its own size-rotating writer; each record is terminated with CRLF.

        :param base_name: file-name prefix inside each daily folder
        :param max_size_mb: size cap of a single file, in MB
        :param root_dir: root directory holding the per-day folders
        """
        self.base_name = base_name
        self.max_size_mb = max_size_mb
        self.root_dir = root_dir
        self.current_date = None
        self.writer: Optional[RotatingFileWriter] = None

    def _ensure_directory(self, current_date):
        """Create (if needed) and return the folder for ``current_date``
        (e.g. data_saved/2023-10-05)."""
        date_dir = os.path.join(self.root_dir, current_date)
        os.makedirs(date_dir, exist_ok=True)
        return date_dir

    def write(self, data: str, current_date: str):
        """
        Append one record under the folder for ``current_date``.

        Switching to a new date closes the previous day's writer and opens a
        fresh one for the new day's folder.

        :param data: text to write (a CRLF terminator is appended)
        :param current_date: date string naming the target folder
        """
        if current_date != self.current_date:
            if self.writer:
                self.writer.close()
            self.current_date = current_date
            self.writer = RotatingFileWriter(
                base_name=self.base_name,
                max_size_mb=self.max_size_mb,
                dir_path=self._ensure_directory(current_date),
            )
        self.writer.write(data)
        self.writer.write('\r\n')

    def close(self):
        """Close the active writer, if any."""
        if self.writer:
            self.writer.close()


writer = DailyRotatingFileWriter()


