#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import glob
import json
import logging
import os
from typing import Any, Dict, List, Optional

from config import LOG_LEVEL, LOG_FORMAT, LOG_FILE

# Module-level flag: root logger is configured at most once per process.
_LOGGING_CONFIGURED = False

def setup_logging(name: str = "pr_data_constructor") -> logging.Logger:
    """Initialize root logging once and return a named child logger.

    The first call installs a console handler and (best-effort) a file
    handler on the root logger; subsequent calls just hand back a child
    logger that reuses those handlers.

    Args:
        name: Logger name to return (child of root).

    Returns:
        A ``logging.Logger`` for *name*.
    """
    global _LOGGING_CONFIGURED

    if _LOGGING_CONFIGURED:
        return logging.getLogger(name)

    root = logging.getLogger()
    root.setLevel(getattr(logging, LOG_LEVEL))

    # Only attach handlers when none exist, to avoid duplicate output.
    if not root.handlers:
        formatter = logging.Formatter(LOG_FORMAT)

        # Console output.
        console = logging.StreamHandler()
        console.setFormatter(formatter)
        root.addHandler(console)

        # File output — failures here must not break the application.
        try:
            parent = os.path.dirname(LOG_FILE)
            if parent:
                os.makedirs(parent, exist_ok=True)
            to_file = logging.FileHandler(LOG_FILE, encoding='utf-8')
            to_file.setFormatter(formatter)
            root.addHandler(to_file)
        except Exception:
            # Deliberate best-effort: console logging still works.
            pass

    _LOGGING_CONFIGURED = True
    return logging.getLogger(name)

def ensure_dir(path: str) -> None:
    """Create directory *path* (including parents) if it does not exist.

    An empty path — e.g. ``os.path.dirname("file.json")`` for a bare
    filename — means "current directory" and needs no creation, so it is
    a no-op here instead of the ``FileNotFoundError`` that
    ``os.makedirs("")`` would raise.
    """
    if path:
        os.makedirs(path, exist_ok=True)

def save_json(data: Any, filepath: str, ensure_ascii: bool = False) -> None:
    """Serialize *data* to *filepath* as pretty-printed UTF-8 JSON.

    Parent directories are created on demand. A bare filename (no
    directory component) writes to the current directory instead of
    crashing on ``os.makedirs("")``.

    Args:
        data: Any JSON-serializable object.
        filepath: Destination path; parents are auto-created.
        ensure_ascii: Passed through to ``json.dump`` (False keeps
            non-ASCII characters readable).
    """
    parent = os.path.dirname(filepath)
    if parent:  # guard: os.makedirs("") raises FileNotFoundError
        os.makedirs(parent, exist_ok=True)
    with open(filepath, 'w', encoding='utf-8') as f:
        json.dump(data, f, ensure_ascii=ensure_ascii, indent=2)

def load_json(filepath: str) -> Any:
    """Read *filepath* as UTF-8 text and return the parsed JSON object."""
    with open(filepath, 'r', encoding='utf-8') as handle:
        raw = handle.read()
    return json.loads(raw)

def append_json(data: Any, filepath: str, ensure_ascii: bool = False) -> None:
    """Append one record to a JSON-array file, creating it if needed.

    The whole array is read, extended, and rewritten (not an atomic
    append). A missing/empty file starts a new array. Best-effort on
    corrupt content: an unparsable file is restarted as an empty list,
    and a non-list top-level value is wrapped so ``.append`` cannot
    crash with ``AttributeError``.

    Args:
        data: JSON-serializable record to append.
        filepath: Target JSON-array file; parents are auto-created.
        ensure_ascii: Passed through to ``json.dump``.
    """
    parent = os.path.dirname(filepath)
    if parent:  # guard: os.makedirs("") raises FileNotFoundError
        os.makedirs(parent, exist_ok=True)

    if not os.path.exists(filepath) or os.path.getsize(filepath) == 0:
        existing: List[Any] = []
    else:
        with open(filepath, 'r', encoding='utf-8') as f:
            try:
                existing = json.load(f)
            except ValueError:
                # ValueError covers json.JSONDecodeError; deliberate
                # best-effort — a corrupt file restarts the array.
                existing = []
        if not isinstance(existing, list):
            # Wrap a scalar/object top level so append below is safe.
            existing = [existing]

    existing.append(data)
    with open(filepath, 'w', encoding='utf-8') as f:
        json.dump(existing, f, ensure_ascii=ensure_ascii, indent=2)

def save_jsonl(data_list: List[Dict], filepath: str, ensure_ascii: bool = False) -> None:
    """Write *data_list* to *filepath* in JSONL format (one object per line).

    Parent directories are created on demand; a bare filename (no
    directory component) writes to the current directory instead of
    crashing on ``os.makedirs("")``.

    Args:
        data_list: Records to write, one JSON object per line.
        filepath: Destination path; parents are auto-created.
        ensure_ascii: Passed through to ``json.dumps``.
    """
    parent = os.path.dirname(filepath)
    if parent:  # guard: os.makedirs("") raises FileNotFoundError
        os.makedirs(parent, exist_ok=True)
    with open(filepath, 'w', encoding='utf-8') as f:
        # writelines + generator batches the small writes lazily
        f.writelines(json.dumps(item, ensure_ascii=ensure_ascii) + '\n'
                     for item in data_list)

def load_jsonl(filepath: str) -> List[Dict]:
    """Parse a JSONL file: one JSON object per non-blank line."""
    with open(filepath, 'r', encoding='utf-8') as handle:
        return [json.loads(raw) for raw in handle if raw.strip()]

def append_batch_to_json(batch: List[Any], filepath: str, ensure_ascii: bool = False) -> None:
    """Append a batch of records to a JSON-array file (create/merge/save).

    Mirrors ``append_json`` semantics for many records at once: a
    missing or empty file starts a new array, a non-list top level is
    wrapped so ``.extend`` cannot crash, and parents are auto-created
    (bare filenames no longer crash on ``os.makedirs("")``).

    Args:
        batch: Iterable of JSON-serializable records to append.
        filepath: Target JSON-array file.
        ensure_ascii: Passed through to ``json.dump``.
    """
    parent = os.path.dirname(filepath)
    if parent:  # guard: os.makedirs("") raises FileNotFoundError
        os.makedirs(parent, exist_ok=True)

    if os.path.exists(filepath) and os.path.getsize(filepath) > 0:
        with open(filepath, "r", encoding="utf-8") as f:
            data = json.load(f)
        if not isinstance(data, list):
            data = [data]
    else:
        data = []

    data.extend(batch)
    with open(filepath, "w", encoding="utf-8") as f:
        json.dump(data, f, ensure_ascii=ensure_ascii, indent=2)

def find_latest_file(directory: str, pattern: str) -> Optional[str]:
    """Return the most recently modified file in *directory* matching *pattern*.

    Args:
        directory: Directory to search (non-recursive).
        pattern: ``glob``-style pattern, e.g. ``"*.json"``.

    Returns:
        Path of the newest match by mtime, or ``None`` when the
        directory does not exist or nothing matches.
    """
    if not os.path.isdir(directory):
        return None
    matches = glob.glob(os.path.join(directory, pattern))
    if not matches:
        return None
    return max(matches, key=os.path.getmtime)

def append_jsonl_line(item: Dict, filepath: str, ensure_ascii: bool = False) -> None:
    """Append a single record as one JSONL line (directories auto-created).

    A bare filename (no directory component) appends in the current
    directory instead of crashing on ``os.makedirs("")``.

    Args:
        item: JSON-serializable record written as one line.
        filepath: Target JSONL file, opened in append mode.
        ensure_ascii: Passed through to ``json.dumps``.
    """
    parent = os.path.dirname(filepath)
    if parent:  # guard: os.makedirs("") raises FileNotFoundError
        os.makedirs(parent, exist_ok=True)
    with open(filepath, 'a', encoding='utf-8') as f:
        f.write(json.dumps(item, ensure_ascii=ensure_ascii) + '\n')