"""
数据分割器（迁移到 utils）

用于将输入数据按标签分割到不同的文件中（如果需要文件级 few-shot 回退或统计）。
注意：常规流程已使用向量库进行相似度检索，本模块不再在主流程中默认调用。
"""

import json
import logging
import os
from datetime import datetime
from typing import Dict, List, Any
from collections import defaultdict

logger = logging.getLogger(__name__)


class DataSplitter:
    """Split a JSONL dataset into one file per label.

    Each input line is a JSON object. Records are grouped by
    ``evaluation.label_id`` and written to ``label_<id>.jsonl`` files under
    ``output_dir``, together with a ``split_stats.json`` summary of the
    label distribution.
    """

    def __init__(self, input_file: str, output_dir: str = "../data/label_data"):
        """
        Args:
            input_file: Path to the input JSONL file.
            output_dir: Directory receiving the per-label files; created
                if it does not already exist.
        """
        self.input_file = input_file
        self.output_dir = output_dir
        # Count of kept records per label key ("label_<id>").
        self.label_stats: Dict[str, int] = defaultdict(int)
        os.makedirs(output_dir, exist_ok=True)

    def split_data_by_label(self) -> List[str]:
        """Read the input file, group records by label, and write the groups.

        Records lacking a truthy ``label_id``/``label_name`` or labeled
        "其他分类" ("other") are filtered out. Blank lines are skipped
        silently; malformed JSON lines are logged and skipped.

        Returns:
            Paths of the per-label JSONL files that were written.
        """
        logger.info("开始分割数据文件: %s", self.input_file)

        label_groups: Dict[str, List[Dict[str, Any]]] = defaultdict(list)
        total_count = 0
        filtered_count = 0

        with open(self.input_file, 'r', encoding='utf-8') as f:
            for line_num, line in enumerate(f, 1):
                line = line.strip()
                if not line:
                    # Blank lines are not data errors; don't log them.
                    continue
                try:
                    data = json.loads(line)
                except json.JSONDecodeError as e:
                    logger.error("第%s行JSON解析失败: %s", line_num, e)
                    continue
                total_count += 1
                try:
                    evaluation = data.get('evaluation', {})
                    label_id = evaluation.get('label_id')
                    label_name = evaluation.get('label_name', '')
                    # NOTE: truthiness also filters label_id == 0 / empty
                    # names — preserved from the original behavior.
                    if label_id and label_name and label_name != "其他分类":
                        label_key = f"label_{label_id}"
                        label_groups[label_key].append(data)
                        self.label_stats[label_key] += 1
                    else:
                        filtered_count += 1
                except Exception as e:
                    # Record-level failures (e.g. 'evaluation' not a dict)
                    # are logged and skipped so one bad record cannot abort
                    # the whole split.
                    logger.error("处理第%s行时发生错误: %s", line_num, e)

        saved_files: List[str] = []
        # defaultdict entries only exist after an append, so every group
        # here is non-empty.
        for label_key, data_list in label_groups.items():
            output_file = os.path.join(self.output_dir, f"{label_key}.jsonl")
            self._save_data_to_file(output_file, data_list)
            saved_files.append(output_file)
            logger.info("保存标签 %s: %s 条数据到 %s", label_key, len(data_list), output_file)

        stats_file = os.path.join(self.output_dir, "split_stats.json")
        self._save_stats(stats_file)

        logger.info("数据分割完成！")
        logger.info("总数据量: %s", total_count)
        logger.info("过滤数据量: %s", filtered_count)
        logger.info("保存文件数: %s", len(saved_files))
        return saved_files

    def _save_data_to_file(self, output_file: str, data_list: List[Dict[str, Any]]) -> None:
        """Write *data_list* to *output_file*, one JSON object per line."""
        with open(output_file, 'w', encoding='utf-8') as f:
            f.writelines(json.dumps(data, ensure_ascii=False) + '\n' for data in data_list)

    def _save_stats(self, stats_file: str) -> None:
        """Write the label-distribution summary as pretty-printed JSON."""
        stats = {
            "total_files": len(self.label_stats),
            "label_distribution": dict(self.label_stats),
            "timestamp": str(datetime.now()),
        }
        with open(stats_file, 'w', encoding='utf-8') as f:
            json.dump(stats, f, ensure_ascii=False, indent=2)

    def get_label_stats(self) -> Dict[str, int]:
        """Return a plain-dict snapshot of the per-label record counts."""
        return dict(self.label_stats)


def main():
    """CLI entry point: split a JSONL file into per-label files."""
    import argparse

    arg_parser = argparse.ArgumentParser(description="按标签分割数据")
    arg_parser.add_argument("--input", "-i", required=True, help="输入文件路径")
    arg_parser.add_argument("--output", "-o", default="../data/label_data", help="输出目录")
    parsed = arg_parser.parse_args()

    DataSplitter(parsed.input, parsed.output).split_data_by_label()


# Allow running this module directly as a command-line script.
if __name__ == "__main__":
    main()


