import torch
from datasets import load_from_disk
from transformers import AutoTokenizer
from pathlib import Path

from configuration import config
from model.bert_classifier import Classifier
from runner.predict import predict_text

# -------------------------------
# Module-level singletons (lazily initialized to avoid repeated loading)
# -------------------------------
_device = None         # torch.device selected by get_device() (CUDA if available)
_model = None          # Classifier instance populated by get_model()
_tokenizer = None      # AutoTokenizer populated by get_tokenizer()
_label_feature = None  # label feature of the train split, populated by get_label_feature()


def get_device():
    """Return the inference device (CUDA when available), caching it after the first call."""
    global _device
    if _device is not None:
        return _device
    cuda_ok = torch.cuda.is_available()
    _device = torch.device("cuda" if cuda_ok else "cpu")
    print(f"✅ 使用设备: {_device}")
    return _device


def get_model():
    """Lazily load the classifier and cache it at module level.

    :return: the loaded ``Classifier`` in eval mode, on the device from ``get_device()``
    :raises FileNotFoundError: if the weights file is missing
    :raises Exception: re-raises any loading failure after logging it

    Fix: the original assigned the half-built model to the global ``_model``
    *before* ``load_state_dict``; if loading failed, later calls would silently
    return an uninitialized (random-weight) model. We now build into a local and
    publish to the global only after the load fully succeeds.
    """
    global _model
    if _model is None:
        print("🧠 正在加载模型...")
        try:
            model_path = config.MODELS_DIR / "model.pt"
            if not model_path.exists():
                raise FileNotFoundError(f"模型文件未找到: {model_path}")

            device = get_device()  # hoisted: one lookup for both .to() and map_location
            model = Classifier().to(device)
            # NOTE(review): torch.load unpickles model.pt; if the installed torch
            # supports it, prefer weights_only=True for state dicts — confirm version.
            state_dict = torch.load(model_path, map_location=device)
            model.load_state_dict(state_dict)
            model.eval()  # inference mode: disables dropout / BN updates
            _model = model  # publish only after a fully successful load
            print("✅ 模型加载完成！")
        except Exception as e:
            print(f"❌ 模型加载失败: {e}")
            raise
    return _model


def get_tokenizer():
    """Lazily load the bert-base-chinese tokenizer, caching it after the first call."""
    global _tokenizer
    if _tokenizer is not None:
        return _tokenizer
    print("🔤 正在加载 Tokenizer...")
    try:
        local_dir = config.PRE_TRAINED_DIR / "bert-base-chinese"
        if not local_dir.exists():
            raise FileNotFoundError(f"Tokenizer 路径不存在: {local_dir}")
        _tokenizer = AutoTokenizer.from_pretrained(str(local_dir))
        print("✅ Tokenizer 加载完成！")
    except Exception as e:
        print(f"❌ Tokenizer 加载失败: {e}")
        raise
    return _tokenizer


def get_label_feature():
    """Lazily load the label feature from the processed train split, caching it after the first call."""
    global _label_feature
    if _label_feature is not None:
        return _label_feature
    print("🏷️ 正在加载标签映射...")
    try:
        train_dir = config.PROCESSED_DATA_DIR / 'train'
        if not train_dir.exists():
            raise FileNotFoundError(f"数据集路径不存在: {train_dir}")
        train_split = load_from_disk(str(train_dir))
        _label_feature = train_split.features['label']
        print("✅ 标签映射加载完成！")
    except Exception as e:
        print(f"❌ 标签映射加载失败: {e}")
        raise
    return _label_feature


def predict_service(text: str):
    """Prediction service entry point.

    :param text: input text to classify
    :return: (pred_id: int, pred_label: str)
    :raises RuntimeError: wrapping any failure during lazy loading or inference
    """
    try:
        # Python evaluates call arguments left to right, so the lazy loaders
        # run in the same order as before: model, tokenizer, device, labels.
        return predict_text(
            text,
            get_model(),
            get_tokenizer(),
            get_device(),
            get_label_feature(),
        )
    except Exception as e:
        print(f"❌ 预测执行失败: {str(e)}")
        raise RuntimeError(f"预测失败: {str(e)}") from e

