# Inference entry-point script
import logging

import yaml
import torch
from src.model.bert_wrapper import load_bert_model
from src.inference.predictor import BertPredictor
from src.utils.logger import get_logger
import warnings
warnings.filterwarnings("ignore")

import os
from dotenv import load_dotenv
# Load environment variables from a hard-coded project .env path.
# NOTE(review): absolute /tmp path will break outside this machine — confirm
# whether this should be derived from the script location instead.
load_dotenv('/tmp/code/bert_fine_tuning/.env')
# NOTE(review): this hits the root logger at import time, before any logging
# configuration — at the default WARNING level it likely emits nothing; verify
# whether this PYTHONPATH echo is still wanted.
logging.info(os.getenv('PYTHONPATH'))


def main():
    """Run one end-to-end inference pass with the fine-tuned BERT model.

    Loads the base and inference YAML configs (paths relative to the
    working directory), restores the latest fine-tuned checkpoint, builds
    a predictor, and logs the prediction for a fixed sample sentence.

    Raises:
        FileNotFoundError: if either config file or the checkpoint is missing.
        KeyError: if a required key is absent from the base config.
    """
    # Initialize logging
    logger = get_logger('predict_script')
    logger.info('开始推理流程')

    # Load configuration; explicit encoding avoids platform-default surprises.
    with open('config/base.yaml', 'r', encoding='utf-8') as f:
        base_config = yaml.safe_load(f)
    with open('config/inference.yaml', 'r', encoding='utf-8') as f:
        # NOTE(review): inference_config is currently unused below — confirm
        # whether it should be passed to BertPredictor.
        inference_config = yaml.safe_load(f)

    # BUG FIX: the original used f'... {base_config['model_name']}', nesting
    # single quotes inside a single-quoted f-string — a SyntaxError on every
    # Python before 3.12. Lazy %-style logging args avoid the issue entirely
    # and skip formatting when the level is disabled.
    logger.info('加载模型: %s', base_config['model_name'])
    model = load_bert_model(
        'models/finetuned/latest',  # use the fine-tuned checkpoint
        task_type=base_config['task_type'],
        num_labels=base_config['num_labels'],
        use_quantization=base_config.get('use_quantization', False),
        from_scratch=base_config.get('from_scratch', False)
    )

    # Build the predictor around the restored model.
    predictor = BertPredictor(model, base_config['model_name'])

    # Smoke-test prediction on a fixed sample sentence.
    test_text = '这是一个测试文本'
    result = predictor.predict(test_text)
    logger.info('预测结果: %s', result)


# Run inference only when executed directly, not on import.
if __name__ == '__main__':
    main()