from flask import Blueprint, render_template, request, jsonify
from app.models.neural_network import FaultDiagnosisModel
from app.utils.data_processor import DataProcessor
import torch
import numpy as np

# Blueprint for the inference UI and API, mounted under /inference.
inference_bp = Blueprint('inference', __name__, url_prefix='/inference')
# Module-level singletons shared by all requests.
# NOTE(review): the model is instantiated eagerly at import time and is
# mutated per-request (load_model/eval in the handler below) — presumably
# safe for a single-worker deployment; confirm thread-safety under a
# multi-threaded WSGI server.
model = FaultDiagnosisModel()
data_processor = DataProcessor()


@inference_bp.route('/', methods=['GET'])
def index():
    """Render the inference page."""
    page = render_template('inference.html')
    return page


@inference_bp.route('/inference', methods=['POST'])
def inference():
    """Handle an inference request.

    Expects a JSON body with an 'input' key. On success returns JSON with
    the predicted class index, its confidence, and the full probability
    distribution. Returns 400 for client-side problems (missing input,
    untrained model, bad input data) and 500 for unexpected server errors.
    """
    # Validate the payload first so malformed requests never trigger a
    # model load. silent=True yields None for unparseable bodies instead
    # of raising, keeping the error response as JSON.
    data = request.get_json(silent=True)
    if not data or 'input' not in data:
        return jsonify({
            'status': 'error',
            'message': '缺少输入数据'
        }), 400

    # Load trained weights; reject if no trained model exists yet.
    if not model.load_model():
        return jsonify({
            'status': 'error',
            'message': '模型未训练'
        }), 400

    try:
        # Convert the raw client input into a model-ready tensor.
        input_data = data_processor.prepare_inference_data(data['input'])

        # Run the forward pass without tracking gradients.
        model.eval()
        with torch.no_grad():
            output = model(input_data)
            probabilities = torch.softmax(output, dim=1)
            # argmax over probabilities equals argmax over logits; using
            # `probabilities` keeps it consistent with the confidence lookup.
            prediction = torch.argmax(probabilities, dim=1).item()
            confidence = probabilities[0][prediction].item()

        return jsonify({
            'status': 'success',
            'prediction': prediction,
            'confidence': float(confidence),
            'probabilities': probabilities[0].tolist()
        })
    except ValueError as e:
        # Bad input content/shape supplied by the client -> 400.
        return jsonify({
            'status': 'error',
            'message': str(e)
        }), 400
    except Exception as e:
        # Unexpected server-side failure -> 500. (Original bug: every
        # exception, including internal errors, was reported as a 400
        # client error.)
        return jsonify({
            'status': 'error',
            'message': str(e)
        }), 500
