# app.py - backend API service
import base64
import io
import json
import math

import numpy as np
import pandas as pd
import torch
import torch.nn as nn
from flask import Flask, request, jsonify, render_template

# Flask application instance, created at import time so the route
# decorators below can register on it.
app = Flask(__name__)

# Model definition and loading
class PositionalEncoding(nn.Module):
    """Add fixed sinusoidal position information to a (batch, seq, d_model) input."""

    def __init__(self, d_model, dropout=0.1, max_len=5000):
        super().__init__()
        self.dropout = nn.Dropout(p=dropout)
        # Precompute the sinusoid table once; registered as a buffer so it
        # is saved/moved with the module but is not a trainable parameter.
        pe = torch.zeros(max_len, d_model)
        position = torch.arange(0, max_len, dtype=torch.float).unsqueeze(1)
        div_term = torch.exp(torch.arange(0, d_model, 2).float() * (-math.log(10000.0) / d_model))
        pe[:, 0::2] = torch.sin(position * div_term)
        pe[:, 1::2] = torch.cos(position * div_term)
        pe = pe.unsqueeze(0)  # (1, max_len, d_model) for broadcast over batch
        self.register_buffer('pe', pe)

    def forward(self, x):
        x = x + self.pe[:, :x.size(1), :]
        return self.dropout(x)


class ECGTransformerClassifier(nn.Module):
    """Transformer encoder that classifies a fixed-length 1-D ECG signal.

    Input: (batch, seq_len) float tensor of raw samples.
    Output: (batch, output_dim) unnormalized class logits.
    """

    def __init__(self, input_dim=187, d_model=256, nhead=8, num_layers=4,
                 dim_feedforward=512, output_dim=4, dropout=0.1):
        super().__init__()
        self.d_model = d_model
        # Project each scalar sample to the model dimension.
        self.embedding = nn.Linear(1, d_model)
        self.pos_encoder = PositionalEncoding(d_model, dropout)
        encoder_layer = nn.TransformerEncoderLayer(
            d_model=d_model,
            nhead=nhead,
            dim_feedforward=dim_feedforward,
            dropout=dropout,
            batch_first=False,  # encoder consumes (seq, batch, d_model)
        )
        self.transformer_encoder = nn.TransformerEncoder(encoder_layer, num_layers)
        self.fc_out = nn.Sequential(
            nn.Linear(d_model, d_model // 2),
            nn.ReLU(),
            nn.LayerNorm(d_model // 2),
            nn.Linear(d_model // 2, output_dim),
        )

    def forward(self, src):
        # (batch, seq) -> (batch, seq, 1) so each sample embeds independently.
        src = src.unsqueeze(-1)
        src = self.embedding(src) * math.sqrt(self.d_model)
        src = self.pos_encoder(src)
        # Move to (seq, batch, d_model) for the batch_first=False encoder.
        src = src.transpose(0, 1)
        memory = self.transformer_encoder(src)
        # Mean-pool over the sequence dimension before classification.
        output = self.fc_out(memory.mean(dim=0))
        return output


def load_model(model_path):
    """Build the classifier, load trained weights from *model_path* onto CPU,
    and return the model in eval mode (dropout disabled, deterministic).

    The classes are defined at module level (rather than rebuilt inside every
    call, as before) so they are importable and constructed only once each.
    """
    model = ECGTransformerClassifier().to(torch.device('cpu'))
    # weights_only=True restricts unpickling to tensors/state dicts, preventing
    # arbitrary-code execution from a tampered checkpoint file.
    model.load_state_dict(
        torch.load(model_path, map_location=torch.device('cpu'), weights_only=True)
    )
    model.eval()
    return model

# Data preprocessing
def preprocess_ecg(ecg_data, seq_len=187):
    """Coerce a raw ECG sequence into a fixed-length (1, seq_len) float32 tensor.

    Signals shorter than seq_len are right-padded with zeros; longer ones
    are truncated.
    """
    signal = np.array(ecg_data, dtype=np.float32)
    deficit = seq_len - len(signal)
    if deficit > 0:
        # Right-pad with zeros up to the model's fixed input length.
        signal = np.pad(signal, (0, deficit), 'constant', constant_values=0)
    else:
        signal = signal[:seq_len]
    # Prepend the batch dimension expected by the model.
    return torch.FloatTensor(signal).unsqueeze(0)

# Class-index -> user-facing label mapping. Labels are returned to clients
# verbatim (Chinese: normal / myocardial ischemia / arrhythmia / myocardial
# infarction), so they must stay byte-identical to what the frontend expects.
CLASS_MAPPING = {
    0: "正常",
    1: "心肌缺血",
    2: "心律失常",
    3: "心肌梗死"
}

# Load the trained model once at import time so every request reuses it.
# NOTE(review): path is hard-coded; import fails at startup if the file is
# missing — confirm deployment ships best_model.pth alongside app.py.
model = load_model('./best_model.pth')

@app.route('/')
def index():
    """Serve the frontend page (templates/index.html)."""
    return render_template('index.html')

@app.route('/api/predict', methods=['POST'])
def predict():
    """Classify one ECG sequence posted as JSON.

    Expects a body of the form ``{"ecg_data": [float, ...]}`` and returns the
    predicted class index, its label, the top-class confidence, and the full
    per-class probability distribution. Responds 400 on a missing/invalid
    body and 500 on unexpected errors.
    """
    try:
        # silent=True yields None (instead of raising) for a missing or
        # malformed JSON body, so bad requests get a clean 400 rather than
        # the 500 the previous `request.json` access produced.
        data = request.get_json(silent=True)

        if not isinstance(data, dict) or 'ecg_data' not in data:
            return jsonify({"error": "缺少ECG数据"}), 400

        ecg_data = data['ecg_data']

        # Pad/truncate to the model's fixed input length and add a batch dim.
        ecg_tensor = preprocess_ecg(ecg_data)

        # Inference only — no autograd bookkeeping needed.
        with torch.no_grad():
            output = model(ecg_tensor)
            probs = torch.softmax(output, dim=1)
            confidence, prediction = torch.max(probs, 1)

        result = {
            "prediction": int(prediction.item()),
            "class": CLASS_MAPPING[prediction.item()],
            "confidence": float(confidence.item()),
            "probabilities": {
                # One entry per known class, keyed by its label.
                CLASS_MAPPING[i]: float(probs[0, i].item())
                for i in range(len(CLASS_MAPPING))
            }
        }

        return jsonify(result)

    except Exception as e:
        # NOTE(review): echoing str(e) to the client can leak internals;
        # consider logging the traceback server-side and returning a
        # generic message instead.
        return jsonify({"error": str(e)}), 500

if __name__ == '__main__':
    # NOTE(review): debug=True combined with host='0.0.0.0' exposes the
    # Werkzeug debugger (remote code execution) to the whole network —
    # disable debug, or bind to 127.0.0.1, before deploying.
    app.run(debug=True, host='0.0.0.0', port=5000)