import numpy as np
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
import torch
import torch.optim as optim
from functools import partial

def gradient_clipping_example(num_batches=2, max_norm=1.0):
    """Gradient clipping example: prevents exploding gradients.

    Runs a short training loop over synthetic data and clips the global
    gradient norm before each optimizer step.

    Args:
        num_batches: Number of synthetic (data, target) batches to train on.
        max_norm: Maximum allowed global gradient norm passed to
            ``torch.nn.utils.clip_grad_norm_``.

    Returns:
        None.
    """
    model = torch.nn.LSTM(input_size=100, hidden_size=50, num_layers=3)
    optimizer = optim.Adam(model.parameters(), lr=0.001)
    # Original code referenced an undefined `criterion`; a regression loss
    # matches the LSTM's continuous output.
    criterion = torch.nn.MSELoss()

    # Original code referenced an undefined `dataloader`; build synthetic
    # batches shaped (seq_len, batch, input_size) with targets matching the
    # LSTM output shape (seq_len, batch, hidden_size).
    dataloader = [
        (torch.randn(5, 4, 100), torch.randn(5, 4, 50))
        for _ in range(num_batches)
    ]

    # Training loop with gradient clipping.
    for batch_idx, (data, target) in enumerate(dataloader):
        optimizer.zero_grad()
        # LSTM returns (output, (h_n, c_n)); the original bound the whole
        # tuple to `output`, which would break the loss computation.
        output, _ = model(data)
        loss = criterion(output, target)
        loss.backward()

        # Gradient clipping: cap the global gradient norm at `max_norm`.
        torch.nn.utils.clip_grad_norm_(model.parameters(), max_norm=max_norm)

        optimizer.step()

if __name__ == "__main__":
    # Run the demo only when executed as a script, not on import.
    gradient_clipping_example()