import torch
from torch.optim.optimizer import Optimizer

class CustomOptimizer(Optimizer):
    """Plain gradient descent: ``p <- p - learning_rate * p.grad``."""

    def __init__(self, params, learning_rate=0.01):
        # Reject negative rates early, consistent with GradientDescent below.
        if learning_rate < 0.0:
            raise ValueError("Invalid learning rate: {}".format(learning_rate))
        defaults = dict(learning_rate=learning_rate)
        super(CustomOptimizer, self).__init__(params, defaults)

    def step(self, closure=None):
        """Perform a single optimization step.

        Args:
            closure: optional callable that re-evaluates the model and
                returns the loss (standard ``Optimizer.step`` contract).

        Returns:
            The loss computed by ``closure``, or ``None`` if no closure
            was given.
        """
        loss = None
        if closure is not None:
            # Gradients must be enabled while the closure recomputes the loss.
            with torch.enable_grad():
                loss = closure()
        for group in self.param_groups:
            lr = group['learning_rate']
            for p in group['params']:
                if p.grad is None:
                    continue
                # In-place update under no_grad so autograd does not record
                # it; avoids the deprecated .data attribute.
                with torch.no_grad():
                    p.add_(p.grad, alpha=-lr)
        return loss


class GradientDescent(Optimizer):
    """Vanilla gradient descent: ``p <- p - lr * p.grad``."""

    def __init__(self, params, lr=0.01):
        # Validate before registering the param groups.
        if lr < 0.0:
            raise ValueError("Invalid learning rate: {}".format(lr))
        defaults = dict(lr=lr)
        super(GradientDescent, self).__init__(params, defaults)

    def step(self, closure=None):
        """Perform a single optimization step.

        Args:
            closure: optional callable that re-evaluates the model and
                returns the loss.

        Returns:
            The loss computed by ``closure``, or ``None`` if no closure
            was given.
        """
        loss = None
        if closure is not None:
            # The closure needs gradients enabled to rebuild the graph.
            with torch.enable_grad():
                loss = closure()
        for group in self.param_groups:
            lr = group['lr']
            for p in group['params']:
                if p.grad is None:
                    continue
                with torch.no_grad():
                    # In-place update; no deprecated .data access and no
                    # debug printing inside the hot loop.
                    p.add_(p.grad, alpha=-lr)
        return loss

# Example: a two-dimensional vector optimization problem.
# Objective function: f(x, y) = x^2 + y^2
# Goal: find the (x, y) that minimizes f(x, y).
# Gradient: f'(x, y) = (2x, 2y)
# Initial value: (x, y) = (5, 3)

def objective_function(vec):
    """Evaluate f(x, y) = x**2 + y**2 for a 2-element vector ``vec``."""
    first, second = vec
    return first ** 2 + second ** 2

# Initial parameters and the optimizer.
params = torch.tensor([5.0, 3.0], requires_grad=True)
optimizer = GradientDescent([params], lr=0.1)

# Iterative optimization loop.
for step_idx in range(100):
    # Evaluate the objective at the current parameters.
    loss = objective_function(params)

    # Back-propagate to populate params.grad.
    loss.backward()

    # Report the raw gradient values.
    print("Iteration:", step_idx, "Gradients:", params.grad.data.numpy())

    # Report the optimization progress.
    print("Iteration:", step_idx, "Loss:", loss.item(), "x:", params[0].item(), "y:", params[1].item())

    # Take one gradient-descent step.
    optimizer.step()

    # Reset gradients before the next iteration.
    params.grad.zero_()

print("Optimized x:", params[0].item(), "Optimized y:", params[1].item())





