import torch
import torch_npu
# from vllm.platforms import current_platform
import torch.nn as nn
import dataclasses
from typing import Optional, Any


@dataclasses.dataclass
class ACLGraphEntry:
    """Bookkeeping for one captured ACL graph: what batch it was captured
    for, the graph object itself, and the output produced during capture
    (which is the same buffer returned again on every replay)."""

    # BUG FIX: the original wrote `batch_descriptor = None` with no type
    # annotation, so dataclasses treated it as a plain class variable — it
    # was silently missing from __init__, fields(), repr and eq. Annotating
    # it makes it a real per-instance field.
    # NOTE(review): presumably holds a BatchDescriptor — confirm against callers.
    batch_descriptor: Optional[Any] = None
    # The captured graph. The annotation is a string (lazy forward ref) so
    # this class stays importable even where torch_npu is not installed.
    aclgraph: Optional["torch.npu.NPUGraph"] = None
    # Output tensor(s) recorded at capture time.
    output: Optional[Any] = None

    # for aclgraph debugging, track the input addresses
    # during capture, and check if they are the same during replay
    input_addresses: Optional[list[int]] = None

class MyModel(nn.Module):
    """Small 128 -> 256 -> 10 MLP used to exercise ACL graph capture."""

    def __init__(self):
        super().__init__()
        # Attribute names (fc1 / relu / fc2) are kept stable: they define
        # the module's state_dict layout.
        self.fc1 = nn.Linear(128, 256)
        self.relu = nn.ReLU()
        self.fc2 = nn.Linear(256, 10)

    def forward(self, x):
        """Apply fc1 -> ReLU -> fc2 and return the (batch, 10) logits."""
        # Single fused expression rather than step-by-step rebinding of x.
        return self.fc2(self.relu(self.fc1(x)))


# --- ACL graph capture/replay demo ---------------------------------------
# NOTE(review): everything below requires an Ascend NPU runtime (torch_npu);
# it will fail on a machine without NPU devices.
aclgraph = torch.npu.NPUGraph()
# graph_pool = current_platform.get_global_graph_pool()
# Memory pool shared by graphs captured with the same handle.
graph_pool_handle = torch.npu.graph_pool_handle()
x = torch.randn(32, 128).npu()  # batch of 32 samples
model = MyModel().npu()
# Capture: ops executed inside this context are recorded into `aclgraph`
# instead of (only) running eagerly; `x` and `output` become the graph's
# fixed input/output buffers.
with torch.npu.graph(aclgraph, pool=graph_pool_handle):
    output = model(x)
print(f"capture: x= {x}")    
print(f"capture: output={output}")
# In-place copy_ overwrites the *same* buffer the graph captured, so the
# replay below should compute on the zeroed input — presumably `output`
# is updated in place to model(0) after replay (verify on NPU hardware).
x.copy_(torch.zeros(32, 128))
print(f"replay: x= {x}")    
aclgraph.replay()
print(f"replay: output={output}")
print("end")