# Wraps torchvision's built-in resnet18 and adds a layer-by-layer forward pass plus a per-layer GPU timing method.
import torch
from torch import nn
from torchvision.models import resnet18

class resnet18_mapping(nn.Module):
    """Wraps torchvision's resnet18 as an ordered list of named layers.

    The standard resnet18 forward pass is decomposed into a flat
    ``[name, module]`` list so the network can be executed (and timed)
    one layer at a time.
    """

    def __init__(self, device="cuda"):
        """Build an eval-mode resnet18 and record its layers in order.

        Args:
            device: device the model is moved to. Defaults to ``"cuda"``,
                matching the original hard-coded ``.cuda()`` behavior.
        """
        super().__init__()
        model = resnet18().to(device)
        model.eval()
        self.model = model
        layers = [
            ["c0", model.conv1],
            ["b0", model.bn1],
            ["r0", model.relu],
            ["p0", model.maxpool],
        ]
        # nn.Sequential supports indexing; stage[0] replaces the original
        # stage.__getattr__("0") calls. Each resnet18 stage has 2 blocks.
        stages = [model.layer1, model.layer2, model.layer3, model.layer4]
        for i, stage in enumerate(stages):
            layers.append(["layer{}.0".format(i), stage[0]])
            layers.append(["layer{}.1".format(i), stage[1]])
        layers.append(["p1", model.avgpool])
        # resnet18.forward flattens between avgpool and fc; an explicit
        # Flatten module reproduces that step in the sequential list.
        layers.append(["flatten", torch.nn.Flatten()])
        layers.append(["fc", model.fc])
        self.layers = layers

    def forward(self, x):
        """Run ``x`` through every recorded layer in order and return the result."""
        for _, module in self.layers:
            x = module(x)
        return x

    def timming(self, x):
        """Print a per-layer GPU timing breakdown (in ms) for input ``x``.

        Note: the misspelled name is kept for backward compatibility;
        ``timing`` is provided as a correctly spelled alias.
        """
        # Warm-up so lazy CUDA initialization / kernel autotuning does not
        # pollute the measurements.
        for _ in range(5):
            self.forward(x)
        torch.cuda.synchronize()
        starter = torch.cuda.Event(enable_timing=True)
        ender = torch.cuda.Event(enable_timing=True)
        for name, module in self.layers:
            starter.record()
            x = module(x)
            ender.record()
            torch.cuda.synchronize()  # wait for the GPU work to finish
            # elapsed_time: milliseconds between the two recorded events
            curr_time = starter.elapsed_time(ender)
            print("{0: >10s}\t{1: >10.4f}(ms)".format(name, curr_time))

    # Correctly spelled alias for the misspelled public method above.
    timing = timming
if __name__ == "__main__":
    # Smoke-test: one full forward pass, then print the per-layer timing table.
    sample = torch.randn(1, 3, 224, 224).cuda()
    net = resnet18_mapping()
    net(sample)
    net.timming(sample)