# Copyright (c) Soumith Chintala 2016,
# All rights reserved
#
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the BSD 3-Clause License (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://spdx.org/licenses/BSD-3-Clause.html
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

#!/usr/bin/env python
# -*- coding: utf-8 -*-

import torch
from apex import amp
from mmdet.models.backbones.resnet import ResNet
# Toggle apex mixed-precision initialization further down in the script.
AMP_MODE = True

# ResNet-50 backbone configured exactly as a typical mmdet detector would use
# it: 4 stages, all stage outputs returned, stage 1 frozen, BN frozen/eval.
model = ResNet(
    depth=50,
    num_stages=4,
    out_indices=(0, 1, 2, 3),
    frozen_stages=1,
    norm_cfg=dict(type='BN', requires_grad=False),
    norm_eval=True,
    style='pytorch',
)
# Dummy batch: 2 images, 3 channels, 800x800 (common mmdet input size).
x = torch.randn(2, 3, 800, 800)
# Plain SGD (lr=0.1) -- only needed so amp.initialize has an optimizer to wrap.
optimizer = torch.optim.SGD(model.parameters(), 0.1)

def print_func(inputs, prefix):
    """Recursively print shape/dtype info for hook payloads.

    Args:
        inputs: A ``torch.Tensor``, a tuple/list of (possibly nested)
            tensors, or any other object a module hook delivers.
        prefix: Label string prepended to every printed line.
    """
    # Fix: hooks can deliver a list as well as a tuple (e.g. modules that
    # return lists of feature maps); the original only recursed into tuples,
    # so lists fell through to the raw-print branch.
    if isinstance(inputs, (tuple, list)):
        for item in inputs:
            print_func(item, prefix)
    elif isinstance(inputs, torch.Tensor):
        print(prefix, inputs.shape, inputs.dtype)
    else:
        # Non-tensor payloads (None grads, scalars, ...) are printed verbatim.
        print(prefix, inputs)

# Factory for the hook callbacks registered below.
def hook_func(name, module):
    """Build a forward/backward hook that logs *name* plus tensor info.

    ``module`` is accepted for signature compatibility at the registration
    site; the hook uses the module object PyTorch passes at call time
    (which shadows it).
    """
    def hook_function(module, inputs, outputs):
        print(module)
        print_func(inputs, name + ' inputs')
        print_func(outputs, name + ' outputs')

    return hook_function

# Register forward and backward hooks on every submodule so each layer's
# inputs/outputs (and gradients) are printed during the run below.
for name, module in model.named_modules():
    module.register_forward_hook(hook_func('[forward]: '+name, module))
    # NOTE(review): register_backward_hook is deprecated in newer PyTorch and
    # can misreport grads for multi-input modules -- consider
    # register_full_backward_hook if the installed torch supports it.
    module.register_backward_hook(hook_func('[backward]: '+name, module))

# Select the Ascend NPU device before any .npu() moves below.
torch.npu.set_device("npu:0")
model = model.npu()
print(model)
x = x.npu()

if AMP_MODE:
    # NOTE(review): per apex docs, amp.initialize must be called after the
    # model is already on the target device; O2 casts weights to FP16 and the
    # fixed loss_scale=1.0 disables dynamic loss scaling (eases debugging,
    # but may underflow grads -- confirm this is intentional).
    model, optimizer = amp.initialize(model, optimizer, opt_level='O2', loss_scale=1.0)

# One forward/backward pass; the registered hooks print per-layer info.
feats = model(x)
# Reduce each of the 4 stage outputs to a scalar and sum them into a
# dummy loss so backward() touches every returned feature map.
stage_means = [feat.mean() for feat in feats]
loss = stage_means[0] + stage_means[1] + stage_means[2] + stage_means[3]
print(loss, loss.dtype)
loss.backward()
