# Copyright (c) Soumith Chintala 2016,
# All rights reserved
#
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the BSD 3-Clause License (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://spdx.org/licenses/BSD-3-Clause.html
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import torch
import torch.nn as nn
# Bind all subsequent NPU allocations/ops to the first Ascend device.
# NOTE(review): this requires the torch_npu (Ascend) build of PyTorch —
# stock torch has no `torch.npu` namespace; confirm the runtime environment.
torch.npu.set_device("npu:0")


class Model(nn.Module):
    """Conv2d + BatchNorm2d block whose norm parameters are frozen by default.

    The constructor mirrors a ResNet-style bottleneck signature. Fixes over
    the original:
      * ``norm_cfg`` no longer uses a shared mutable default argument;
      * ``inplanes``/``planes``/``stride``/``dilation`` are actually used
        (defaults reproduce the original hard-coded ``Conv2d(64, 64, 3)``);
      * debug ``print`` calls (which dumped full parameter tensors) removed.

    Args:
        inplanes (int): input channel count of ``conv1``.
        planes (int): output channel count of ``conv1`` / ``bn1``.
        stride (int): stride of ``conv1``.
        dilation (int): dilation of ``conv1``.
        downsample, style, with_cp, conv_cfg, dcn, plugins: accepted for
            signature compatibility; unused here (as in the original).
        norm_cfg (dict | None): norm config; ``requires_grad=False`` (the
            default) freezes the BN affine parameters.
    """

    def __init__(self,
                 inplanes=64,
                 planes=64,
                 stride=1,
                 dilation=1,
                 downsample=None,
                 style='pytorch',
                 with_cp=False,
                 conv_cfg=None,
                 norm_cfg=None,
                 dcn=None,
                 plugins=None):
        super(Model, self).__init__()
        # Reproduce the original default without sharing one dict across calls.
        if norm_cfg is None:
            norm_cfg = dict(type='BN', requires_grad=False)

        # Defaults (64, 64, stride=1, dilation=1) give exactly the original
        # nn.Conv2d(64, 64, 3); no padding, so spatial size shrinks by
        # 2 * dilation on each axis.
        self.conv1 = nn.Conv2d(inplanes, planes, 3,
                               stride=stride, dilation=dilation)
        self.bn1 = nn.BatchNorm2d(planes)

        self.norm1_name = 'bn1'
        # Freeze BN affine parameters unless the config asks for gradients;
        # the original froze them unconditionally, which the default matches.
        if not norm_cfg.get('requires_grad', True):
            for p in self.bn1.parameters():
                p.requires_grad = False

    def forward(self, x):
        """Apply ``conv1`` then ``bn1`` and return the result."""
        x = self.conv1(x)
        x = self.bn1(x)
        return x


# Smoke test: forward + backward of Model on the NPU.
x = torch.randn(2, 64, 32, 32)
# BUG FIX: the original wrote `x.require_grad = True` (missing "s"), which
# only attaches an unused ad-hoc attribute and never enables autograd on x.
x.requires_grad_(True)

model = Model().npu()
print(model)

x = x.npu()
# BUG FIX: npu_format_cast returns the cast tensor; the original discarded
# the result. NOTE(review): npu_format_cast_ is the in-place variant —
# confirm against the installed torch_npu version.
x = x.npu_format_cast(2)

o = model(x)
l = o.sum()
l.backward()
print(l)

