# Demonstrates non-linear activation functions (ReLU / Sigmoid) on CIFAR-10 images
import torch
import torchvision
from torch import nn
from torch.nn import ReLU, Sigmoid
from torch.utils.data import DataLoader
from torch.utils.tensorboard import SummaryWriter

# CIFAR-10 test split, converted to tensors and served in batches of 64.
dataset = torchvision.datasets.CIFAR10(
    "./dataset",
    train=False,
    transform=torchvision.transforms.ToTensor(),
    download=True,
)
dataloader = DataLoader(dataset, batch_size=64)

class MyModule(nn.Module):
    """Small module whose forward pass applies a Sigmoid activation.

    A ReLU layer is also registered but is not used by forward();
    presumably kept so the activation can be swapped while
    experimenting — confirm before removing.
    """

    def __init__(self):
        super(MyModule, self).__init__()
        # Both activations are constructed; only sigmoid participates
        # in the forward pass below.
        self.relu = ReLU(inplace=False)
        self.sigmoid = Sigmoid()

    def forward(self, input):
        """Return the element-wise sigmoid of `input` (values in (0, 1))."""
        return self.sigmoid(input)


mymodule = MyModule()

# For each batch, log the raw images and their activated counterparts
# to TensorBoard so the effect of the activation can be inspected.
writer = SummaryWriter("activations_logs")
for step, (imgs, targets) in enumerate(dataloader):
    writer.add_images("input", imgs, step)
    writer.add_images("activation_output", mymodule(imgs), step)

writer.close()