"""
this script define the model.
There is no relu layer after the convolution layer.
"""
import torch
import argparse
from torch import nn


class ConvNetQuake(nn.Module):
    """1-D CNN classifier (ConvNetQuake-style, but without ReLU activations).

    Eight stride-2 Conv1d layers halve the temporal length each step; a final
    linear layer maps the flattened features to ``args.n_cluster + 1`` class
    probabilities (the extra class is "no event").

    NOTE(review): ``self.fc`` assumes in_features=128, i.e. 32 channels x 4
    time steps — this holds for the 1000-sample inputs used by the script
    below (1000 halved eight times -> 4).
    """

    def __init__(self, args) -> None:
        super().__init__()

        # conv1 takes the 3 input channels; conv2..conv8 are 32 -> 32.
        # Child names "conv1".."conv8" are kept so state_dicts stay compatible.
        self.conv_block = nn.Sequential()
        for idx in range(1, 9):
            self.conv_block.add_module(
                f"conv{idx}",
                nn.Conv1d(in_channels=3 if idx == 1 else 32, out_channels=32,
                          kernel_size=3, stride=2, padding=1),
            )
            # ReLU deliberately omitted (see module docstring).

        # Output dimension is n_cluster + 1 (the "+1" is the no-event class).
        self.fc = nn.Linear(in_features=128, out_features=args.n_cluster + 1)

    def forward(self, x):
        """Return per-class probabilities of shape (batch, n_cluster + 1)."""
        features = self.conv_block(x)
        features = features.flatten(start_dim=1)
        logits = self.fc(features)
        # Softmax over the class dimension; rows sum to 1.
        return logits.softmax(dim=1)


if __name__ == "__main__":
    # for test
    parser = argparse.ArgumentParser()
    parser.add_argument("--n_cluster", type=int, default=6,
                        help="the number of cluster")
    args = parser.parse_args()

    net = ConvNetQuake(args)
    # net.eval()
    data = torch.rand([10, 3, 1000])
    print(data)
    out = net(data)
    print(out.shape)
    print(out)
    print(torch.argmax(out, dim=1))
