# -*- coding: utf-8 -*-
# @Time : 2021-11-17 16:19
# @Author : lwb
# @Site : 
# @File : CNN.py
import torch.nn as nn

import torch
import torch.nn.functional as F
class CNN(nn.Module):
    """A 1-D convolutional network for sequence (text) classification.

    Pipeline: token ids -> embedding -> Conv1d + ReLU -> global max pool
    over time -> linear -> log-softmax over classes.
    """

    def __init__(self, vocab_size, embedding_dim, filter_size, num_filter,
                 num_class, padding=1):
        """
        Args:
            vocab_size: size of the token vocabulary.
            embedding_dim: dimensionality of the token embeddings.
            filter_size: convolution kernel width (tokens per window).
            num_filter: number of convolution filters (output channels).
            num_class: number of target classes.
            padding: zeros added to both ends of the sequence before the
                convolution. Default 1 preserves the original hard-coded
                behavior (length-preserving only when filter_size == 3).
        """
        super(CNN, self).__init__()
        self.embedding = nn.Embedding(vocab_size, embedding_dim)
        # Convolve over the time axis; padding keeps very short sequences
        # valid and (for matching odd kernels) preserves sequence length.
        self.conv1d = nn.Conv1d(embedding_dim, num_filter, filter_size,
                                padding=padding)
        self.activate = F.relu
        self.linear = nn.Linear(num_filter, num_class)

    def forward(self, inputs):
        """Classify each sequence in the batch.

        Args:
            inputs: LongTensor of token ids, shape (batch, seq_len).

        Returns:
            Log-probabilities over classes, shape (batch, num_class).
        """
        embedding = self.embedding(inputs)  # (batch, seq_len, embedding_dim)
        # Conv1d expects (batch, channels, time): move embedding dim to channels.
        convolution = self.activate(self.conv1d(embedding.permute(0, 2, 1)))
        # Global max pooling: kernel spans the whole (padded) time dimension,
        # reducing it to 1 so each filter contributes a single feature.
        pooling = F.max_pool1d(convolution, kernel_size=convolution.shape[2])
        outputs = self.linear(pooling.squeeze(dim=2))
        log_probs = F.log_softmax(outputs, dim=1)
        return log_probs

