#!/usr/bin/env python
# -*- coding:utf-8 -*-
# @Time    : 2020/12/12 10:25
# @Author  : lxy
import torch
from torch import nn


class TextCNN(nn.Module):
    """Three-branch convolutional text classifier.

    ``forward(x, input)`` expects:
      * ``x``:     (batch, seq_len, 50) pre-computed float embeddings
      * ``input``: (batch, seq_len) integer token ids (embedded internally)

    The two embeddings are stacked as 2 channels, run through three conv
    branches (kernel heights 3/4/5), concatenated, and mapped to a softmax
    distribution of shape (batch, 1, category).

    NOTE(review): the hard-coded ``Linear(in_features=2*49)`` only matches when
    seq_len == 50 (branch pooled lengths 22 + 14 + 13 = 49) — confirm against
    the caller.
    """

    def __init__(self, category, num_embedding):
        super(TextCNN, self).__init__()
        self.embedding = nn.Embedding(num_embeddings=num_embedding, embedding_dim=50)
        # 2 channels * 49 pooled positions across the three branches (seq_len 50).
        self.linear = nn.Linear(in_features=2 * 49, out_features=category)
        # NOTE(review): lstm, maxpooling and relu are never used in forward();
        # kept so existing checkpoint state_dict keys remain compatible.
        self.lstm = nn.LSTM(input_size=50, hidden_size=50, num_layers=2,
                            bias=False, dropout=0.5, batch_first=True)
        self.run1 = self.make_layer(3)
        self.run2 = self.make_layer(4)
        self.run3 = self.make_layer(5)
        self.maxpooling = nn.MaxPool2d(kernel_size=(12, 1))
        self.softmax = nn.Softmax(dim=2)
        self.relu = nn.ReLU()

    def make_layer(self, kernel_size):
        """Build one conv branch: conv -> BN -> ReLU -> dropout (x2) -> maxpool.

        BUGFIX: the original used ``nn.Conv1d`` with 2-tuple kernel sizes on
        4-D (N, C, H, W) activations, which current PyTorch rejects at runtime;
        the surrounding BatchNorm2d / MaxPool2d already assume 4-D input, so
        the convolutions are ``nn.Conv2d``.
        """
        return nn.Sequential(
            # (N, 2, L, 50) -> (N, 2, L-k+1, 1): kernel spans the full embedding width.
            nn.Conv2d(in_channels=2, out_channels=2, kernel_size=(kernel_size, 50), padding=0),
            nn.BatchNorm2d(2),
            nn.ReLU(),
            nn.Dropout(0.5),
            nn.Conv2d(in_channels=2, out_channels=2, kernel_size=(kernel_size, 1), padding=0),
            nn.BatchNorm2d(2),
            nn.ReLU(),
            nn.Dropout(0.5),
            # kernel_size // 2 + 1 == int(kernel_size / 2 + 1) for positive ints.
            nn.MaxPool2d(kernel_size=(kernel_size, 1), padding=0,
                         stride=(kernel_size // 2 + 1, 1)),
        )

    def forward(self, x, input):
        x = torch.unsqueeze(x, 1)              # (B, 1, L, 50)
        x_copy = self.embedding(input)         # (B, L, 50)
        x_copy = x_copy.unsqueeze(1)           # (B, 1, L, 50)
        x = torch.cat((x, x_copy), 1).float()  # (B, 2, L, 50): two-channel input
        a = self.run1(x)
        b = self.run2(x)
        c = self.run3(x)
        y = torch.cat((a, b, c), 2)            # concatenate along pooled length
        y = y.squeeze(3)                       # drop the width-1 axis
        y = y.view(-1, 1, 2 * 49)
        y = self.linear(y)
        y = self.softmax(y)                    # (B, 1, category) probabilities
        return y

class TextCNN2(nn.Module):
    """LSTM front-end followed by a residual conv block and a linear head.

    ``forward(x)`` expects ``x`` of shape (batch, seq_len, 50) and returns a
    softmax distribution of shape (batch, 1, category).

    NOTE(review): the residual ``x + conv1(x)`` only type-checks when the conv
    block is shape-preserving, i.e. kernel_size == 3 with padding=1 — confirm
    callers always pass kernel_size=3.
    NOTE(review): ``reshape(-1, 1, 528)`` implies seq_len // 3 == 33 after the
    3x3 average pool (528 = 33 * 16), i.e. seq_len in [99, 101] — verify
    against the data pipeline.
    """

    def __init__(self, kernel_size, category):
        super(TextCNN2, self).__init__()
        # FIX: dropout on a single-layer LSTM is ignored by PyTorch (it only
        # applies between stacked layers) and emits a UserWarning, so the
        # argument is removed here — behavior is unchanged.
        self.lstm = nn.LSTM(input_size=50, hidden_size=50, num_layers=1,
                            bias=False, batch_first=True)
        self.conv1 = self.block(kernel_size, 1)
        # NOTE(review): conv2..conv4, One and two are never used by forward();
        # kept so existing checkpoint state_dict keys remain compatible.
        self.conv2 = self.block(kernel_size, 2)
        self.conv3 = self.block(kernel_size, 4)
        self.conv4 = self.block(kernel_size, 8)
        self.avgpooling = nn.AvgPool2d(kernel_size=3)
        self.relu = nn.ReLU(inplace=True)
        self.One = nn.Conv2d(in_channels=16, out_channels=1, kernel_size=3)
        self.two = nn.Conv1d(in_channels=8, out_channels=1, kernel_size=3)
        self.linear = nn.Linear(in_features=528, out_features=category, bias=True)
        self.softmax = nn.Softmax(dim=2)

    def block(self, kernel_size, channel):
        """Three conv -> BN -> ReLU stages at a fixed channel width."""
        return nn.Sequential(
            nn.Conv2d(in_channels=channel, out_channels=channel,
                      kernel_size=kernel_size, padding=1),
            nn.ReLU(inplace=True),
            nn.BatchNorm2d(num_features=channel, affine=True),
            nn.Conv2d(in_channels=channel, out_channels=channel,
                      kernel_size=kernel_size, padding=1),
            nn.BatchNorm2d(num_features=channel, affine=True),
            nn.ReLU(inplace=True),
            nn.Conv2d(in_channels=channel, out_channels=channel,
                      kernel_size=kernel_size, padding=1),
            nn.BatchNorm2d(num_features=channel, affine=True),
            nn.ReLU(inplace=True),
        )

    def forward(self, x):
        x, _ = self.lstm(x)              # (B, L, 50)
        x = torch.unsqueeze(x, 1)        # (B, 1, L, 50)
        x_copy = self.conv1(x)           # shape-preserving when kernel_size == 3
        x = x + x_copy                   # residual connection
        x = self.relu(x)
        x = self.avgpooling(x)           # (B, 1, L // 3, 16)
        x = x.reshape(-1, 1, 528)
        y = self.linear(x)
        y = self.softmax(y)              # (B, 1, category) probabilities
        return y

class Teacher(nn.Module):
    """Teacher network: 2-layer LSTM -> 7x7 average pooling -> linear -> softmax.

    ``forward(x)`` takes (batch, seq_len, 50) features and returns a softmax
    distribution of shape (batch, 1, category). The 3-D LSTM output is fed to
    AvgPool2d directly, so pooling runs over the (seq_len, hidden) plane; the
    ``reshape(-1, 1, 49)`` implies the pooled map is 7x7.
    """

    def __init__(self, category):
        super(Teacher, self).__init__()
        self.lstm = nn.LSTM(input_size=50, hidden_size=50, num_layers=2,
                            bias=False, dropout=0.5, batch_first=True)
        self.linear = nn.Linear(in_features=7 * 7, out_features=category, bias=True)
        self.softmax = nn.Softmax(dim=2)
        self.avgpooling = nn.AvgPool2d(kernel_size=7)

    def forward(self, x):
        features, _ = self.lstm(x)                 # (B, L, 50)
        pooled = self.avgpooling(features)         # 7x7 map per sample
        flat = pooled.reshape(-1, 1, 7 * 7)        # (B, 1, 49)
        logits = self.linear(flat)
        return self.softmax(logits)                # (B, 1, category)


