#!/usr/bin/env python  
# -*- coding: utf-8 -*-
""" 
@author:hello_life 
@license: Apache Licence 
@file: model.py 
@time: 2022/03/29
@software: PyCharm 
description:
"""
import os
import torch
from torch import nn
import numpy as np
from torch.utils.data import DataLoader

class NeuralNetwork(nn.Module):
    """Simple 3-layer MLP for flattened 28x28 inputs: 784 -> 512 -> 512 -> 10."""

    def __init__(self):
        super().__init__()
        # Attribute names are kept identical to preserve state_dict keys.
        self.fc1 = nn.Linear(28 * 28, 512)
        self.relu1 = nn.ReLU()
        self.fc2 = nn.Linear(512, 512)
        self.relu2 = nn.ReLU()
        self.fc3 = nn.Linear(512, 10)

    def forward(self, x):
        """Map a (batch, 784) tensor to (batch, 10) raw class scores (logits)."""
        for layer in (self.fc1, self.relu1, self.fc2, self.relu2, self.fc3):
            x = layer(x)
        return x


class TextClassification(nn.Module):
    """Text classifier: pretrained embeddings -> 2-layer LSTM -> linear head.

    ``config.pretrain_save_dir`` names an ``.npz`` file (without the suffix)
    whose ``embeddings`` array supplies the embedding weights; the LSTM input
    size of 300 implies each embedding vector has 300 dimensions.
    """

    def __init__(self, config):
        super().__init__()
        # Load the pretrained word vectors; freeze=False keeps them trainable.
        weights = np.load(config.pretrain_save_dir + ".npz")["embeddings"]
        pretrained = torch.tensor(weights, dtype=torch.float)
        self.embedding_pretrained = pretrained
        self.embedding = nn.Embedding.from_pretrained(pretrained, freeze=False)
        self.lstm = nn.LSTM(300, 100, num_layers=2, batch_first=True)
        self.fc = nn.Linear(100, 2)

    def forward(self, x):
        """Map (batch, seq) token ids to (batch, 2) class scores."""
        vectors = self.embedding(x)
        output, (h_n, c_n) = self.lstm(vectors)
        # Classify from the LSTM output at the final time step.
        return self.fc(output[:, -1, :])


if __name__ == '__main__':
    # Bug fix: `device` was referenced but never defined, so the script
    # crashed with NameError. Pick the GPU when available, else CPU.
    device = "cuda" if torch.cuda.is_available() else "cpu"
    model = NeuralNetwork().to(device)
    # One fake flattened 28x28 image.
    x = torch.rand(1, 784, device=device)
    logits = model(x)
    # Convert raw scores to class probabilities, then take the argmax label.
    pred_probab = nn.Softmax(dim=1)(logits)
    y_pre = pred_probab.argmax(1)
    print(pred_probab, y_pre)