{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "#server ip:port\n",
    "#    http://114.213.253.204:8899"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import torch\n",
    "from torch import nn\n",
    "# NOTE(review): `utils` and torchvision's `datasets`/`transforms` are not\n",
    "# referenced in the visible cells -- confirm before removing.\n",
    "import utils\n",
    "import torch.nn.functional as F\n",
    "from torch.utils.data import DataLoader\n",
    "from torchvision import datasets, transforms\n",
    "# Run on the first GPU when available, otherwise fall back to the CPU.\n",
    "device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")\n",
    "# Training hyperparameters.\n",
    "epochs = 20000\n",
    "batch_size = 100\n",
    "learning_rate = 1e-2\n",
    "# Evaluate on the test split every `print_rate` epochs.\n",
    "print_rate = 100"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "outputs": [],
   "source": [
    "class AEC_Net(nn.Module):\n",
    "    \"\"\"Conv2d + LSTM binary classifier.\n",
    "\n",
    "    Expects input of shape (batch, 1, W, 88) and returns raw logits of\n",
    "    shape (batch, 2), suitable for nn.CrossEntropyLoss.\n",
    "    \"\"\"\n",
    "    def __init__(self):\n",
    "        super(AEC_Net, self).__init__()\n",
    "        self.conv1 = nn.Conv2d(in_channels=1, out_channels=64, kernel_size=(1, 6), padding=(0, 2))\n",
    "        self.maxpool1 = nn.MaxPool2d((1, 4))\n",
    "        self.conv2 = nn.Conv2d(in_channels=64, out_channels=128, kernel_size=(88, 1))\n",
    "        self.maxpool2 = nn.MaxPool2d((1, 2))\n",
    "        self.rnn = nn.LSTM(input_size=128, hidden_size=256, batch_first=True)\n",
    "        self.linear = nn.Linear(in_features=256, out_features=2)\n",
    "        self.dropout = nn.Dropout(0.6)\n",
    "\n",
    "    def forward(self, input_data):\n",
    "        # input (batch, channels, W, H) -> swap the last two axes\n",
    "        x = input_data.permute(0, 1, 3, 2)\n",
    "        x = F.pad(x, (0, 1, 0, 0))\n",
    "        x = self.conv1(x)\n",
    "        x = self.maxpool1(x)\n",
    "        x = F.relu(x)\n",
    "        x = self.conv2(x)\n",
    "        x = F.relu(x)\n",
    "        x = self.maxpool2(x)\n",
    "        # conv2 reduced the 88-wide axis to size 1; drop only that axis.\n",
    "        # BUGFIX: a bare .squeeze() also removed the batch axis when\n",
    "        # batch_size == 1, breaking the permute below.\n",
    "        x = x.squeeze(2)\n",
    "        x = x.permute(0, 2, 1)  # (batch, time, features) for batch_first LSTM\n",
    "        output, _ = self.rnn(x)\n",
    "        x = output[:, -1, :]  # keep only the last time step\n",
    "        # Dropout on the LSTM features, before the classifier head\n",
    "        # (dropping out the 2-way logits themselves only injects noise).\n",
    "        x = self.dropout(x)\n",
    "        x = self.linear(x)\n",
    "        # BUGFIX: return raw logits. Applying F.softmax here while training\n",
    "        # with nn.CrossEntropyLoss (which applies log-softmax internally)\n",
    "        # flattens the gradients -- consistent with the run log showing\n",
    "        # accuracy stuck near chance for ~10k epochs.\n",
    "        return x\n",
    "\n",
    "def train_loop(dataloader, model, loss_fn, optimizer):\n",
    "    \"\"\"Run one training epoch of `model` over `dataloader`.\n",
    "\n",
    "    Uses `loss_fn` for the objective and `optimizer` for the update step.\n",
    "    \"\"\"\n",
    "    # BUGFIX: ensure dropout is active during training even if the model\n",
    "    # was previously switched to eval mode by test_loop.\n",
    "    model.train()\n",
    "    for batch, (X, y) in enumerate(dataloader):\n",
    "        X = X.to(device)\n",
    "        y = y.to(device)\n",
    "        # Forward pass; the model output is already on `device`.\n",
    "        pred = model(X)\n",
    "        loss = loss_fn(pred, y)\n",
    "\n",
    "        # Backpropagation\n",
    "        optimizer.zero_grad()\n",
    "        loss.backward()\n",
    "        optimizer.step()\n",
    "\n",
    "def test_loop(dataloader, model, loss_fn):\n",
    "    \"\"\"Evaluate `model` on `dataloader` and print accuracy and mean loss.\"\"\"\n",
    "    size = len(dataloader.dataset)\n",
    "    num_batches = len(dataloader)\n",
    "    test_loss, correct = 0, 0\n",
    "    # BUGFIX: switch to eval mode so the Dropout(0.6) layer is disabled;\n",
    "    # otherwise every evaluation is randomly perturbed by dropout noise.\n",
    "    model.eval()\n",
    "    with torch.no_grad():\n",
    "        for X, y in dataloader:\n",
    "            X = X.to(device)\n",
    "            y = y.to(device)\n",
    "            # Model output is already on `device`; no extra .to() needed.\n",
    "            pred = model(X)\n",
    "            test_loss += loss_fn(pred, y).item()\n",
    "            correct += (pred.argmax(1) == y).type(torch.float).sum().item()\n",
    "\n",
    "    test_loss /= num_batches\n",
    "    correct /= size\n",
    "    print(f\"Test Error: \\n Accuracy: {(100 * correct):>0.1f}%, Avg loss: {test_loss:>8f} \\n\")\n",
    "\n"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "outputs": [],
   "source": [
    "def oooo():\n",
    "    \"\"\"Load the raw embeddings, train AEC_Net, and report test metrics.\n",
    "\n",
    "    Reads `embedding_raw.npy` / `label_raw.npy` from the working directory,\n",
    "    performs an 80/20 train/test split, and trains for `epochs` epochs with\n",
    "    SGD, printing test accuracy every `print_rate` epochs.\n",
    "    \"\"\"\n",
    "    embedding = torch.tensor(np.load(\"embedding_raw.npy\"), dtype=torch.float32)\n",
    "    label = torch.tensor(np.load(\"label_raw.npy\"), dtype=torch.long)\n",
    "    # Add a channel axis: (N, W, H) -> (N, 1, W, H).\n",
    "    embeddings = embedding[:, np.newaxis, :, :]\n",
    "\n",
    "    dataset = torch.utils.data.TensorDataset(F.normalize(embeddings, dim=2), label)\n",
    "\n",
    "    # BUGFIX: derive the 80/20 split from the actual dataset size; the\n",
    "    # previous hardcoded (80, 20) raises unless the dataset holds exactly\n",
    "    # 100 samples.\n",
    "    n_train = int(0.8 * len(dataset))\n",
    "    train, test = torch.utils.data.random_split(dataset, (n_train, len(dataset) - n_train))\n",
    "    train_dataloader = DataLoader(train, batch_size=batch_size, shuffle=True)\n",
    "    test_dataloader = DataLoader(test, batch_size=batch_size, shuffle=True)\n",
    "\n",
    "    model = AEC_Net()\n",
    "    model.to(device)\n",
    "\n",
    "    loss_fn = nn.CrossEntropyLoss()\n",
    "    optimizer = torch.optim.SGD(model.parameters(), lr=learning_rate)\n",
    "\n",
    "    for t in range(epochs):\n",
    "        train_loop(dataloader=train_dataloader, model=model, loss_fn=loss_fn, optimizer=optimizer)\n",
    "        if t % print_rate == 0:\n",
    "            print(f\"Epoch {t}\\n-------------------------------\")\n",
    "            test_loop(dataloader=test_dataloader, model=model, loss_fn=loss_fn)\n",
    "\n",
    "    print(\"Done!\")"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch 0\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 45.0%, Avg loss: 0.692424 \n",
      "\n",
      "Epoch 100\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 45.0%, Avg loss: 0.693870 \n",
      "\n",
      "Epoch 200\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.703874 \n",
      "\n",
      "Epoch 300\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.702734 \n",
      "\n",
      "Epoch 400\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.712380 \n",
      "\n",
      "Epoch 500\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.727347 \n",
      "\n",
      "Epoch 600\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.702085 \n",
      "\n",
      "Epoch 700\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.697737 \n",
      "\n",
      "Epoch 800\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.717145 \n",
      "\n",
      "Epoch 900\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.693920 \n",
      "\n",
      "Epoch 1000\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.701882 \n",
      "\n",
      "Epoch 1100\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.709732 \n",
      "\n",
      "Epoch 1200\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.721070 \n",
      "\n",
      "Epoch 1300\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.723179 \n",
      "\n",
      "Epoch 1400\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.697310 \n",
      "\n",
      "Epoch 1500\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.700621 \n",
      "\n",
      "Epoch 1600\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.728126 \n",
      "\n",
      "Epoch 1700\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.697022 \n",
      "\n",
      "Epoch 1800\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.702384 \n",
      "\n",
      "Epoch 1900\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.711477 \n",
      "\n",
      "Epoch 2000\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.694439 \n",
      "\n",
      "Epoch 2100\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.715806 \n",
      "\n",
      "Epoch 2200\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.715164 \n",
      "\n",
      "Epoch 2300\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.698846 \n",
      "\n",
      "Epoch 2400\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.702785 \n",
      "\n",
      "Epoch 2500\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.697316 \n",
      "\n",
      "Epoch 2600\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.717825 \n",
      "\n",
      "Epoch 2700\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.719254 \n",
      "\n",
      "Epoch 2800\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.708176 \n",
      "\n",
      "Epoch 2900\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.716611 \n",
      "\n",
      "Epoch 3000\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.705527 \n",
      "\n",
      "Epoch 3100\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.711625 \n",
      "\n",
      "Epoch 3200\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.705028 \n",
      "\n",
      "Epoch 3300\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.700897 \n",
      "\n",
      "Epoch 3400\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.710181 \n",
      "\n",
      "Epoch 3500\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.700509 \n",
      "\n",
      "Epoch 3600\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.705723 \n",
      "\n",
      "Epoch 3700\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.702679 \n",
      "\n",
      "Epoch 3800\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.705740 \n",
      "\n",
      "Epoch 3900\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.703117 \n",
      "\n",
      "Epoch 4000\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.713861 \n",
      "\n",
      "Epoch 4100\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.702983 \n",
      "\n",
      "Epoch 4200\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.692380 \n",
      "\n",
      "Epoch 4300\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.706703 \n",
      "\n",
      "Epoch 4400\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.717371 \n",
      "\n",
      "Epoch 4500\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.699467 \n",
      "\n",
      "Epoch 4600\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.715446 \n",
      "\n",
      "Epoch 4700\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.698386 \n",
      "\n",
      "Epoch 4800\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.711123 \n",
      "\n",
      "Epoch 4900\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.704709 \n",
      "\n",
      "Epoch 5000\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.714533 \n",
      "\n",
      "Epoch 5100\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.708017 \n",
      "\n",
      "Epoch 5200\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.706245 \n",
      "\n",
      "Epoch 5300\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.721189 \n",
      "\n",
      "Epoch 5400\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.710459 \n",
      "\n",
      "Epoch 5500\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.713171 \n",
      "\n",
      "Epoch 5600\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.706858 \n",
      "\n",
      "Epoch 5700\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.688023 \n",
      "\n",
      "Epoch 5800\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.714215 \n",
      "\n",
      "Epoch 5900\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.705640 \n",
      "\n",
      "Epoch 6000\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.705103 \n",
      "\n",
      "Epoch 6100\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.700444 \n",
      "\n",
      "Epoch 6200\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.710734 \n",
      "\n",
      "Epoch 6300\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.714562 \n",
      "\n",
      "Epoch 6400\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.704816 \n",
      "\n",
      "Epoch 6500\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.694413 \n",
      "\n",
      "Epoch 6600\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.707576 \n",
      "\n",
      "Epoch 6700\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.701296 \n",
      "\n",
      "Epoch 6800\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.695781 \n",
      "\n",
      "Epoch 6900\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.711131 \n",
      "\n",
      "Epoch 7000\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.704215 \n",
      "\n",
      "Epoch 7100\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.707869 \n",
      "\n",
      "Epoch 7200\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.713064 \n",
      "\n",
      "Epoch 7300\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.695538 \n",
      "\n",
      "Epoch 7400\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.700613 \n",
      "\n",
      "Epoch 7500\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.713254 \n",
      "\n",
      "Epoch 7600\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.724510 \n",
      "\n",
      "Epoch 7700\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.697287 \n",
      "\n",
      "Epoch 7800\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.707713 \n",
      "\n",
      "Epoch 7900\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.705078 \n",
      "\n",
      "Epoch 8000\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.718235 \n",
      "\n",
      "Epoch 8100\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.716988 \n",
      "\n",
      "Epoch 8200\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.682687 \n",
      "\n",
      "Epoch 8300\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.711658 \n",
      "\n",
      "Epoch 8400\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.705667 \n",
      "\n",
      "Epoch 8500\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.706462 \n",
      "\n",
      "Epoch 8600\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.713248 \n",
      "\n",
      "Epoch 8700\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.707899 \n",
      "\n",
      "Epoch 8800\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.717452 \n",
      "\n",
      "Epoch 8900\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.704146 \n",
      "\n",
      "Epoch 9000\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.718190 \n",
      "\n",
      "Epoch 9100\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.687807 \n",
      "\n",
      "Epoch 9200\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.704550 \n",
      "\n",
      "Epoch 9300\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.711825 \n",
      "\n",
      "Epoch 9400\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.714627 \n",
      "\n",
      "Epoch 9500\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.695472 \n",
      "\n",
      "Epoch 9600\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.717943 \n",
      "\n",
      "Epoch 9700\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.709610 \n",
      "\n",
      "Epoch 9800\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.714572 \n",
      "\n",
      "Epoch 9900\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.709365 \n",
      "\n",
      "Epoch 10000\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.713273 \n",
      "\n",
      "Epoch 10100\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.706813 \n",
      "\n",
      "Epoch 10200\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 40.0%, Avg loss: 0.684133 \n",
      "\n",
      "Epoch 10300\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.683031 \n",
      "\n",
      "Epoch 10400\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.695096 \n",
      "\n",
      "Epoch 10500\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.706703 \n",
      "\n",
      "Epoch 10600\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 50.0%, Avg loss: 0.686770 \n",
      "\n",
      "Epoch 10700\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 40.0%, Avg loss: 0.697052 \n",
      "\n",
      "Epoch 10800\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 45.0%, Avg loss: 0.680458 \n",
      "\n",
      "Epoch 10900\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.712241 \n",
      "\n",
      "Epoch 11000\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 40.0%, Avg loss: 0.703520 \n",
      "\n",
      "Epoch 11100\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 40.0%, Avg loss: 0.707356 \n",
      "\n",
      "Epoch 11200\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 45.0%, Avg loss: 0.699501 \n",
      "\n",
      "Epoch 11300\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 50.0%, Avg loss: 0.669805 \n",
      "\n",
      "Epoch 11400\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 55.0%, Avg loss: 0.678044 \n",
      "\n",
      "Epoch 11500\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 55.0%, Avg loss: 0.674328 \n",
      "\n",
      "Epoch 11600\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 50.0%, Avg loss: 0.703887 \n",
      "\n",
      "Epoch 11700\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 40.0%, Avg loss: 0.693890 \n",
      "\n",
      "Epoch 11800\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 50.0%, Avg loss: 0.669708 \n",
      "\n",
      "Epoch 11900\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 50.0%, Avg loss: 0.708192 \n",
      "\n",
      "Epoch 12000\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 50.0%, Avg loss: 0.667805 \n",
      "\n",
      "Epoch 12100\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 45.0%, Avg loss: 0.658224 \n",
      "\n",
      "Epoch 12200\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 55.0%, Avg loss: 0.663712 \n",
      "\n",
      "Epoch 12300\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 50.0%, Avg loss: 0.673726 \n",
      "\n",
      "Epoch 12400\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.741539 \n",
      "\n",
      "Epoch 12500\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 65.0%, Avg loss: 0.617186 \n",
      "\n",
      "Epoch 12600\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 50.0%, Avg loss: 0.670143 \n",
      "\n",
      "Epoch 12700\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 50.0%, Avg loss: 0.674682 \n",
      "\n",
      "Epoch 12800\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 45.0%, Avg loss: 0.754327 \n",
      "\n",
      "Epoch 12900\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 50.0%, Avg loss: 0.732937 \n",
      "\n",
      "Epoch 13000\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 70.0%, Avg loss: 0.607094 \n",
      "\n",
      "Epoch 13100\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 50.0%, Avg loss: 0.740439 \n",
      "\n",
      "Epoch 13200\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.731900 \n",
      "\n",
      "Epoch 13300\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 55.0%, Avg loss: 0.667558 \n",
      "\n",
      "Epoch 13400\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 45.0%, Avg loss: 0.726066 \n",
      "\n",
      "Epoch 13500\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 55.0%, Avg loss: 0.688907 \n",
      "\n",
      "Epoch 13600\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 50.0%, Avg loss: 0.763683 \n",
      "\n",
      "Epoch 13700\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 40.0%, Avg loss: 0.633654 \n",
      "\n",
      "Epoch 13800\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 50.0%, Avg loss: 0.688395 \n",
      "\n",
      "Epoch 13900\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 65.0%, Avg loss: 0.668479 \n",
      "\n",
      "Epoch 14000\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 45.0%, Avg loss: 0.748479 \n",
      "\n",
      "Epoch 14100\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 45.0%, Avg loss: 0.695536 \n",
      "\n",
      "Epoch 14200\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 50.0%, Avg loss: 0.681619 \n",
      "\n",
      "Epoch 14300\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 50.0%, Avg loss: 0.697366 \n",
      "\n",
      "Epoch 14400\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 50.0%, Avg loss: 0.747293 \n",
      "\n",
      "Epoch 14500\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 40.0%, Avg loss: 0.814599 \n",
      "\n",
      "Epoch 14600\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 55.0%, Avg loss: 0.621903 \n",
      "\n",
      "Epoch 14700\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 60.0%, Avg loss: 0.663088 \n",
      "\n",
      "Epoch 14800\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 45.0%, Avg loss: 0.729483 \n",
      "\n",
      "Epoch 14900\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 50.0%, Avg loss: 0.685340 \n",
      "\n",
      "Epoch 15000\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.680622 \n",
      "\n",
      "Epoch 15100\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 45.0%, Avg loss: 0.670644 \n",
      "\n",
      "Epoch 15200\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 45.0%, Avg loss: 0.735580 \n",
      "\n",
      "Epoch 15300\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 45.0%, Avg loss: 0.718809 \n",
      "\n",
      "Epoch 15400\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 45.0%, Avg loss: 0.747118 \n",
      "\n",
      "Epoch 15500\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 45.0%, Avg loss: 0.785375 \n",
      "\n",
      "Epoch 15600\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 50.0%, Avg loss: 0.724037 \n",
      "\n",
      "Epoch 15700\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 50.0%, Avg loss: 0.651602 \n",
      "\n",
      "Epoch 15800\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 60.0%, Avg loss: 0.630110 \n",
      "\n",
      "Epoch 15900\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 45.0%, Avg loss: 0.765056 \n",
      "\n",
      "Epoch 16000\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 40.0%, Avg loss: 0.764269 \n",
      "\n",
      "Epoch 16100\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.769286 \n",
      "\n",
      "Epoch 16200\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 40.0%, Avg loss: 0.758959 \n",
      "\n",
      "Epoch 16300\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 50.0%, Avg loss: 0.641700 \n",
      "\n",
      "Epoch 16400\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 55.0%, Avg loss: 0.688905 \n",
      "\n",
      "Epoch 16500\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 30.0%, Avg loss: 0.797884 \n",
      "\n",
      "Epoch 16600\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.726478 \n",
      "\n",
      "Epoch 16700\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 45.0%, Avg loss: 0.717316 \n",
      "\n",
      "Epoch 16800\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 45.0%, Avg loss: 0.702534 \n",
      "\n",
      "Epoch 16900\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 50.0%, Avg loss: 0.762855 \n",
      "\n",
      "Epoch 17000\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 45.0%, Avg loss: 0.713305 \n",
      "\n",
      "Epoch 17100\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 45.0%, Avg loss: 0.782022 \n",
      "\n",
      "Epoch 17200\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 50.0%, Avg loss: 0.750598 \n",
      "\n",
      "Epoch 17300\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 50.0%, Avg loss: 0.714910 \n",
      "\n",
      "Epoch 17400\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.823418 \n",
      "\n",
      "Epoch 17500\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.709909 \n",
      "\n",
      "Epoch 17600\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 60.0%, Avg loss: 0.716899 \n",
      "\n",
      "Epoch 17700\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 30.0%, Avg loss: 0.733117 \n",
      "\n",
      "Epoch 17800\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 45.0%, Avg loss: 0.716834 \n",
      "\n",
      "Epoch 17900\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 40.0%, Avg loss: 0.751708 \n",
      "\n",
      "Epoch 18000\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 65.0%, Avg loss: 0.659186 \n",
      "\n",
      "Epoch 18100\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 45.0%, Avg loss: 0.749550 \n",
      "\n",
      "Epoch 18200\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 45.0%, Avg loss: 0.800248 \n",
      "\n",
      "Epoch 18300\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 40.0%, Avg loss: 0.811986 \n",
      "\n",
      "Epoch 18400\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 55.0%, Avg loss: 0.717464 \n",
      "\n",
      "Epoch 18500\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 45.0%, Avg loss: 0.746431 \n",
      "\n",
      "Epoch 18600\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 50.0%, Avg loss: 0.705182 \n",
      "\n",
      "Epoch 18700\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 55.0%, Avg loss: 0.621587 \n",
      "\n",
      "Epoch 18800\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 50.0%, Avg loss: 0.719057 \n",
      "\n",
      "Epoch 18900\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 30.0%, Avg loss: 0.816302 \n",
      "\n",
      "Epoch 19000\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 35.0%, Avg loss: 0.822389 \n",
      "\n",
      "Epoch 19100\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 40.0%, Avg loss: 0.761310 \n",
      "\n",
      "Epoch 19200\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 55.0%, Avg loss: 0.733357 \n",
      "\n",
      "Epoch 19300\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 40.0%, Avg loss: 0.758076 \n",
      "\n",
      "Epoch 19400\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 55.0%, Avg loss: 0.758328 \n",
      "\n",
      "Epoch 19500\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 55.0%, Avg loss: 0.769395 \n",
      "\n",
      "Epoch 19600\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 50.0%, Avg loss: 0.747384 \n",
      "\n",
      "Epoch 19700\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 45.0%, Avg loss: 0.714327 \n",
      "\n",
      "Epoch 19800\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 45.0%, Avg loss: 0.705880 \n",
      "\n",
      "Epoch 19900\n",
      "-------------------------------\n",
      "Test Error: \n",
      " Accuracy: 50.0%, Avg loss: 0.717325 \n",
      "\n",
      "Done!\n"
     ]
    }
   ],
   "source": [
    "oooo()\n"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "torch.Size([16, 256])\n",
      "torch.Size([16, 2])\n"
     ]
    }
   ],
   "source": [
    "input_data=torch.randn((16,1,512,88))\n",
    "conv1 = nn.Conv2d(in_channels=1,out_channels=64,kernel_size=(1,6),padding=(0,2))\n",
    "maxpool1 = nn.MaxPool2d((1,4))\n",
    "conv2 = nn.Conv2d(in_channels=64,out_channels=128,kernel_size=(88,1))\n",
    "maxpool2 = nn.MaxPool2d((1,2))\n",
    "rnn = nn.LSTM(input_size=128,hidden_size=256,batch_first=True)\n",
    "linear=nn.Linear(in_features=256,out_features=2)\n",
    "x=input_data.permute(0,1,3,2)\n",
    "x=F.pad(x,(0,1,0,0))\n",
    "x=conv1(x)\n",
    "x=maxpool1(x)\n",
    "x=conv2(x)\n",
    "x=maxpool2(x)\n",
    "x=x.squeeze()\n",
    "x=x.permute(0,2,1)\n",
    "x=rnn(x)\n",
    "x=x[0]\n",
    "x=x[:,-1,:]\n",
    "print(x.shape)\n",
    "x=linear(x)\n",
    "print(x.shape)\n",
    "y=F.softmax(x,dim=1)"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "outputs": [
    {
     "data": {
      "text/plain": "torch.Size([100, 512, 88])"
     },
     "execution_count": 7,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "embeddings=torch.tensor(np.load(\"embedding_raw.npy\"),dtype=torch.float32)\n",
    "label=torch.tensor(np.load(\"label_raw.npy\"),dtype=torch.long)\n",
    "#embeddings=embedding[:,np.newaxis,:,:]\n",
    "embeddings.shape"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "outputs": [
    {
     "data": {
      "text/plain": "tensor([0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1,\n        0, 1, 0, 1, 1, 0, 1, 0, 0, 0, 1, 0, 1, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1,\n        1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 1, 0, 1, 1, 0, 1, 1, 1, 0, 0,\n        0, 1, 0, 1, 1, 0, 0, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 0, 0, 0, 1, 0,\n        0, 1, 1, 0])"
     },
     "execution_count": 11,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "label\n"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "outputs": [],
   "source": [],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 2
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython2",
   "version": "2.7.6"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 0
}