{
 "cells": [
  {
   "cell_type": "markdown",
   "source": [
    "# 1. 简易加载数据"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%% md\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "ham\tGo until jurong point, crazy.. Available only in bugis n great world la e buffet... Cine there got amore wat...\n",
      "\n",
      "5574\n"
     ]
    }
   ],
   "source": [
    "import torch\n",
    "from torch.utils.data import DataLoader, Dataset\n",
    "\n",
    "data_path = r\"data/dataset/SMSSpamCollection\"\n",
    "\n",
    "\n",
    "class MyDataset(Dataset):\n",
    "    \"\"\"Dataset exposing each raw line of the SMS spam collection file.\"\"\"\n",
    "\n",
    "    def __init__(self):\n",
    "        # Use a context manager so the file handle is closed after\n",
    "        # reading (the original call leaked the open handle).\n",
    "        with open(data_path, encoding=\"UTF-8\") as f:\n",
    "            self.lines = f.readlines()\n",
    "\n",
    "    def __getitem__(self, index):\n",
    "        # Raw text line: \"<label>\\t<message>\\n\".\n",
    "        return self.lines[index]\n",
    "\n",
    "    def __len__(self):\n",
    "        return len(self.lines)\n",
    "\n",
    "\n",
    "my_dataset = MyDataset()\n",
    "print(my_dataset[0])\n",
    "print(len(my_dataset))"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "markdown",
   "source": [
    "# 2. 对数据集进行处理"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%% md\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0 ('ham', 'Go until jurong point, crazy.. Available only in bugis n great world la e buffet... Cine there got amore wat...')\n",
      "1 ('ham', 'Ok lar... Joking wif u oni...')\n",
      "2 ('spam', \"Free entry in 2 a wkly comp to win FA Cup final tkts 21st May 2005. Text FA to 87121 to receive entry question(std txt rate)T&C's apply 08452810075over18's\")\n",
      "3 ('ham', 'U dun say so early hor... U c already then say...')\n",
      "4 ('ham', \"Nah I don't think he goes to usf, he lives around here though\")\n",
      "5 ('spam', \"FreeMsg Hey there darling it's been 3 week's now and no word back! I'd like some fun you up for it still? Tb ok! XxX std chgs to send, £1.50 to rcv\")\n",
      "6 ('ham', 'Even my brother is not like to speak with me. They treat me like aids patent.')\n",
      "7 ('ham', \"As per your request 'Melle Melle (Oru Minnaminunginte Nurungu Vettam)' has been set as your callertune for all Callers. Press *9 to copy your friends Callertune\")\n",
      "8 ('spam', 'WINNER!! As a valued network customer you have been selected to receivea £900 prize reward! To claim call 09061701461. Claim code KL341. Valid 12 hours only.')\n",
      "9 ('spam', 'Had your mobile 11 months or more? U R entitled to Update to the latest colour mobiles with camera for Free! Call The Mobile Update Co FREE on 08002986030')\n",
      "10 ('ham', \"I'm gonna be home soon and i don't want to talk about this stuff anymore tonight, k? I've cried enough today.\")\n",
      "11 ('spam', 'SIX chances to win CASH! From 100 to 20,000 pounds txt> CSH11 and send to 87575. Cost 150p/day, 6days, 16+ TsandCs apply Reply HL 4 info')\n",
      "12 ('spam', 'URGENT! You have won a 1 week FREE membership in our £100,000 Prize Jackpot! Txt the word: CLAIM to No: 81010 T&C www.dbuk.net LCCLTD POBOX 4403LDNW1A7RW18')\n",
      "13 ('ham', \"I've been searching for the right words to thank you for this breather. I promise i wont take your help for granted and will fulfil my promise. You have been wonderful and a blessing at all times.\")\n",
      "14 ('ham', 'I HAVE A DATE ON SUNDAY WITH WILL!!')\n",
      "15 ('spam', 'XXXMobileMovieClub: To use your credit, click the WAP link in the next txt message or click here>> http://wap. xxxmobilemovieclub.com?n=QJKGIGHJJGCBL')\n",
      "16 ('ham', \"Oh k...i'm watching here:)\")\n",
      "17 ('ham', 'Eh u remember how 2 spell his name... Yes i did. He v naughty make until i v wet.')\n",
      "18 ('ham', 'Fine if that\\x92s the way u feel. That\\x92s the way its gota b')\n",
      "19 ('spam', 'England v Macedonia - dont miss the goals/team news. Txt ur national team to 87077 eg ENGLAND to 87077 Try:WALES, SCOTLAND 4txt/ú1.20 POBOXox36504W45WQ 16+')\n",
      "20 ('ham', 'Is that seriously how you spell his name?')\n",
      "21 ('ham', 'I‘m going to try for 2 months ha ha only joking')\n"
     ]
    }
   ],
   "source": [
    "from torch.utils.data import Dataset, DataLoader\n",
    "import pandas as pd\n",
    "\n",
    "data_path = r\"data/dataset/SMSSpamCollection\"\n",
    "\n",
    "\n",
    "class CifarDataset(Dataset):\n",
    "    \"\"\"Dataset over the SMS spam file; items are (label, sms) string pairs.\"\"\"\n",
    "\n",
    "    def __init__(self):\n",
    "        # Close the file handle deterministically (the original leaked it).\n",
    "        with open(data_path, \"r\", encoding=\"UTF-8\") as f:\n",
    "            # Each line is \"<label>\\t<message>\": split on the first tab\n",
    "            # rather than slicing a fixed width, since \"ham\" and \"spam\"\n",
    "            # differ in length. strip() removes the tab/newline remnants\n",
    "            # exactly as the fixed-width slicing did.\n",
    "            lines = [[part.strip() for part in line.split(\"\\t\", 1)] for line in f]\n",
    "        # Convert to a DataFrame with named columns.\n",
    "        self.df = pd.DataFrame(lines, columns=[\"label\", \"sms\"])\n",
    "\n",
    "    def __getitem__(self, index):\n",
    "        single_item = self.df.iloc[index, :]\n",
    "        return single_item.values[0], single_item.values[1]\n",
    "\n",
    "    def __len__(self):\n",
    "        return self.df.shape[0]\n",
    "\n",
    "\n",
    "d = CifarDataset()\n",
    "for i in range(len(d)):\n",
    "    print(i, d[i])\n",
    "    if i > 20:\n",
    "        break"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "markdown",
   "source": [
    "# 3. 进行迭代数据"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%% md\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0 ('ham', 'ham', 'ham', 'ham', 'ham', 'ham', 'ham', 'ham', 'ham', 'ham') (\"Lol you won't feel bad when I use her money to take you out to a steak dinner =D\", 'Bognor it is! Should be splendid at this time of year.', 'How much i gave to you. Morning.', \"Sorry,in meeting I'll call later\", \"Joy's father is John. Then John is the NAME of Joy's father. Mandan\", 'I wud never mind if u dont miss me or if u dont need me.. But u wil really hurt me wen u need me &amp; u dont tell me......... Take care:-)', \"She said,'' do u mind if I go into the bedroom for a minute ? '' ''OK'', I sed in a sexy mood. She came out 5 minuts latr wid a cake...n My Wife,\", \"1's reach home call me.\", 'Lol where do u come up with these ideas?', 'Hey cutie. How goes it? Here in WALES its kinda ok. There is like hills and shit but i still avent killed myself.')\n",
      "****************************************************************************************************\n",
      "1 ('ham', 'ham', 'ham', 'ham', 'ham', 'ham', 'ham', 'ham', 'ham', 'ham') (\"Nice line said by a broken heart- Plz don't cum 1 more times infront of me... Other wise once again I ll trust U... Good 9t:)\", 'In meeting da. I will call you', 'Night has ended for another day, morning has come in a special way. May you smile like the sunny rays and leaves your worries at the blue blue bay. Gud mrng', 'I bought the test yesterday. Its something that lets you know the exact day u ovulate.when will get 2u in about 2 to 3wks. But pls pls dont fret. I know u r worried. Pls relax. Also is there anything in ur past history u need to tell me?', 'U free on sat rite? U wan 2 watch infernal affairs wif me n darren n mayb xy?', 'Ok lar i double check wif da hair dresser already he said wun cut v short. He said will cut until i look nice.', 'Yup no more already... Thanx 4 printing n handing it up.', \"Yeah hopefully, if tyler can't do it I could maybe ask around a bit\", \"It's ok i wun b angry. Msg u aft i come home tonight.\", 'Dont search love, let love find U. Thats why its called falling in love, bcoz U dont force yourself, U just fall and U know there is smeone to hold U... BSLVYL')\n",
      "****************************************************************************************************\n",
      "2 ('ham', 'ham', 'ham', 'ham', 'ham', 'ham', 'ham', 'spam', 'ham', 'ham') (\"She's good. How are you. Where r u working now\", 'I know that my friend already told that.', 'Until 545 lor... Ya, can go 4 dinner together...', \"I'm on da bus going home...\", 'Just seeing your missed call my dear brother. Do have a gr8 day.', 'Al he does is moan at me if n e thin goes wrong its my fault&al de arguments r my fault&fed up of him of himso y bother? Hav 2go, thanx.xx', 'Ok then i come n pick u at engin?', 'Get your garden ready for summer with a FREE selection of summer bulbs and seeds worth £33:50 only with The Scotsman this Saturday. To stop go2 notxt.co.uk', \"The  &lt;#&gt; g that i saw a few days ago, the guy wants sell wifi only for  &lt;#&gt;  and with 3g for  &lt;#&gt; . That's why i blanked him.\", 'What part of \"don\\'t initiate\" don\\'t you understand')\n",
      "****************************************************************************************************\n",
      "3 ('ham', 'ham', 'ham', 'ham', 'ham', 'ham', 'ham', 'ham', 'ham', 'ham') ('Yes! How is a pretty lady like you single?', 'Package all your programs well', \"Come round, it's .\", \"Sorry da thangam.it's my mistake.\", 'And several to you sir.', \"K..k..i'm also fine:)when will you complete the course?\", 'Ok . . now i am in bus. . If i come soon i will come otherwise tomorrow', \"Can you let me know details of fri when u find out cos I'm not in tom or fri. mentionned chinese. Thanks\", 'Do you know why god created gap between your fingers..? So that, One who is made for you comes &amp; fills those gaps by holding your hand with LOVE..!', \"Cool, text me when you're parked\")\n",
      "****************************************************************************************************\n",
      "4 ('ham', 'ham', 'ham', 'ham', 'ham', 'ham', 'spam', 'ham', 'ham', 'ham') (\"Dunno dat's wat he told me. Ok lor...\", 'Would really appreciate if you call me. Just need someone to talk to.', \"Sir, hope your day is going smoothly. i really hoped i wont have to bother you about this. I have some bills that i can't settle this month. I am out of all extra cash. I know this is a challenging time for you also but i have to let you know.\", 'No need to ke qi... Ü too bored izzit y suddenly thk of this...', 'Okay, good, no problem, and thanx!', 'Yup i thk so until e shop closes lor.', 'The current leading bid is 151. To pause this auction send OUT. Customer Care: 08718726270', 'S..antha num corrct dane', 'No calls..messages..missed calls', 'Tell me again what your address is')\n",
      "****************************************************************************************************\n",
      "5 ('ham', 'ham', 'ham', 'ham', 'ham', 'ham', 'spam', 'ham', 'ham', 'ham') ('Sorry my roommates took forever, it ok if I come by now?', 'I asked sen to come chennai and search for job.', \"Ok... But they said i've got wisdom teeth hidden inside n mayb need 2 remove.\", 'Yes just finished watching days of our lives. I love it.', 'They can try! They can get lost, in fact. Tee hee', 'o turns out i had stereo love on mi phone under the unknown album.', 'Congrats! 2 mobile 3G Videophones R yours. call 09063458130 now! videochat wid your mates, play java games, Dload polyPH music, noline rentl.', 'Message:some text missing* Sender:Name Missing* *Number Missing *Sent:Date missing *Missing U a lot thats y everything is missing sent via fullonsms.com', 'What happened to our yo date?', \"A bloo bloo bloo I'll miss the first bowl\")\n",
      "****************************************************************************************************\n",
      "6 ('ham', 'spam', 'ham', 'ham', 'ham', 'ham', 'ham', 'ham', 'ham', 'ham') ('We have to pick rayan macleran there.', \"8007 25p 4 Alfie Moon's Children in Need song on ur mob. Tell ur m8s. Txt TONE CHARITY to 8007 for nokias or POLY CHARITY for polys :zed 08701417012 profit 2 charity\", 'Sometimes we put walls around our hearts,not just to be safe from getting hurt.. But to find out who cares enough to break the walls &amp; get closer.. GOODNOON:)', 'Hi juan. Im coming home on fri hey. Of course i expect a welcome party and lots of presents. Ill phone u when i get back. Loads of love nicky x x x x x x x x x', 'Sir, good morning. Hope you had a good weekend. I called to let you know that i was able to raise  &lt;#&gt;  from my dad. He however said he would make the rest available by mid feb. This amount is still quite short and i was hoping you would help. Do have a good day. Abiola', 'Does she usually take fifteen fucking minutes to respond to a yes or no question', 'Die... I accidentally deleted e msg i suppose 2 put in e sim archive. Haiz... I so sad...', 'Even if he my friend he is a priest call him now', 'K..k..any special today?', 'U should have made an appointment')\n",
      "****************************************************************************************************\n"
     ]
    }
   ],
   "source": [
    "from torch.utils.data import Dataset, DataLoader\n",
    "import pandas as pd\n",
    "\n",
    "data_path = r\"data/dataset/SMSSpamCollection\"\n",
    "\n",
    "\n",
    "class CifarDataset(Dataset):\n",
    "    \"\"\"Dataset over the SMS spam file; items are (label, sms) string pairs.\n",
    "\n",
    "    NOTE(review): duplicates the class defined in the previous cell; kept\n",
    "    so this cell runs standalone, but consider moving it into a module.\n",
    "    \"\"\"\n",
    "\n",
    "    def __init__(self):\n",
    "        # Close the file handle deterministically (the original leaked it).\n",
    "        with open(data_path, \"r\", encoding=\"UTF-8\") as f:\n",
    "            # The first 4 characters hold the label; the rest is the SMS text.\n",
    "            lines = [[i[:4].strip(), i[4:].strip()] for i in f]\n",
    "        # Convert to a DataFrame with named columns.\n",
    "        self.df = pd.DataFrame(lines, columns=[\"label\", \"sms\"])\n",
    "\n",
    "    def __getitem__(self, index):\n",
    "        single_item = self.df.iloc[index, :]\n",
    "        return single_item.values[0], single_item.values[1]\n",
    "\n",
    "    def __len__(self):\n",
    "        return self.df.shape[0]\n",
    "\n",
    "\n",
    "dataset = CifarDataset()\n",
    "data_loader = DataLoader(dataset=dataset, batch_size=10, shuffle=True)\n",
    "# data_loader = DataLoader(dataset=dataset, batch_size=10, shuffle=True, num_workers=2)\n",
    "# num_workers > 0 spawns worker processes for data loading, which does not\n",
    "# work reliably inside Jupyter; 0 (the default) loads in the main process.\n",
    "\n",
    "# Iterate over batches: each batch is a (labels tuple, messages tuple) pair.\n",
    "for index, (label, context) in enumerate(data_loader):\n",
    "    print(index, label, context)\n",
    "    print(\"*\" * 100)\n",
    "    # Only show the first 7 batches.\n",
    "    if index > 5:\n",
    "        break"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "markdown",
   "source": [
    "# 4. 加载自定义数据集"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%% md\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Downloading http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz\n",
      "Downloading http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz to ./mini\\MNIST\\raw\\train-images-idx3-ubyte.gz\n"
     ]
    },
    {
     "data": {
      "text/plain": "  0%|          | 0/9912422 [00:00<?, ?it/s]",
      "application/vnd.jupyter.widget-view+json": {
       "version_major": 2,
       "version_minor": 0,
       "model_id": "d9b7bcabea7e49cdb48eea572c0bd188"
      }
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Extracting ./mini\\MNIST\\raw\\train-images-idx3-ubyte.gz to ./mini\\MNIST\\raw\n",
      "\n",
      "Downloading http://yann.lecun.com/exdb/mnist/train-labels-idx1-ubyte.gz\n",
      "Downloading http://yann.lecun.com/exdb/mnist/train-labels-idx1-ubyte.gz to ./mini\\MNIST\\raw\\train-labels-idx1-ubyte.gz\n"
     ]
    },
    {
     "data": {
      "text/plain": "  0%|          | 0/28881 [00:00<?, ?it/s]",
      "application/vnd.jupyter.widget-view+json": {
       "version_major": 2,
       "version_minor": 0,
       "model_id": "ec529453b07a4856a344eb8365c7d117"
      }
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Extracting ./mini\\MNIST\\raw\\train-labels-idx1-ubyte.gz to ./mini\\MNIST\\raw\n",
      "\n",
      "Downloading http://yann.lecun.com/exdb/mnist/t10k-images-idx3-ubyte.gz\n",
      "Downloading http://yann.lecun.com/exdb/mnist/t10k-images-idx3-ubyte.gz to ./mini\\MNIST\\raw\\t10k-images-idx3-ubyte.gz\n"
     ]
    },
    {
     "data": {
      "text/plain": "  0%|          | 0/1648877 [00:00<?, ?it/s]",
      "application/vnd.jupyter.widget-view+json": {
       "version_major": 2,
       "version_minor": 0,
       "model_id": "21d53632256d491dab18c5b92ce8d0e5"
      }
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Extracting ./mini\\MNIST\\raw\\t10k-images-idx3-ubyte.gz to ./mini\\MNIST\\raw\n",
      "\n",
      "Downloading http://yann.lecun.com/exdb/mnist/t10k-labels-idx1-ubyte.gz\n",
      "Downloading http://yann.lecun.com/exdb/mnist/t10k-labels-idx1-ubyte.gz to ./mini\\MNIST\\raw\\t10k-labels-idx1-ubyte.gz\n"
     ]
    },
    {
     "data": {
      "text/plain": "  0%|          | 0/4542 [00:00<?, ?it/s]",
      "application/vnd.jupyter.widget-view+json": {
       "version_major": 2,
       "version_minor": 0,
       "model_id": "bcc32d352af446a7a7981ca416c264a2"
      }
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Extracting ./mini\\MNIST\\raw\\t10k-labels-idx1-ubyte.gz to ./mini\\MNIST\\raw\n",
      "\n",
      "(<PIL.Image.Image image mode=L size=28x28 at 0x1C98D954970>, 5)\n"
     ]
    }
   ],
   "source": [
    "from torchvision.datasets import MNIST\n",
    "\n",
    "# Download the MNIST training set into ./mini. With transform=None each\n",
    "# item is a (PIL.Image, int-label) pair, as the printed output shows.\n",
    "dataset = MNIST(root=\"./mini\", train=True, download=True, transform=None)\n",
    "\n",
    "print(dataset[0])\n",
    "print(\"下载结束\")"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "outputs": [],
   "source": [
    "# Once the download has finished, the cached copy can be used directly\n",
    "# (download=False skips re-downloading).\n",
    "from torchvision.datasets import MNIST\n",
    "\n",
    "dataset = MNIST(root=\"./mini\", train=True, download=False, transform=None)\n",
    "# dataset[0] is (image, label); .show() opens the PIL image in a viewer.\n",
    "dataset[0][0].show()\n"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "markdown",
   "source": [
    "# transforms 函数测试"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%% md\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(2, 2, 3)\n",
      "tensor([[[ 48, 242],\n",
      "         [238,  86]],\n",
      "\n",
      "        [[ 80,  81],\n",
      "         [220, 204]],\n",
      "\n",
      "        [[249, 173],\n",
      "         [247, 192]]], dtype=torch.int32)\n",
      "tensor([[[ 48, 242],\n",
      "         [238,  86]],\n",
      "\n",
      "        [[ 80,  81],\n",
      "         [220, 204]],\n",
      "\n",
      "        [[249, 173],\n",
      "         [247, 192]]], dtype=torch.int32)\n",
      "torch.Size([3, 2, 2])\n"
     ]
    }
   ],
   "source": [
    "import torch\n",
    "from torchvision import transforms\n",
    "import numpy as np\n",
    "\n",
    "# Demonstrate transforms.ToTensor() on an integer HWC array: the values\n",
    "# are kept as-is (no /255 scaling for non-uint8 input) and the layout is\n",
    "# moved channels-first (HWC -> CHW). `import torch` is added here so the\n",
    "# cell no longer depends on an earlier cell having imported it.\n",
    "data = np.random.randint(0, 255, size=12)\n",
    "img = data.reshape(2, 2, 3)\n",
    "print(img.shape)\n",
    "img_tensor = transforms.ToTensor()(img)  # convert to a CHW tensor\n",
    "print(img_tensor)\n",
    "# Equivalent manual conversion: build a tensor and permute HWC -> CHW.\n",
    "print(torch.tensor(img).permute(2, 0, 1))\n",
    "print(img_tensor.shape)"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "tensor([[[166., 184.],\n",
      "         [ 22.,  97.]],\n",
      "\n",
      "        [[ 65., 191.],\n",
      "         [228., 125.]],\n",
      "\n",
      "        [[ 72., 105.],\n",
      "         [  1., 107.]]])\n",
      "torch.Size([3, 2, 2])\n",
      "****************************************************************************************************\n",
      "torch.Size([3, 2, 2])\n",
      "tensor([[[156., 174.],\n",
      "         [ 12.,  87.]],\n",
      "\n",
      "        [[ 55., 181.],\n",
      "         [218., 115.]],\n",
      "\n",
      "        [[ 62.,  95.],\n",
      "         [ -9.,  97.]]])\n"
     ]
    }
   ],
   "source": [
    "from torchvision import transforms\n",
    "import numpy as np\n",
    "import torchvision\n",
    "\n",
    "data = np.random.randint(0, 255, size=12)\n",
    "img = data.reshape(2, 2, 3).astype(np.float32)\n",
    "img = transforms.ToTensor()(img)  # convert to a CHW tensor (float input: values kept as-is)\n",
    "print(img)\n",
    "print(img.shape)\n",
    "print(\"*\" * 100)\n",
    "\n",
    "# Normalize applies (input - mean) / std per channel; with mean=10 and\n",
    "# std=1 every value is simply shifted down by 10 (see the output above).\n",
    "norm_img = transforms.Normalize((10, 10, 10), (1, 1, 1))(img)\n",
    "print(norm_img.shape)\n",
    "print(norm_img)\n"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "markdown",
   "source": [
    "# 激活函数的使用"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%% md\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "tensor([0, 0, 0, 1, 2])\n"
     ]
    }
   ],
   "source": [
    "import torch\n",
    "import torch.nn.functional as F\n",
    "\n",
    "# ReLU is elementwise max(x, 0): negatives become 0, the rest pass through.\n",
    "b = torch.tensor([-2, -1, 0, 1, 2])\n",
    "\n",
    "print(F.relu(b))"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "tensor([[0.9894, 0.6713, 0.9827],\n",
      "        [0.0604, 0.3050, 0.7497],\n",
      "        [0.1217, 0.9426, 0.6780],\n",
      "        [0.5129, 0.4356, 0.2380]], dtype=torch.float64)\n",
      "torch.return_types.max(\n",
      "values=tensor([0.9894, 0.7497, 0.9426, 0.5129], dtype=torch.float64),\n",
      "indices=tensor([0, 2, 1, 0]))\n",
      "tensor([0, 2, 1, 0])\n"
     ]
    }
   ],
   "source": [
    "import numpy as np\n",
    "import torch\n",
    "\n",
    "# Random 4x3 matrix; torch.tensor keeps numpy's float64 dtype.\n",
    "t1 = torch.tensor(np.random.rand(4, 3))\n",
    "print(t1)\n",
    "# max over the last dim returns a (values, indices) named tuple per row.\n",
    "row_max = t1.max(dim=-1)\n",
    "print(row_max)\n",
    "# The per-row argmax indices (equivalent to indexing the tuple with [-1]).\n",
    "print(row_max.indices)"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "outputs": [],
   "source": [],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 0
}