{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import torch \n",
    "from UTILS.tensor_ops import __hash__, my_view, np_one_hot, np_repeat_at, np_softmax\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "def item_random_mv(src,dst,prob,rand=False):\n",
    "    assert len(src.shape)==1; assert len(dst.shape)==1\n",
    "    if rand: np.random.shuffle(src)\n",
    "    len_src = len(src)\n",
    "    n_mv = (np.random.rand(len_src) < prob).sum()\n",
    "    print('n_mv',n_mv)\n",
    "    item_mv = src[range(len_src-n_mv,len_src)]\n",
    "    print('item_mv',item_mv)\n",
    "    src = src[range(0,0+len_src-n_mv)]\n",
    "    print('src',src)\n",
    "    dst = np.concatenate((item_mv, dst))\n",
    "    return src,dst\n",
    "\n",
    "vis_index = np.array([1,2,3,4,5])\n",
    "invis_index = np.array([6,7,8,9,10])\n",
    "print(item_random_mv(src=vis_index, dst=invis_index, prob=0.1, rand=True))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(1, 2)\n",
      "(1, 2, 2, 4)\n"
     ]
    }
   ],
   "source": [
    "reward = np.array([[-0.5, 1]]) #(time, agent)   (3,2)                                (3,0)\n",
    "reward_logits = np.array([[  [[0,  0.1,  0,  0.12],[0,  0.1,  0.2,  0.1]],[[0, 0.1, 0, 0.12],[0.2, 0.1, 0, 0.12]]   ]]) #(time, agent, agent, 4)\n",
    "print(reward.shape)\n",
    "print(reward_logits.shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "<ipython-input-7-07a2cba73353>:5: DeprecationWarning: `np.int` is a deprecated alias for the builtin `int`. To silence this warning, use `int` by itself. Doing this will not modify any behavior and is safe. When replacing `np.int`, you may wish to use e.g. `np.int64` or `np.int32` to specify the precision. If you wish to review your current use, check the release note link for additional information.\n",
      "Deprecated in NumPy 1.20; for more details and guidance: https://numpy.org/devdocs/release/1.20.0-notes.html#deprecations\n",
      "  reward_projection = lambda r: np.where(r<0, 3, r*2).astype(np.int)\n"
     ]
    }
   ],
   "source": [
    "n_timestep = reward_logits.shape[0]\n",
    "n_agent = reward_logits.shape[1]\n",
    "# Rewards must come from the discrete set {-0.5, 0, 0.5, 1}.\n",
    "assert not ((reward != 0) & (reward!=1) & (reward!=-0.5) & (reward!=0.5)).any()\n",
    "# Project {-0.5, 0, 0.5, 1} -> class ids {3, 0, 1, 2}: negatives map to 3.\n",
    "reward_projection = lambda r: np.where(r<0, 3, r*2).astype(int)  # fix: np.int is deprecated (removed in NumPy >= 1.24)\n",
    "# apply projection\n",
    "reward_class = reward_projection(reward)\n",
    "# ($time_step, $n_agent) -> ($time_step, $n_agent_, $n_agent) via np_repeat_at\n",
    "reward_class = np_repeat_at(reward_class, insert_dim=1, n_times=n_agent)\n",
    "# entries whose true reward class is non-zero\n",
    "not_zero_mask = (reward_class!=0)\n",
    "# one-hot encoding over the 4 reward classes\n",
    "reward_class_onehot = np_one_hot(reward_class, 4)\n",
    "# turn the logits into per-class probabilities\n",
    "reward_logits = np_softmax(reward_logits, axis=-1)\n",
    "# probability the model assigned to the true class\n",
    "prediction_rating = np.take_along_axis(reward_logits, axis=-1, \n",
    "    indices=np.expand_dims(reward_class, -1)).squeeze(-1)\n",
    "# whether the argmax class matches the true class\n",
    "correct_prediction = (np.argmax(reward_logits, -1) == reward_class)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[[[ True  True]\n",
      "  [ True  True]]]\n"
     ]
    }
   ],
   "source": [
    "# Confidence must beat uniform chance over the 4 classes by 5%.\n",
    "confidence_threshold = 0.25 * 1.05\n",
    "eye_mat = np_repeat_at(np.eye(n_agent), insert_dim=0, n_times=n_timestep)\n",
    "# A prediction counts when it is correct, confident, and about a non-zero\n",
    "# reward; the diagonal (an agent vs. itself) is always True.\n",
    "is_confident = prediction_rating > confidence_threshold\n",
    "compasion_matrix = (correct_prediction & is_confident & not_zero_mask) | (eye_mat == 1)\n",
    "print(compasion_matrix)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[[0.25 0.25]]\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "<ipython-input-17-3b9ad3e8d18f>:2: DeprecationWarning: `np.int` is a deprecated alias for the builtin `int`. To silence this warning, use `int` by itself. Doing this will not modify any behavior and is safe. When replacing `np.int`, you may wish to use e.g. `np.int64` or `np.int32` to specify the precision. If you wish to review your current use, check the release note link for additional information.\n",
      "Deprecated in NumPy 1.20; for more details and guidance: https://numpy.org/devdocs/release/1.20.0-notes.html#deprecations\n",
      "  compasion_matrix = compasion_matrix.astype(np.int)\n"
     ]
    }
   ],
   "source": [
    "# compasion_matrix = (prediction_rating > confidence_threshhold) & not_zero_mask & (eye_mat==0)\n",
    "compasion_matrix = compasion_matrix.astype(int)  # fix: np.int is deprecated (removed in NumPy >= 1.24)\n",
    "\n",
    "# Compute the decomposed reward: one reward may be shared by several agents,\n",
    "# so divide each reward by the number of agents sharing it.\n",
    "n_com = compasion_matrix.sum(-2)\n",
    "assert not (n_com==0).any()  # the diagonal set in the previous cell guarantees >= 1\n",
    "# Defensive guard kept even though the assert above rules out n_com == 0.\n",
    "reward_decmp = np.where(n_com!=0, reward/n_com, reward)\n",
    "# Batched matmul redistributes the decomposed rewards to the sharing agents:\n",
    "# https://numpy.org/doc/stable/reference/generated/numpy.matmul.html\n",
    "compassion_reward = np.matmul(compasion_matrix, np.expand_dims(reward_decmp,-1)).squeeze(-1)\n",
    "print(compassion_reward)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 40,
   "metadata": {},
   "outputs": [],
   "source": [
    "'''\n",
    "函数说明：在有限的、不均衡的多标签数据集中，按照预设的比例，取出尽可能多的样本\n",
    "'''\n",
    "def sample_balance(x, y, n_class, weight=None):\n",
    "    if weight is None: weight = torch.ones(n_class, device=x.device)\n",
    "    else: weight = torch.Tensor(weight, device=x.device)\n",
    "    n_instance = torch.zeros(n_class, device=x.device)\n",
    "    indices = [None]*n_class\n",
    "    for i in range(n_class):\n",
    "        indices[i] = torch.where(y==i)[0]\n",
    "        n_instance[i] = len(indices[i])\n",
    "    ratio = n_instance/weight\n",
    "    bottle_neck = torch.argmin(n_instance/weight)\n",
    "    r = ratio[bottle_neck]\n",
    "    n_sample = (r*weight).long()\n",
    "    # print(n_instance, n_sample)\n",
    "    new_indices = [indices[i][torch.randperm(n_sample[i])] for i in range(n_class)]\n",
    "    # print(new_indices)\n",
    "    new_indices_ = torch.cat(new_indices)\n",
    "    assert len(new_indices_) == sum(n_sample)\n",
    "    return x[new_indices_], y[new_indices_]\n",
    "\n",
    "'''\n",
    "测试代码\n",
    "'''\n",
    "x = torch.rand(200, 4)\n",
    "y1 = torch.rand(100, 4)\n",
    "y2 = torch.rand(100, 4)\n",
    "y2[:, 0] += 1\n",
    "y = torch.cat((y1,y2))\n",
    "y = torch.argmax(y, -1)\n",
    "print(y)\n",
    "n_class = 4\n",
    "weight = [2,1,1,1]\n",
    "sample_balance(x,y,4,weight)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 45,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "tensor([0, 2, 1, 3, 3, 1, 0, 0, 1, 3, 1, 3, 2, 2, 2, 2, 3, 2, 2, 1, 0, 3, 3, 3,\n",
      "        3, 2, 2, 2, 1, 3, 3, 1, 0, 0, 2, 0, 1, 2, 0, 3, 1, 3, 1, 3, 2, 3, 1, 2,\n",
      "        0, 2, 1, 1, 3, 2, 3, 2, 1, 2, 1, 1, 3, 1, 0, 3, 3, 2, 0, 3, 2, 0, 2, 0,\n",
      "        0, 0, 3, 2, 3, 1, 1, 1, 0, 3, 1, 3, 1, 1, 3, 1, 0, 0, 1, 2, 0, 0, 2, 1,\n",
      "        0, 2, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
      "        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
      "        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
      "        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
      "        0, 0, 0, 0, 0, 0, 0, 0])\n",
      "tensor([122.,  26.,  25.,  27.]) tensor([50, 25, 25, 25])\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "(tensor([[2.2656e-02, 4.1251e-01, 3.2707e-01, 7.2534e-01],\n",
       "         [1.2332e-01, 2.7637e-01, 4.8348e-01, 2.7835e-01],\n",
       "         [1.2237e-01, 7.0544e-01, 6.5689e-01, 5.1355e-01],\n",
       "         [3.9261e-02, 6.1284e-01, 5.4430e-01, 6.9022e-02],\n",
       "         [7.5210e-01, 5.6684e-01, 2.5894e-01, 5.2485e-01],\n",
       "         [8.1709e-01, 8.4347e-01, 2.3117e-01, 1.1277e-01],\n",
       "         [8.4153e-01, 5.2814e-01, 6.3028e-01, 2.2639e-01],\n",
       "         [8.4371e-01, 6.2015e-01, 4.6847e-01, 3.5423e-02],\n",
       "         [6.9924e-01, 6.0353e-01, 9.3364e-01, 1.7731e-02],\n",
       "         [7.1736e-01, 9.8774e-01, 4.0972e-01, 9.4791e-01],\n",
       "         [1.3912e-01, 5.7476e-02, 2.9873e-01, 6.4819e-01],\n",
       "         [3.0322e-01, 5.7693e-01, 3.1014e-01, 1.8827e-01],\n",
       "         [9.3742e-02, 3.8578e-01, 7.3496e-01, 1.1003e-01],\n",
       "         [1.2636e-04, 2.7071e-01, 4.9675e-01, 3.1337e-01],\n",
       "         [1.2764e-01, 4.1302e-02, 3.8436e-01, 2.0502e-01],\n",
       "         [2.2064e-01, 2.3158e-01, 3.3987e-01, 3.1554e-01],\n",
       "         [9.2388e-01, 5.5447e-01, 6.8357e-02, 5.1233e-01],\n",
       "         [6.4991e-01, 8.4204e-01, 9.1599e-01, 6.1346e-01],\n",
       "         [3.4666e-02, 3.4611e-01, 5.1735e-01, 3.2951e-01],\n",
       "         [3.6104e-01, 5.8884e-01, 7.0039e-02, 7.0484e-01],\n",
       "         [3.7024e-01, 9.7545e-01, 5.9680e-01, 8.2397e-01],\n",
       "         [6.1316e-01, 2.8431e-01, 6.6727e-01, 5.2738e-01],\n",
       "         [7.8263e-01, 4.8445e-01, 2.3528e-02, 5.9693e-02],\n",
       "         [6.2185e-01, 5.0612e-01, 9.9202e-03, 2.2032e-01],\n",
       "         [1.7392e-01, 6.6964e-02, 7.5533e-01, 1.6540e-01],\n",
       "         [5.7367e-01, 8.7884e-01, 9.5299e-03, 3.7473e-01],\n",
       "         [9.7311e-01, 1.7888e-01, 4.1985e-01, 1.9307e-01],\n",
       "         [5.2804e-01, 1.4585e-01, 2.4079e-01, 4.7394e-01],\n",
       "         [8.8154e-01, 9.2095e-01, 7.2604e-01, 9.1100e-01],\n",
       "         [5.5989e-02, 8.2802e-01, 8.8680e-01, 2.1731e-01],\n",
       "         [6.0276e-01, 6.7213e-01, 4.3302e-01, 1.9685e-01],\n",
       "         [5.8194e-01, 1.0595e-01, 1.9474e-01, 1.3795e-01],\n",
       "         [6.9914e-01, 7.5442e-01, 9.1586e-01, 5.9908e-01],\n",
       "         [3.0518e-01, 5.8883e-02, 7.2842e-01, 9.3072e-01],\n",
       "         [9.7976e-01, 6.1523e-01, 2.0323e-01, 4.9963e-02],\n",
       "         [1.3042e-01, 7.2057e-02, 1.3992e-01, 4.9736e-01],\n",
       "         [8.7976e-01, 4.9049e-01, 6.3431e-01, 6.5175e-01],\n",
       "         [5.5337e-01, 2.9666e-01, 6.6276e-01, 7.7813e-01],\n",
       "         [5.8103e-01, 1.0299e-01, 7.5592e-01, 7.9875e-01],\n",
       "         [4.7456e-01, 8.2955e-01, 4.2449e-01, 8.5445e-01],\n",
       "         [4.4446e-01, 4.8673e-01, 4.9418e-01, 4.5778e-01],\n",
       "         [7.0508e-01, 4.1622e-01, 9.4946e-01, 7.5909e-01],\n",
       "         [7.3016e-01, 9.3456e-01, 9.5596e-01, 8.7730e-01],\n",
       "         [4.8613e-01, 3.4998e-01, 2.9868e-01, 3.3276e-01],\n",
       "         [4.3059e-01, 6.6159e-01, 1.5877e-01, 1.6661e-01],\n",
       "         [3.6456e-01, 3.6253e-02, 8.6169e-01, 5.0426e-02],\n",
       "         [5.5974e-01, 7.9177e-01, 5.7444e-01, 7.1195e-01],\n",
       "         [7.4411e-01, 3.9740e-01, 5.2682e-01, 5.3969e-01],\n",
       "         [6.7271e-01, 2.9321e-01, 8.3561e-01, 9.8787e-01],\n",
       "         [1.4162e-01, 8.0065e-02, 5.7178e-01, 2.2747e-01],\n",
       "         [3.2958e-01, 6.9783e-01, 3.1080e-01, 1.5820e-01],\n",
       "         [5.2147e-01, 8.8479e-01, 7.5762e-01, 6.8521e-01],\n",
       "         [7.5218e-01, 1.0971e-01, 2.1612e-01, 7.6277e-02],\n",
       "         [4.8165e-01, 7.0991e-01, 1.8270e-01, 4.9710e-01],\n",
       "         [6.1859e-01, 8.5819e-01, 7.8146e-01, 8.4214e-02],\n",
       "         [3.8494e-01, 3.9859e-01, 9.4629e-01, 9.4274e-01],\n",
       "         [7.0757e-01, 1.8575e-01, 3.4942e-01, 2.2342e-01],\n",
       "         [6.4433e-01, 2.7609e-01, 4.5498e-01, 5.7752e-01],\n",
       "         [9.3791e-01, 9.8797e-01, 3.0498e-01, 6.4319e-01],\n",
       "         [8.2265e-01, 1.5506e-01, 5.4008e-01, 4.2207e-01],\n",
       "         [8.7909e-01, 2.2812e-01, 4.6589e-01, 2.9760e-02],\n",
       "         [3.5911e-01, 3.4477e-02, 6.3150e-01, 2.1478e-01],\n",
       "         [3.2011e-01, 2.7809e-01, 2.4773e-01, 9.4035e-01],\n",
       "         [8.8432e-01, 6.9678e-01, 4.9151e-01, 1.5742e-01],\n",
       "         [1.9140e-01, 8.9089e-01, 4.2106e-01, 3.7427e-01],\n",
       "         [1.9010e-01, 2.9831e-01, 1.7468e-02, 5.7440e-01],\n",
       "         [2.0636e-01, 3.9656e-01, 8.1871e-01, 2.0767e-01],\n",
       "         [4.1720e-03, 2.2873e-01, 8.8214e-01, 9.8670e-01],\n",
       "         [6.6143e-01, 8.7294e-01, 7.1422e-01, 1.4158e-01],\n",
       "         [8.1962e-01, 7.2829e-01, 6.5320e-01, 2.5906e-01],\n",
       "         [2.7520e-01, 8.2150e-01, 3.5965e-01, 7.5347e-01],\n",
       "         [8.0926e-01, 5.6970e-02, 5.5013e-01, 6.5356e-01],\n",
       "         [7.7979e-01, 3.4357e-01, 3.7659e-01, 2.2718e-01],\n",
       "         [1.3910e-01, 3.8669e-01, 2.6026e-01, 2.5006e-01],\n",
       "         [6.5214e-01, 9.8991e-02, 1.0387e-01, 9.0030e-01],\n",
       "         [3.4063e-01, 8.4063e-01, 4.7955e-01, 1.2706e-01],\n",
       "         [2.9386e-01, 7.1552e-01, 3.6486e-01, 5.8860e-01],\n",
       "         [4.5193e-02, 8.1407e-01, 1.9657e-01, 3.5336e-01],\n",
       "         [6.6552e-01, 9.8028e-01, 5.2913e-01, 9.9010e-01],\n",
       "         [2.1723e-01, 5.2550e-01, 8.7075e-01, 4.3510e-01],\n",
       "         [7.6239e-01, 6.0861e-02, 7.1441e-01, 5.0908e-01],\n",
       "         [2.4082e-01, 7.5111e-01, 4.8601e-01, 4.3937e-01],\n",
       "         [3.7768e-01, 3.3402e-01, 6.0788e-01, 4.1286e-01],\n",
       "         [1.4396e-02, 4.2977e-01, 6.8392e-01, 7.8476e-01],\n",
       "         [1.1756e-01, 1.9775e-01, 8.2999e-01, 1.0070e-01],\n",
       "         [5.9161e-01, 4.4281e-01, 5.3770e-01, 6.6860e-01],\n",
       "         [7.1888e-01, 7.1379e-02, 2.3718e-01, 1.4560e-01],\n",
       "         [2.9200e-01, 9.5538e-02, 6.7803e-01, 4.4499e-01],\n",
       "         [7.3763e-01, 9.2306e-01, 2.0729e-01, 3.4376e-01],\n",
       "         [8.6086e-01, 5.4867e-01, 7.1213e-01, 9.3932e-01],\n",
       "         [6.1866e-01, 9.3718e-01, 2.2751e-01, 4.8115e-01],\n",
       "         [9.2230e-01, 7.1402e-01, 2.3862e-01, 3.9676e-01],\n",
       "         [6.8319e-01, 7.1444e-01, 4.1904e-01, 4.9722e-01],\n",
       "         [2.4593e-01, 5.8055e-01, 8.2018e-01, 2.6634e-01],\n",
       "         [4.4406e-01, 2.3790e-01, 5.7055e-01, 1.4531e-01],\n",
       "         [2.0483e-01, 9.9219e-01, 6.6442e-01, 4.2866e-01],\n",
       "         [9.1008e-02, 1.2661e-01, 3.6259e-01, 5.3942e-01],\n",
       "         [8.7538e-02, 4.7590e-01, 3.0990e-01, 8.6867e-01],\n",
       "         [3.2461e-01, 6.9892e-01, 5.7183e-01, 5.5162e-01],\n",
       "         [2.4944e-01, 3.5048e-01, 9.7905e-01, 2.3639e-02],\n",
       "         [6.3692e-02, 1.5951e-01, 3.3593e-01, 2.7508e-01],\n",
       "         [3.0263e-01, 5.5209e-01, 6.2602e-01, 5.6656e-01],\n",
       "         [7.0293e-01, 9.8573e-01, 8.8896e-01, 7.8050e-01],\n",
       "         [7.4468e-01, 6.4487e-01, 5.7659e-01, 3.5251e-01],\n",
       "         [7.1979e-01, 6.3494e-01, 4.0070e-01, 7.9347e-01],\n",
       "         [6.4583e-01, 8.2382e-01, 7.0390e-01, 7.1337e-01],\n",
       "         [5.7380e-02, 9.7241e-01, 6.8935e-02, 4.9069e-01],\n",
       "         [1.7902e-01, 7.0434e-01, 1.6399e-01, 4.4931e-01],\n",
       "         [6.9432e-01, 3.0786e-01, 7.6564e-01, 4.7100e-01],\n",
       "         [5.7675e-01, 1.1787e-01, 4.9835e-01, 5.0357e-01],\n",
       "         [8.6340e-01, 7.3418e-01, 2.8867e-01, 4.3377e-01],\n",
       "         [6.0866e-01, 6.7862e-01, 9.5616e-01, 1.2781e-01],\n",
       "         [1.1134e-01, 5.3050e-02, 7.7990e-01, 2.2883e-01],\n",
       "         [7.3697e-01, 3.2017e-01, 5.7552e-01, 5.8605e-01],\n",
       "         [3.6977e-01, 6.8221e-01, 5.6553e-01, 3.7971e-01],\n",
       "         [1.6314e-03, 4.7031e-01, 8.5392e-01, 7.7411e-01],\n",
       "         [9.5473e-01, 8.2436e-01, 6.7413e-01, 1.0078e-01],\n",
       "         [8.4313e-02, 9.3682e-01, 2.1159e-01, 5.7381e-01],\n",
       "         [7.1548e-01, 3.7137e-01, 7.5895e-01, 5.6446e-01],\n",
       "         [2.6586e-02, 1.6307e-01, 2.2181e-02, 2.7763e-02],\n",
       "         [2.7594e-01, 1.3782e-01, 1.9548e-01, 3.6369e-01],\n",
       "         [7.4180e-01, 3.7352e-01, 5.6470e-01, 2.7063e-01],\n",
       "         [8.7251e-01, 8.3326e-01, 8.5085e-01, 7.0252e-01],\n",
       "         [3.6393e-01, 8.9089e-01, 1.5523e-01, 8.2894e-01],\n",
       "         [1.6476e-01, 6.3165e-01, 3.7895e-01, 9.3383e-03]]),\n",
       " tensor([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
       "         0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
       "         0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n",
       "         1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,\n",
       "         2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,\n",
       "         3, 3, 3, 3, 3]))"
      ]
     },
     "execution_count": 45,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 41,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "tensor([21., 28., 24., 27.]) tensor([ 7, 28, 21, 14])\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "(tensor([[8.8547e-03, 6.8349e-01, 1.8454e-01, 5.9639e-01],\n",
       "         [8.9973e-01, 7.6351e-01, 7.7360e-01, 8.7484e-02],\n",
       "         [7.0845e-01, 4.7663e-01, 7.1757e-01, 1.9168e-02],\n",
       "         [1.1895e-01, 1.1693e-01, 5.9661e-01, 5.2639e-01],\n",
       "         [9.9516e-01, 5.8231e-01, 9.0453e-01, 3.6128e-01],\n",
       "         [2.5497e-01, 2.5144e-01, 4.4595e-01, 2.2235e-01],\n",
       "         [6.0990e-01, 2.6511e-01, 5.4970e-01, 2.0068e-01],\n",
       "         [4.5902e-01, 9.4615e-01, 9.5732e-01, 8.1278e-01],\n",
       "         [4.0710e-01, 6.8418e-02, 4.1449e-01, 5.1667e-01],\n",
       "         [4.8025e-02, 6.0615e-01, 1.8411e-01, 9.6834e-01],\n",
       "         [9.1179e-01, 3.8608e-01, 5.2052e-01, 5.5045e-02],\n",
       "         [3.2310e-01, 9.7595e-01, 1.5475e-01, 3.1984e-01],\n",
       "         [9.2913e-01, 9.3347e-01, 3.7687e-01, 8.3900e-01],\n",
       "         [2.7752e-01, 3.2392e-02, 8.7160e-01, 3.2852e-01],\n",
       "         [4.2996e-01, 6.1758e-01, 5.0515e-03, 3.5394e-01],\n",
       "         [9.4292e-02, 9.4540e-01, 6.7231e-01, 7.4819e-01],\n",
       "         [6.0542e-01, 6.2166e-01, 4.4464e-01, 8.6807e-01],\n",
       "         [1.8750e-01, 6.4619e-01, 9.1007e-01, 7.5251e-01],\n",
       "         [5.5168e-01, 4.5295e-01, 9.8402e-01, 5.9227e-01],\n",
       "         [7.5946e-01, 5.6623e-01, 6.7260e-01, 6.2542e-02],\n",
       "         [9.5978e-01, 5.7164e-01, 5.3019e-01, 1.6708e-01],\n",
       "         [4.4175e-01, 3.2513e-01, 6.6611e-01, 9.6703e-01],\n",
       "         [1.6086e-01, 4.2838e-01, 5.5933e-01, 5.7707e-01],\n",
       "         [3.0079e-01, 3.6859e-01, 1.3561e-01, 3.7671e-01],\n",
       "         [5.7892e-01, 6.8439e-01, 9.6202e-05, 9.6318e-01],\n",
       "         [9.4604e-01, 8.9798e-01, 5.7328e-01, 4.5702e-01],\n",
       "         [7.7579e-01, 2.0965e-01, 6.5949e-01, 3.7299e-01],\n",
       "         [4.1597e-02, 7.1866e-01, 1.0764e-01, 3.9976e-01],\n",
       "         [5.1728e-01, 4.5593e-01, 3.3406e-01, 7.4775e-01],\n",
       "         [7.8438e-01, 7.0168e-01, 1.3314e-01, 9.6558e-01],\n",
       "         [9.7835e-01, 2.2836e-01, 6.7686e-01, 5.2872e-01],\n",
       "         [9.6186e-01, 5.1433e-01, 5.0227e-01, 4.0211e-01],\n",
       "         [4.2444e-01, 5.0426e-01, 6.6820e-01, 6.0754e-01],\n",
       "         [2.3725e-02, 2.1865e-01, 7.5670e-01, 1.0313e-01],\n",
       "         [3.8718e-01, 8.0342e-01, 7.9793e-01, 4.4246e-01],\n",
       "         [3.4005e-01, 3.4970e-01, 2.7429e-01, 8.5657e-02],\n",
       "         [4.4419e-02, 9.2665e-01, 9.0367e-02, 1.7727e-01],\n",
       "         [8.4956e-01, 7.5547e-01, 9.8201e-01, 5.2711e-01],\n",
       "         [4.0429e-01, 3.7317e-01, 9.0919e-01, 6.4667e-01],\n",
       "         [6.1935e-01, 4.7308e-01, 8.6452e-01, 8.8543e-01],\n",
       "         [8.1716e-01, 9.4149e-01, 6.3729e-01, 7.6827e-01],\n",
       "         [9.7506e-01, 2.1653e-01, 1.6566e-01, 8.3743e-01],\n",
       "         [2.9364e-01, 3.1195e-01, 8.6243e-01, 5.8313e-01],\n",
       "         [6.6883e-02, 2.7530e-01, 2.6103e-02, 3.8539e-01],\n",
       "         [9.5625e-02, 9.3097e-01, 8.9994e-01, 9.7591e-01],\n",
       "         [5.7169e-01, 1.5104e-01, 6.3604e-01, 7.9135e-01],\n",
       "         [5.0190e-01, 3.9781e-01, 1.2227e-01, 1.2033e-01],\n",
       "         [4.4073e-01, 7.5632e-01, 4.6151e-01, 8.6086e-01],\n",
       "         [5.5927e-01, 1.8785e-01, 4.7125e-01, 4.5979e-01],\n",
       "         [9.1259e-01, 5.8126e-01, 2.5585e-01, 1.2254e-01],\n",
       "         [4.3425e-01, 1.4031e-01, 5.2981e-01, 6.8988e-01],\n",
       "         [7.3257e-01, 8.2765e-01, 3.1156e-01, 9.4873e-01],\n",
       "         [5.9106e-01, 9.4014e-01, 1.7112e-01, 8.1184e-01],\n",
       "         [4.9881e-01, 6.6054e-01, 1.5642e-01, 1.7590e-01],\n",
       "         [6.4579e-01, 2.3445e-01, 9.0251e-01, 5.8637e-01],\n",
       "         [6.6245e-01, 6.6789e-01, 5.7191e-01, 4.9479e-01],\n",
       "         [9.3468e-02, 8.6228e-01, 3.1208e-01, 6.0619e-02],\n",
       "         [2.9091e-02, 5.6100e-01, 7.8672e-01, 2.0875e-01],\n",
       "         [5.4506e-01, 8.3130e-01, 6.0225e-01, 7.8024e-02],\n",
       "         [3.4403e-01, 5.2074e-01, 9.0797e-01, 6.2229e-01],\n",
       "         [2.6991e-01, 2.3489e-01, 3.5331e-01, 3.0846e-01],\n",
       "         [3.7245e-01, 1.4209e-01, 2.6778e-01, 4.0100e-03],\n",
       "         [2.4241e-01, 7.0678e-01, 6.2711e-01, 8.3375e-01],\n",
       "         [8.9032e-02, 9.5266e-01, 8.2357e-01, 1.5301e-04],\n",
       "         [7.3492e-01, 5.6768e-01, 3.5774e-01, 2.8459e-01],\n",
       "         [9.8599e-01, 9.5469e-01, 4.9341e-01, 3.7670e-01],\n",
       "         [5.8036e-01, 5.0797e-01, 3.4183e-01, 9.8584e-01],\n",
       "         [8.7679e-01, 3.4146e-01, 4.3295e-01, 9.1700e-01],\n",
       "         [3.2671e-01, 8.6763e-01, 7.5475e-01, 8.1773e-01],\n",
       "         [6.2849e-01, 2.8068e-01, 4.1746e-01, 8.0787e-01]]),\n",
       " tensor([0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n",
       "         1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,\n",
       "         2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3]))"
      ]
     },
     "execution_count": 41,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "x = torch.rand(100, 4)\n",
    "y = torch.rand(100, 4)\n",
    "y = torch.argmax(y, -1)\n",
    "\n",
    "# return x[new_indices_], y[new_indices_]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 34,
   "metadata": {},
   "outputs": [
    {
     "ename": "NameError",
     "evalue": "name 'new_indices_' is not defined",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mNameError\u001b[0m                                 Traceback (most recent call last)",
      "\u001b[0;32m<ipython-input-34-7242e2f088a7>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mnew_indices_\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
      "\u001b[0;31mNameError\u001b[0m: name 'new_indices_' is not defined"
     ]
    }
   ],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 31,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "tensor([31., 22., 22., 25.]) tensor([ 5, 22, 16, 11])\n",
      "[tensor([ 9,  0, 11,  1,  8]), tensor([56, 39, 55, 85, 25, 10, 21, 47, 89, 20,  7, 12, 43, 66, 26, 14, 32, 40,\n",
      "        76, 16, 79, 67]), tensor([72,  4, 52, 22, 17, 63, 81,  2, 33,  3, 64, 38, 28, 75,  6, 37]), tensor([57, 58,  5, 23, 19, 35, 49, 51, 13, 50, 36])]\n"
     ]
    }
   ],
   "source": [
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "interpreter": {
   "hash": "31f2aee4e71d21fbe5cf8b01ff0e069b9275f58929596ceb00d14d90e3e16cd6"
  },
  "kernelspec": {
   "display_name": "Python 3.8.5 64-bit",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.8.5"
  },
  "orig_nbformat": 4
 },
 "nbformat": 4,
 "nbformat_minor": 2
}