{
 "metadata": {
  "orig_nbformat": 2,
  "kernelspec": {
   "name": "python3",
   "display_name": "Python 3",
   "language": "python"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2,
 "cells": [
  {
   "source": [
     "# Analyzing torch.arange \n",
    "\n",
    "y = torch.arange(0, output_size).unsqueeze(1).repeat(1, output_size)"
   ],
   "cell_type": "markdown",
   "metadata": {}
  },
  {
   "cell_type": "code",
   "execution_count": 109,
   "metadata": {},
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "batch_size 1\noutput_size 16\ntensor([[[[[2.6735e+01, 2.6455e+01, 7.0358e+01,  ..., 2.6317e-03,\n            7.1214e-03, 3.5145e-02],\n           [2.6735e+01, 2.5542e+01, 8.8886e+01,  ..., 7.1214e-03,\n            1.1687e-02, 3.1144e-02],\n           [1.9919e+01, 2.2165e+01, 2.5636e+02,  ..., 3.1144e-02,\n            5.0331e-02, 5.6652e-02]],\n\n          [[4.7500e+01, 2.7970e+01, 1.0237e+02,  ..., 7.5541e-04,\n            2.0507e-03, 2.5957e-02],\n           [4.8999e+01, 2.6735e+01, 1.2933e+02,  ..., 1.5012e-03,\n            3.3766e-03, 1.7986e-02],\n           [5.2843e+01, 2.3394e+01, 3.0923e+02,  ..., 7.5772e-03,\n            1.4957e-02, 2.5957e-02]],\n\n          [[8.0500e+01, 2.7745e+01, 1.0237e+02,  ..., 2.0507e-03,\n            2.1827e-03, 5.3403e-02],\n           [8.0500e+01, 2.7005e+01, 1.6606e+02,  ..., 5.5549e-03,\n            5.5549e-03, 3.1144e-02],\n           [8.0500e+01, 2.3781e+01, 3.0923e+02,  ..., 9.7085e-03,\n            1.4957e-02, 3.5145e-02]],\n\n          ...,\n\n          [[4.3638e+02, 2.3394e+01, 9.0341e+01,  ..., 1.5012e-03,\n            4.0701e-03, 7.5772e-03],\n           [4.3638e+02, 2.4157e+01, 1.4655e+02,  ..., 4.0701e-03,\n            2.2977e-02, 7.5772e-03],\n           [4.3050e+02, 2.3781e+01, 3.5040e+02,  ..., 1.3223e-02,\n            1.1920e-01, 2.0332e-02]],\n\n          [[4.7059e+02, 2.3394e+01, 7.9726e+01,  ..., 3.5697e-04,\n            1.1695e-03, 3.5936e-03],\n           [4.6792e+02, 2.3781e+01, 1.0072e+02,  ..., 1.5012e-03,\n            6.6929e-03, 4.0701e-03],\n           [4.6350e+02, 2.3781e+01, 3.0923e+02,  ..., 1.6915e-02,\n            9.5349e-02, 2.0332e-02]],\n\n          [[4.8526e+02, 2.3394e+01, 6.2090e+01,  ..., 2.8009e-03,\n            4.6096e-03, 2.0332e-02],\n           [4.8555e+02, 2.2165e+01, 8.8886e+01,  ..., 9.7085e-03,\n            1.9124e-02, 2.7585e-02],\n           [4.9255e+02, 2.0385e+01, 2.9049e+02,  ..., 6.0087e-02,\n            1.3296e-01, 8.5099e-02]]],\n\n\n         [[[2.6455e+01, 
4.9990e+01, 5.1475e+01,  ..., 8.5590e-04,\n            1.4104e-03, 7.1214e-03],\n           [2.7970e+01, 4.9990e+01, 6.9225e+01,  ..., 2.0507e-03,\n            2.1827e-03, 9.1256e-03],\n           [2.0385e+01, 5.2385e+01, 2.5636e+02,  ..., 2.4423e-02,\n            2.7585e-02, 3.9639e-02]],\n\n          [[4.1005e+01, 4.9990e+01, 8.4867e+01,  ..., 4.8786e-04,\n            3.5697e-04, 4.3315e-03],\n           [4.4081e+01, 4.8500e+01, 1.0722e+02,  ..., 7.5541e-04,\n            3.5697e-04, 3.3766e-03],\n           [5.0965e+01, 5.0480e+01, 3.2917e+02,  ..., 7.1214e-03,\n            2.6317e-03, 1.9124e-02]],\n\n          [[7.7520e+01, 4.4081e+01, 7.4895e+01,  ..., 1.2448e-03,\n            3.5697e-04, 3.5936e-03],\n           [8.0000e+01, 4.8000e+01, 1.2933e+02,  ..., 5.2201e-03,\n            4.5831e-04, 3.5936e-03],\n           [8.0999e+01, 5.3734e+01, 3.5040e+02,  ..., 1.7986e-02,\n            4.9054e-03, 2.1615e-02]],\n\n          ...,\n\n          [[4.3939e+02, 4.5035e+01, 9.0341e+01,  ..., 2.8009e-03,\n            1.0322e-03, 1.7007e-03],\n           [4.3859e+02, 4.8000e+01, 1.2149e+02,  ..., 7.5772e-03,\n            4.3315e-03, 2.6317e-03],\n           [4.3200e+02, 5.5394e+01, 4.2266e+02,  ..., 3.5145e-02,\n            5.3403e-02, 2.0332e-02]],\n\n          [[4.7059e+02, 4.2707e+01, 7.4895e+01,  ..., 7.0967e-04,\n            3.5697e-04, 7.5541e-04],\n           [4.6792e+02, 4.4081e+01, 8.8886e+01,  ..., 1.5012e-03,\n            1.5978e-03, 1.2448e-03],\n           [4.6500e+02, 5.4585e+01, 3.7300e+02,  ..., 1.9124e-02,\n            6.3715e-02, 2.1615e-02]],\n\n          [[4.8555e+02, 4.2266e+01, 5.8328e+01,  ..., 2.8009e-03,\n            1.4104e-03, 5.5549e-03],\n           [4.8526e+02, 4.4555e+01, 6.9225e+01,  ..., 8.0620e-03,\n            7.1214e-03, 6.2899e-03],\n           [4.9401e+02, 5.2843e+01, 3.0923e+02,  ..., 4.2088e-02,\n            8.5099e-02, 5.3403e-02]]],\n\n\n         [[[2.4874e+01, 8.1496e+01, 5.8328e+01,  ..., 3.5936e-03,\n            3.8245e-03, 
2.1615e-02],\n           [2.6735e+01, 8.2480e+01, 7.3689e+01,  ..., 5.5549e-03,\n            3.3766e-03, 1.4957e-02],\n           [2.0385e+01, 8.5734e+01, 2.2624e+02,  ..., 3.3086e-02,\n            3.1144e-02, 3.9639e-02]],\n\n          [[4.2266e+01, 8.4385e+01, 7.9726e+01,  ..., 6.6929e-03,\n            1.4104e-03, 2.5957e-02],\n           [4.3157e+01, 8.3445e+01, 1.0072e+02,  ..., 4.3315e-03,\n            8.5590e-04, 1.1687e-02],\n           [4.9990e+01, 8.5734e+01, 2.7289e+02,  ..., 2.0332e-02,\n            4.3315e-03, 2.0332e-02]],\n\n          [[8.1496e+01, 8.0000e+01, 7.4895e+01,  ..., 1.1687e-02,\n            1.4104e-03, 2.5957e-02],\n           [8.0500e+01, 8.2480e+01, 1.2149e+02,  ..., 3.5145e-02,\n            1.3250e-03, 1.6915e-02],\n           [8.1496e+01, 8.6165e+01, 3.0923e+02,  ..., 7.1591e-02,\n            7.5772e-03, 3.5145e-02]],\n\n          ...,\n\n          [[4.3859e+02, 7.8010e+01, 7.9726e+01,  ..., 5.5549e-03,\n            1.0322e-03, 4.6096e-03],\n           [4.3638e+02, 7.8010e+01, 1.0072e+02,  ..., 1.7986e-02,\n            2.9810e-03, 7.1214e-03],\n           [4.3300e+02, 8.5293e+01, 3.7300e+02,  ..., 1.1920e-01,\n            2.9312e-02, 5.6652e-02]],\n\n          [[4.6450e+02, 7.8504e+01, 7.4895e+01,  ..., 4.9054e-03,\n            1.7007e-03, 3.5936e-03],\n           [4.6648e+02, 7.8010e+01, 1.0072e+02,  ..., 7.5772e-03,\n            5.2201e-03, 4.3315e-03],\n           [4.6745e+02, 8.6585e+01, 3.9706e+02,  ..., 7.1591e-02,\n            8.5099e-02, 5.0331e-02]],\n\n          [[4.8822e+02, 7.8010e+01, 6.2090e+01,  ..., 2.0332e-02,\n            1.1687e-02, 2.0332e-02],\n           [4.8822e+02, 7.9001e+01, 7.8442e+01,  ..., 2.5957e-02,\n            3.5145e-02, 1.9124e-02],\n           [4.9650e+02, 8.3919e+01, 3.2917e+02,  ..., 8.5099e-02,\n            1.2592e-01, 8.5099e-02]]],\n\n\n         ...,\n\n\n         [[[2.1734e+01, 4.3350e+02, 5.8328e+01,  ..., 1.5906e-02,\n            7.1214e-03, 1.2592e-01],\n           [2.4157e+01, 4.3050e+02, 
7.8442e+01,  ..., 2.4423e-02,\n            7.5772e-03, 9.0093e-02],\n           [1.8965e+01, 4.3250e+02, 2.7289e+02,  ..., 8.0357e-02,\n            5.0331e-02, 1.6452e-01]],\n\n          [[4.5035e+01, 4.3350e+02, 8.4867e+01,  ..., 2.4423e-02,\n            5.2201e-03, 1.5610e-01],\n           [4.6504e+01, 4.3100e+02, 1.0072e+02,  ..., 1.9124e-02,\n            5.9111e-03, 1.1920e-01],\n           [4.9496e+01, 4.3200e+02, 2.7289e+02,  ..., 7.5858e-02,\n            2.1615e-02, 2.5683e-01]],\n\n          [[8.2965e+01, 4.3200e+02, 7.0358e+01,  ..., 3.1144e-02,\n            1.0328e-02, 1.0088e-01],\n           [8.2480e+01, 4.3200e+02, 1.1413e+02,  ..., 5.3403e-02,\n            1.4957e-02, 1.2592e-01],\n           [8.0000e+01, 4.3050e+02, 2.9049e+02,  ..., 1.0669e-01,\n            4.2088e-02, 2.0181e-01]],\n\n          ...,\n\n          [[4.3399e+02, 4.3448e+02, 7.4895e+01,  ..., 4.4681e-02,\n            6.0087e-02, 8.0357e-02],\n           [4.3773e+02, 4.3300e+02, 1.1413e+02,  ..., 8.0357e-02,\n            1.4805e-01, 8.0357e-02],\n           [4.3816e+02, 4.3729e+02, 3.7300e+02,  ..., 2.3371e-01,\n            2.9421e-01, 2.9421e-01]],\n\n          [[4.6599e+02, 4.3497e+02, 1.0237e+02,  ..., 5.0331e-02,\n            1.1920e-01, 6.7547e-02],\n           [4.7100e+02, 4.3200e+02, 1.3767e+02,  ..., 4.2088e-02,\n            2.3371e-01, 4.4681e-02],\n           [4.7178e+02, 4.3684e+02, 3.5040e+02,  ..., 9.5349e-02,\n            2.5683e-01, 1.4034e-01]],\n\n          [[4.9700e+02, 4.3300e+02, 1.0897e+02,  ..., 1.2592e-01,\n            1.8243e-01, 8.0357e-02],\n           [4.9750e+02, 4.3100e+02, 1.2933e+02,  ..., 1.2592e-01,\n            3.2082e-01, 8.0357e-02],\n           [4.9799e+02, 4.3399e+02, 2.9049e+02,  ..., 1.4805e-01,\n            3.2082e-01, 1.6452e-01]]],\n\n\n         [[[2.0385e+01, 4.6973e+02, 6.6095e+01,  ..., 9.7085e-03,\n            9.7085e-03, 1.3296e-01],\n           [2.1734e+01, 4.6792e+02, 9.4619e+01,  ..., 1.6915e-02,\n            1.2432e-02, 9.5349e-02],\n  
         [1.8480e+01, 4.6745e+02, 3.5040e+02,  ..., 4.4681e-02,\n            4.4681e-02, 1.4034e-01]],\n\n          [[4.3157e+01, 4.6838e+02, 7.4895e+01,  ..., 2.7585e-02,\n            5.5549e-03, 1.9193e-01],\n           [4.4555e+01, 4.6648e+02, 1.0722e+02,  ..., 2.5957e-02,\n            1.4064e-02, 1.2592e-01],\n           [5.0480e+01, 4.6792e+02, 3.9706e+02,  ..., 7.1591e-02,\n            2.5957e-02, 1.3296e-01]],\n\n          [[8.4385e+01, 4.6450e+02, 6.2090e+01,  ..., 6.3715e-02,\n            1.7986e-02, 1.3296e-01],\n           [8.6585e+01, 4.6450e+02, 1.2933e+02,  ..., 8.5099e-02,\n            3.5145e-02, 1.2592e-01],\n           [8.3445e+01, 4.6300e+02, 3.9706e+02,  ..., 1.2592e-01,\n            4.7426e-02, 1.8243e-01]],\n\n          ...,\n\n          [[4.3638e+02, 4.6250e+02, 7.9726e+01,  ..., 3.3086e-02,\n            6.7547e-02, 2.7585e-02],\n           [4.3900e+02, 4.6152e+02, 1.3767e+02,  ..., 6.0087e-02,\n            1.4805e-01, 3.3086e-02],\n           [4.3978e+02, 4.6929e+02, 4.2266e+02,  ..., 1.6452e-01,\n            1.8243e-01, 1.4034e-01]],\n\n          [[4.6648e+02, 4.6350e+02, 9.6167e+01,  ..., 2.1615e-02,\n            7.1591e-02, 1.4957e-02],\n           [4.6929e+02, 4.5871e+02, 1.5600e+02,  ..., 2.0332e-02,\n            1.5610e-01, 1.4064e-02],\n           [4.7100e+02, 4.6648e+02, 4.2266e+02,  ..., 4.4681e-02,\n            1.9193e-01, 5.0331e-02]],\n\n          [[4.9600e+02, 4.6400e+02, 9.6167e+01,  ..., 4.4681e-02,\n            7.5858e-02, 2.0332e-02],\n           [4.9650e+02, 4.6201e+02, 1.2149e+02,  ..., 3.9639e-02,\n            1.4805e-01, 2.5957e-02],\n           [4.9700e+02, 4.6550e+02, 3.0923e+02,  ..., 6.7547e-02,\n            2.0181e-01, 8.0357e-02]]],\n\n\n         [[[2.0843e+01, 4.8941e+02, 7.4895e+01,  ..., 3.5145e-02,\n            8.0357e-02, 2.6894e-01],\n           [2.2995e+01, 4.9208e+02, 1.0722e+02,  ..., 8.0357e-02,\n            1.3296e-01, 2.6894e-01],\n           [1.7990e+01, 4.9897e+02, 3.2917e+02,  ..., 1.4034e-01,\n      
      2.8141e-01, 4.2250e-01]],\n\n          [[4.6504e+01, 4.9208e+02, 9.6167e+01,  ..., 1.4034e-01,\n            1.1280e-01, 3.7754e-01],\n           [5.0480e+01, 4.9208e+02, 1.2149e+02,  ..., 2.2270e-01,\n            1.7329e-01, 3.4865e-01],\n           [4.8500e+01, 4.9897e+02, 3.2917e+02,  ..., 2.5683e-01,\n            2.1207e-01, 4.2250e-01]],\n\n          [[8.1990e+01, 4.9500e+02, 9.0341e+01,  ..., 2.1207e-01,\n            1.6452e-01, 2.4509e-01],\n           [8.5734e+01, 4.9550e+02, 1.2933e+02,  ..., 3.7754e-01,\n            2.6894e-01, 3.0736e-01],\n           [8.2965e+01, 4.9700e+02, 3.2917e+02,  ..., 4.0733e-01,\n            2.9421e-01, 4.5326e-01]],\n\n          ...,\n\n          [[4.3497e+02, 4.9500e+02, 1.2348e+02,  ..., 1.7329e-01,\n            2.8141e-01, 1.1920e-01],\n           [4.3939e+02, 4.9352e+02, 1.4655e+02,  ..., 3.0736e-01,\n            4.6879e-01, 1.4034e-01],\n           [4.3592e+02, 4.9992e+02, 3.9706e+02,  ..., 3.7754e-01,\n            4.5326e-01, 3.3459e-01]],\n\n          [[4.6550e+02, 4.9208e+02, 1.1600e+02,  ..., 1.3296e-01,\n            1.7329e-01, 6.3715e-02],\n           [4.6838e+02, 4.9071e+02, 1.3767e+02,  ..., 2.0181e-01,\n            3.6297e-01, 8.0357e-02],\n           [4.6697e+02, 4.9945e+02, 3.7300e+02,  ..., 2.5683e-01,\n            3.6297e-01, 1.8243e-01]],\n\n          [[4.9450e+02, 4.9255e+02, 1.0237e+02,  ..., 1.6452e-01,\n            1.9193e-01, 8.5099e-02],\n           [4.9450e+02, 4.9303e+02, 1.2149e+02,  ..., 2.8141e-01,\n            3.4865e-01, 1.3296e-01],\n           [4.9650e+02, 5.0084e+02, 2.9049e+02,  ..., 2.6894e-01,\n            3.7754e-01, 2.3371e-01]]]]])\n"
     ]
    }
   ],
   "source": [
    "\n",
    "###############################\n",
     "## TODO: goal — re-implement the following Python function in C without using any libraries\n",
    "\n",
    "import torch\n",
    "import numpy\n",
    "\n",
     "class Detection:\n",
     "    # YOLO-style detection head decoder: converts raw network output into\n",
     "    # absolute (x, y, w, h, conf, class-prob) predictions.\n",
     "\n",
     "    def __init__(self):\n",
     "        # Anchor (w, h) pairs in grid-cell units; one commented-out set per head.\n",
     "        #self.__anchors = torch.Tensor( [(1.25, 1.625), (2.0, 3.75), (4.125, 2.875)])             # Anchors for small obj\n",
     "        #self.__anchors = torch.Tensor([(1.875, 3.8125), (3.875, 2.8125), (3.6875, 7.4375)]),    # Anchors for medium obj\n",
     "        self.__anchors = torch.Tensor([(3.625, 2.8125), (4.875, 6.1875), (11.65625, 10.1875)])   # Anchors for big obj\n",
     "\n",
     "        # Stride (input pixels per grid cell) matching the chosen anchor set.\n",
     "        #self.__stride = 8         # Anchors for small obj\n",
     "        #self.__stride = 16        # Anchors for medium obj\n",
     "        self.__stride = 32     # Anchors for big obj\n",
     "\n",
     "\n",
     "        self.__nA = len(self.__anchors) # = 3 anchors per grid cell\n",
     "        self.__nC = 20           # class num for voc dataset\n",
     "        \n",
     "\n",
     "        # When True, __decode returns the full-shape tensor; when False it is\n",
     "        # flattened to (-1, 5 + nC). See __decode's return statement.\n",
     "        self.training = True\n",
     "\n",
     "    ## TODO: what are the dtype and dimensions of parameter p? Any reference? Can a validation input/output set be provided?\n",
     "    def forward(self, p):\n",
     "        # Reshape raw head output [bs, nA*(5+nC), nG, nG] to\n",
     "        # [bs, nG, nG, nA, 5+nC], then decode a cloned copy into\n",
     "        # absolute-box predictions. Returns (raw permuted p, decoded p).\n",
     "        bs, nG = p.shape[0], p.shape[-1]\n",
     "        p = p.view(bs, self.__nA, 5 + self.__nC, nG, nG).permute(0, 3, 4, 1, 2)\n",
     "\n",
     "        p_de = self.__decode(p.clone())\n",
     "\n",
     "        return (p, p_de)\n",
     "\n",
    "\n",
     "    def __decode(self, p):\n",
     "        # Decode raw predictions p (shape [bs, nG, nG, nA, 5+nC] after forward's\n",
     "        # permute) into absolute xywh boxes in input-image pixels, plus\n",
     "        # sigmoid-activated objectness and class probabilities.\n",
     "        batch_size, output_size = p.shape[:2]\n",
     "        print(\"batch_size\",batch_size)\n",
     "        print(\"output_size\",output_size)\n",
     "\n",
     "        device = p.device\n",
     "        stride = self.__stride\n",
     "        anchors = (1.0 * self.__anchors).to(device)\n",
     "\n",
     "        # Split the last dimension: [0:2]=xy offsets, [2:4]=wh log-scales,\n",
     "        # [4:5]=objectness logit, [5:]=class logits.\n",
     "        conv_raw_dxdy = p[ :, :, :, :, 0:2]\n",
     "        conv_raw_dwdh = p[ :, :, :, :, 2:4]\n",
     "        conv_raw_conf = p[ :, :, :, :, 4:5]\n",
     "        conv_raw_prob = p[ :, :, :, :, 5:]\n",
     "\n",
     "\n",
     "        # Build per-cell (x, y) grid coordinates, expanded to\n",
     "        # [batch_size, nG, nG, 3, 2] to match the 3 anchors per cell.\n",
     "        y = torch.arange(0, output_size).unsqueeze(1).repeat(1, output_size)\n",
     "        x = torch.arange(0, output_size).unsqueeze(0).repeat(output_size, 1)\n",
     "        grid_xy = torch.stack([x, y], dim = -1)\n",
     "        grid_xy = grid_xy.unsqueeze(0).unsqueeze(3).repeat(batch_size, 1, 1, 3, 1).double().to(device)\n",
     "\n",
     "       \n",
     "\n",
     "        # Center = (sigmoid(offset) + cell index) * stride;\n",
     "        # size = exp(raw) * anchor * stride — both in input-image pixels.\n",
     "        pred_xy = (torch.sigmoid(conv_raw_dxdy) + grid_xy) * stride\n",
     "        pred_wh = (torch.exp(conv_raw_dwdh) * anchors) * stride\n",
     "        pred_xywh = torch.cat([pred_xy, pred_wh], dim = -1)\n",
     "        pred_conf = torch.sigmoid(conv_raw_conf)\n",
     "        pred_prob = torch.sigmoid(conv_raw_prob)\n",
     "        pred_bbox = torch.cat([pred_xywh, pred_conf, pred_prob], dim = -1)\n",
     "\n",
     "        \n",
     "        print(pred_bbox)\n",
     "\n",
     "        ## TODO: what is self.training? (flag set in __init__; controls the output shape below)\n",
     "        return pred_bbox.view(-1, 5 + self.__nC) if not self.training else pred_bbox\n",
     "\n",
    "\n",
    "    def load_p(self, file_name):\n",
    "        # p的shape small [1, 75, 64, 64]  medium[1, 75, 32, 32]  big[1, 75, 16, 16]\n",
    "\n",
    "        with open(file_name, 'r') as f1:\n",
    "            list1 = f1.readlines()\n",
    "        for i in range(0, len(list1)):\n",
    "            list1[i] = double(list1[i].rstrip('\\n'))\n",
    "        #print(list1)\n",
    "        p = torch.Tensor(list1)\n",
    "        p = p.view(1, 75, 16, 16)\n",
    "        \n",
    "        return p\n",
    "\n",
    "\n",
    "\n",
     "def main():\n",
     "   # NOTE(review): hardcoded absolute local path — make this configurable\n",
     "   # (e.g. a DATA_DIR constant) so the notebook runs on other machines.\n",
     "   dect =  Detection()\n",
     "   p = dect.load_p(\"/home/ningjian/Code/torch_forward_c/data/big_in.txt\")\n",
     "   result = dect.forward(p)\n",
     "   #print(result[1])\n",
     "\n",
     "\n",
     "\n",
     "\n",
     "if __name__ == '__main__':\n",
     "    main()\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "output_type": "execute_result",
     "data": {
      "text/plain": [
       "tensor([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])"
      ]
     },
     "metadata": {},
     "execution_count": 3
    }
   ],
   "source": [
    "torch.arange(0, 10)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 100,
   "metadata": {},
   "outputs": [],
   "source": [
    "A=torch.ones(2,2,3)\n",
    "B=2*torch.ones(2,2,2)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 101,
   "metadata": {},
   "outputs": [
    {
     "output_type": "execute_result",
     "data": {
      "text/plain": [
       "tensor([[[2., 2.],\n",
       "         [2., 2.]],\n",
       "\n",
       "        [[2., 2.],\n",
       "         [2., 2.]]])"
      ]
     },
     "metadata": {},
     "execution_count": 101
    }
   ],
   "source": [
    "B"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 102,
   "metadata": {},
   "outputs": [
    {
     "output_type": "execute_result",
     "data": {
      "text/plain": [
       "tensor([[[1., 1., 1.],\n",
       "         [1., 1., 1.]],\n",
       "\n",
       "        [[1., 1., 1.],\n",
       "         [1., 1., 1.]]])"
      ]
     },
     "metadata": {},
     "execution_count": 102
    }
   ],
   "source": [
    "A"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 103,
   "metadata": {},
   "outputs": [
    {
     "output_type": "execute_result",
     "data": {
      "text/plain": [
       "tensor([[[1., 1., 1., 2., 2.],\n",
       "         [1., 1., 1., 2., 2.]],\n",
       "\n",
       "        [[1., 1., 1., 2., 2.],\n",
       "         [1., 1., 1., 2., 2.]]])"
      ]
     },
     "metadata": {},
     "execution_count": 103
    }
   ],
   "source": [
    "torch.cat((A,B),-1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ]
}