{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "C:\\Users\\Administrator\\Anaconda3\\lib\\site-packages\\socks.py:58: DeprecationWarning: Using or importing the ABCs from 'collections' instead of from 'collections.abc' is deprecated, and in 3.8 it will stop working\n",
      "  from collections import Callable\n",
      "C:\\Users\\Administrator\\Anaconda3\\lib\\site-packages\\Cython\\Utils.py:231: DeprecationWarning: invalid escape sequence \\s\n",
      "  _match_file_encoding = re.compile(u\"coding[:=]\\s*([-\\w.]+)\").search\n",
      "C:\\Users\\Administrator\\Anaconda3\\lib\\site-packages\\scipy\\linalg\\__init__.py:212: DeprecationWarning: The module numpy.dual is deprecated.  Instead of using dual, use the functions directly from numpy or scipy.\n",
      "  from numpy.dual import register_func\n",
      "C:\\Users\\Administrator\\Anaconda3\\lib\\site-packages\\scipy\\special\\orthogonal.py:81: DeprecationWarning: `np.int` is a deprecated alias for the builtin `int`. To silence this warning, use `int` by itself. Doing this will not modify any behavior and is safe. When replacing `np.int`, you may wish to use e.g. `np.int64` or `np.int32` to specify the precision. If you wish to review your current use, check the release note link for additional information.\n",
      "Deprecated in NumPy 1.20; for more details and guidance: https://numpy.org/devdocs/release/1.20.0-notes.html#deprecations\n",
      "  from numpy import (exp, inf, pi, sqrt, floor, sin, cos, around, int,\n",
      "C:\\Users\\Administrator\\Anaconda3\\lib\\site-packages\\scipy\\sparse\\sputils.py:16: DeprecationWarning: `np.typeDict` is a deprecated alias for `np.sctypeDict`.\n",
      "  supported_dtypes = [np.typeDict[x] for x in supported_dtypes]\n",
      "C:\\Users\\Administrator\\Anaconda3\\lib\\site-packages\\matplotlib\\colors.py:53: DeprecationWarning: Using or importing the ABCs from 'collections' instead of from 'collections.abc' is deprecated, and in 3.8 it will stop working\n",
      "  from collections import Sized\n",
      "2025-04-28 09:03:48,898-WARNING: type object 'QuantizationTransformPass' has no attribute '_supported_quantizable_op_type'\n",
      "2025-04-28 09:03:48,898-WARNING: If you want to use training-aware and post-training quantization, please use Paddle >= 1.8.4 or develop version\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "2025-04-28 09:03:51 [INFO]\tStarting to read file list from dataset...\n",
      "2025-04-28 09:03:51 [INFO]\t1260 samples in file ./train.txt\n",
      "2025-04-28 09:03:51 [INFO]\tStarting to read file list from dataset...\n",
      "2025-04-28 09:03:51 [INFO]\t317 samples in file ./val.txt\n"
     ]
    }
   ],
   "source": [
    "import paddlex as pdx\n",
    "from paddlex import transforms\n",
    "\n",
    "# Training augmentation: random crop/flip plus color jitter, then normalize.\n",
    "train_transforms = transforms.Compose([\n",
    "    transforms.RandomCrop(crop_size=224),\n",
    "    transforms.RandomHorizontalFlip(),\n",
    "    transforms.RandomDistort(\n",
    "        brightness_range=0.9, brightness_prob=0.5,\n",
    "        contrast_range=0.9, contrast_prob=0.5,\n",
    "        saturation_range=0.9, saturation_prob=0.5,\n",
    "        hue_range=18, hue_prob=0.5),\n",
    "    transforms.Normalize(),\n",
    "])\n",
    "\n",
    "# Deterministic evaluation preprocessing: short-side resize + center crop.\n",
    "val_transforms = transforms.Compose([\n",
    "    transforms.ResizeByShort(short_size=256),\n",
    "    transforms.CenterCrop(crop_size=224),\n",
    "    transforms.Normalize(),\n",
    "])\n",
    "\n",
    "# Datasets: 1260 train / 317 val samples (per the logged output below).\n",
    "train_dataset = pdx.datasets.ImageNet(\n",
    "    data_dir='garbage',\n",
    "    file_list='./train.txt',\n",
    "    label_list='./labels.txt',\n",
    "    transforms=train_transforms,\n",
    "    shuffle=True,\n",
    ")\n",
    "val_dataset = pdx.datasets.ImageNet(\n",
    "    data_dir='garbage',\n",
    "    file_list='./val.txt',\n",
    "    label_list='./labels.txt',\n",
    "    transforms=val_transforms,\n",
    ")\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [],
   "source": [
    "import paddlex as pdx\n",
    "\n",
    "# One output unit per entry in labels.txt (3 classes, per the fc-layer\n",
    "# shape [512, 3] reported in the pretrained-weight loading warnings).\n",
    "num_classes = len(train_dataset.labels)\n",
    "model = pdx.cls.ResNet18(num_classes=num_classes)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "2025-04-28 09:05:22 [INFO]\tDownloading ResNet18_pretrained.pdparams from https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/ResNet18_pretrained.pdparams\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████████████████████████████████████████████████████████████████████| 69205/69205 [00:16<00:00, 4097.57KB/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "2025-04-28 09:05:39 [INFO]\tLoading pretrained model from output/ResNet18\\pretrain\\ResNet18_pretrained.pdparams\n",
      "2025-04-28 09:05:40 [WARNING]\t[SKIP] Shape of pretrained params fc.weight doesn't match.(Pretrained: (512, 1000), Actual: [512, 3])\n",
      "2025-04-28 09:05:40 [WARNING]\t[SKIP] Shape of pretrained params fc.bias doesn't match.(Pretrained: (1000,), Actual: [3])\n",
      "2025-04-28 09:05:40 [INFO]\tThere are 105/107 variables loaded into ResNet18.\n",
      "2025-04-28 09:06:22 [INFO]\t[TRAIN] Epoch=1/5, Step=10/78, loss=0.819960, acc1=0.812500, acc3=1.000000, lr=0.002000, time_each_step=4.2s, eta=0:27:59\n",
      "2025-04-28 09:07:03 [INFO]\t[TRAIN] Epoch=1/5, Step=20/78, loss=1.023495, acc1=0.625000, acc3=1.000000, lr=0.002000, time_each_step=4.16s, eta=0:27:1\n",
      "2025-04-28 09:07:45 [INFO]\t[TRAIN] Epoch=1/5, Step=30/78, loss=0.968072, acc1=0.625000, acc3=1.000000, lr=0.002000, time_each_step=4.2s, eta=0:26:34\n",
      "2025-04-28 09:08:27 [INFO]\t[TRAIN] Epoch=1/5, Step=40/78, loss=0.600682, acc1=0.750000, acc3=1.000000, lr=0.002000, time_each_step=4.14s, eta=0:25:32\n",
      "2025-04-28 09:09:08 [INFO]\t[TRAIN] Epoch=1/5, Step=50/78, loss=0.503861, acc1=0.750000, acc3=1.000000, lr=0.002000, time_each_step=4.15s, eta=0:24:52\n",
      "2025-04-28 09:09:50 [INFO]\t[TRAIN] Epoch=1/5, Step=60/78, loss=0.560347, acc1=0.750000, acc3=1.000000, lr=0.002000, time_each_step=4.22s, eta=0:24:36\n",
      "2025-04-28 09:10:32 [INFO]\t[TRAIN] Epoch=1/5, Step=70/78, loss=0.969046, acc1=0.562500, acc3=1.000000, lr=0.002000, time_each_step=4.18s, eta=0:23:42\n",
      "2025-04-28 09:11:06 [INFO]\t[TRAIN] Epoch 1 finished, loss=0.7558386, acc1=0.67147434, acc3=1.0 .\n",
      "2025-04-28 09:11:06 [INFO]\tStart to evaluate(total_samples=317, total_steps=20)...\n",
      "2025-04-28 09:11:31 [INFO]\t[EVAL] Finished, Epoch=1, acc1=0.857933, acc3=1.000000 .\n",
      "2025-04-28 09:11:31 [INFO]\tModel saved in output/ResNet18\\best_model.\n",
      "2025-04-28 09:11:31 [INFO]\tCurrent evaluated best model on eval_dataset is epoch_1, acc1=0.8579326868057251\n",
      "2025-04-28 09:11:31 [INFO]\tModel saved in output/ResNet18\\epoch_1.\n",
      "2025-04-28 09:11:40 [INFO]\t[TRAIN] Epoch=2/5, Step=2/78, loss=0.600989, acc1=0.687500, acc3=1.000000, lr=0.002000, time_each_step=4.2s, eta=0:23:0\n",
      "2025-04-28 09:12:22 [INFO]\t[TRAIN] Epoch=2/5, Step=12/78, loss=0.560984, acc1=0.750000, acc3=1.000000, lr=0.002000, time_each_step=4.15s, eta=0:22:1\n",
      "2025-04-28 09:13:03 [INFO]\t[TRAIN] Epoch=2/5, Step=22/78, loss=0.400105, acc1=0.875000, acc3=1.000000, lr=0.002000, time_each_step=4.15s, eta=0:21:19\n",
      "2025-04-28 09:13:45 [INFO]\t[TRAIN] Epoch=2/5, Step=32/78, loss=0.174807, acc1=0.875000, acc3=1.000000, lr=0.002000, time_each_step=4.16s, eta=0:20:41\n",
      "2025-04-28 09:14:26 [INFO]\t[TRAIN] Epoch=2/5, Step=42/78, loss=0.391356, acc1=0.937500, acc3=1.000000, lr=0.002000, time_each_step=4.16s, eta=0:20:1\n",
      "2025-04-28 09:15:08 [INFO]\t[TRAIN] Epoch=2/5, Step=52/78, loss=0.618659, acc1=0.750000, acc3=1.000000, lr=0.002000, time_each_step=4.17s, eta=0:19:22\n",
      "2025-04-28 09:15:50 [INFO]\t[TRAIN] Epoch=2/5, Step=62/78, loss=0.652963, acc1=0.875000, acc3=1.000000, lr=0.002000, time_each_step=4.15s, eta=0:18:34\n",
      "2025-04-28 09:16:33 [INFO]\t[TRAIN] Epoch=2/5, Step=72/78, loss=0.427045, acc1=0.750000, acc3=1.000000, lr=0.002000, time_each_step=4.36s, eta=0:18:44\n",
      "2025-04-28 09:16:59 [INFO]\t[TRAIN] Epoch 2 finished, loss=0.5605527, acc1=0.76602566, acc3=1.0 .\n",
      "2025-04-28 09:16:59 [INFO]\tStart to evaluate(total_samples=317, total_steps=20)...\n",
      "2025-04-28 09:17:25 [INFO]\t[EVAL] Finished, Epoch=2, acc1=0.906250, acc3=1.000000 .\n",
      "2025-04-28 09:17:25 [INFO]\tModel saved in output/ResNet18\\best_model.\n",
      "2025-04-28 09:17:25 [INFO]\tCurrent evaluated best model on eval_dataset is epoch_2, acc1=0.90625\n",
      "2025-04-28 09:17:25 [INFO]\tModel saved in output/ResNet18\\epoch_2.\n",
      "2025-04-28 09:17:42 [INFO]\t[TRAIN] Epoch=3/5, Step=4/78, loss=0.491049, acc1=0.750000, acc3=1.000000, lr=0.002000, time_each_step=4.27s, eta=0:17:13\n",
      "2025-04-28 09:18:23 [INFO]\t[TRAIN] Epoch=3/5, Step=14/78, loss=0.451772, acc1=0.875000, acc3=1.000000, lr=0.002000, time_each_step=4.14s, eta=0:16:3\n",
      "2025-04-28 09:19:05 [INFO]\t[TRAIN] Epoch=3/5, Step=24/78, loss=0.279669, acc1=0.875000, acc3=1.000000, lr=0.002000, time_each_step=4.15s, eta=0:15:24\n",
      "2025-04-28 09:19:46 [INFO]\t[TRAIN] Epoch=3/5, Step=34/78, loss=0.877868, acc1=0.750000, acc3=1.000000, lr=0.002000, time_each_step=4.14s, eta=0:14:39\n",
      "2025-04-28 09:20:28 [INFO]\t[TRAIN] Epoch=3/5, Step=44/78, loss=0.965145, acc1=0.625000, acc3=1.000000, lr=0.002000, time_each_step=4.15s, eta=0:13:59\n",
      "2025-04-28 09:21:09 [INFO]\t[TRAIN] Epoch=3/5, Step=54/78, loss=0.946404, acc1=0.625000, acc3=1.000000, lr=0.002000, time_each_step=4.16s, eta=0:13:21\n",
      "2025-04-28 09:21:51 [INFO]\t[TRAIN] Epoch=3/5, Step=64/78, loss=0.310923, acc1=0.875000, acc3=1.000000, lr=0.002000, time_each_step=4.15s, eta=0:12:37\n",
      "2025-04-28 09:22:32 [INFO]\t[TRAIN] Epoch=3/5, Step=74/78, loss=1.207605, acc1=0.500000, acc3=1.000000, lr=0.002000, time_each_step=4.16s, eta=0:11:56\n",
      "2025-04-28 09:22:49 [INFO]\t[TRAIN] Epoch 3 finished, loss=0.5291262, acc1=0.7796474, acc3=1.0 .\n",
      "2025-04-28 09:22:49 [INFO]\tStart to evaluate(total_samples=317, total_steps=20)...\n",
      "2025-04-28 09:23:15 [INFO]\t[EVAL] Finished, Epoch=3, acc1=0.833654, acc3=1.000000 .\n",
      "2025-04-28 09:23:15 [INFO]\tCurrent evaluated best model on eval_dataset is epoch_2, acc1=0.90625\n",
      "2025-04-28 09:23:15 [INFO]\tModel saved in output/ResNet18\\epoch_3.\n",
      "2025-04-28 09:23:41 [INFO]\t[TRAIN] Epoch=4/5, Step=6/78, loss=0.907304, acc1=0.750000, acc3=1.000000, lr=0.002000, time_each_step=4.22s, eta=0:10:58\n",
      "2025-04-28 09:24:22 [INFO]\t[TRAIN] Epoch=4/5, Step=16/78, loss=0.249740, acc1=0.875000, acc3=1.000000, lr=0.002000, time_each_step=4.16s, eta=0:10:7\n",
      "2025-04-28 09:25:04 [INFO]\t[TRAIN] Epoch=4/5, Step=26/78, loss=0.228206, acc1=0.937500, acc3=1.000000, lr=0.002000, time_each_step=4.16s, eta=0:9:26\n",
      "2025-04-28 09:25:45 [INFO]\t[TRAIN] Epoch=4/5, Step=36/78, loss=0.652219, acc1=0.750000, acc3=1.000000, lr=0.002000, time_each_step=4.13s, eta=0:8:42\n",
      "2025-04-28 09:26:27 [INFO]\t[TRAIN] Epoch=4/5, Step=46/78, loss=0.265076, acc1=0.875000, acc3=1.000000, lr=0.002000, time_each_step=4.15s, eta=0:8:2\n",
      "2025-04-28 09:27:08 [INFO]\t[TRAIN] Epoch=4/5, Step=56/78, loss=0.828683, acc1=0.750000, acc3=1.000000, lr=0.002000, time_each_step=4.13s, eta=0:7:19\n",
      "2025-04-28 09:27:49 [INFO]\t[TRAIN] Epoch=4/5, Step=66/78, loss=0.180896, acc1=1.000000, acc3=1.000000, lr=0.002000, time_each_step=4.12s, eta=0:6:37\n",
      "2025-04-28 09:28:31 [INFO]\t[TRAIN] Epoch=4/5, Step=76/78, loss=0.482060, acc1=0.687500, acc3=1.000000, lr=0.002000, time_each_step=4.13s, eta=0:5:56\n",
      "2025-04-28 09:28:39 [INFO]\t[TRAIN] Epoch 4 finished, loss=0.44931304, acc1=0.82211536, acc3=1.0 .\n",
      "2025-04-28 09:28:39 [INFO]\tStart to evaluate(total_samples=317, total_steps=20)...\n",
      "2025-04-28 09:29:04 [INFO]\t[EVAL] Finished, Epoch=4, acc1=0.906250, acc3=1.000000 .\n",
      "2025-04-28 09:29:04 [INFO]\tCurrent evaluated best model on eval_dataset is epoch_2, acc1=0.90625\n",
      "2025-04-28 09:29:05 [INFO]\tModel saved in output/ResNet18\\epoch_4.\n",
      "2025-04-28 09:29:38 [INFO]\t[TRAIN] Epoch=5/5, Step=8/78, loss=0.469012, acc1=0.812500, acc3=1.000000, lr=0.002000, time_each_step=4.18s, eta=0:4:52\n",
      "2025-04-28 09:30:19 [INFO]\t[TRAIN] Epoch=5/5, Step=18/78, loss=0.361736, acc1=0.875000, acc3=1.000000, lr=0.002000, time_each_step=4.13s, eta=0:4:7\n",
      "2025-04-28 09:31:01 [INFO]\t[TRAIN] Epoch=5/5, Step=28/78, loss=0.272505, acc1=0.875000, acc3=1.000000, lr=0.002000, time_each_step=4.17s, eta=0:3:28\n",
      "2025-04-28 09:31:43 [INFO]\t[TRAIN] Epoch=5/5, Step=38/78, loss=0.513100, acc1=0.812500, acc3=1.000000, lr=0.002000, time_each_step=4.15s, eta=0:2:45\n",
      "2025-04-28 09:32:24 [INFO]\t[TRAIN] Epoch=5/5, Step=48/78, loss=0.140279, acc1=0.937500, acc3=1.000000, lr=0.002000, time_each_step=4.15s, eta=0:2:4\n",
      "2025-04-28 09:33:06 [INFO]\t[TRAIN] Epoch=5/5, Step=58/78, loss=0.414931, acc1=0.875000, acc3=1.000000, lr=0.002000, time_each_step=4.14s, eta=0:1:22\n",
      "2025-04-28 09:33:47 [INFO]\t[TRAIN] Epoch=5/5, Step=68/78, loss=1.235290, acc1=0.687500, acc3=1.000000, lr=0.002000, time_each_step=4.15s, eta=0:0:41\n",
      "2025-04-28 09:34:28 [INFO]\t[TRAIN] Epoch=5/5, Step=78/78, loss=0.456953, acc1=0.750000, acc3=1.000000, lr=0.002000, time_each_step=4.12s, eta=0:0:0\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "2025-04-28 09:34:28 [INFO]\t[TRAIN] Epoch 5 finished, loss=0.44310722, acc1=0.8261218, acc3=1.0 .\n",
      "2025-04-28 09:34:28 [INFO]\tStart to evaluate(total_samples=317, total_steps=20)...\n",
      "2025-04-28 09:34:54 [INFO]\t[EVAL] Finished, Epoch=5, acc1=0.933654, acc3=1.000000 .\n",
      "2025-04-28 09:34:54 [INFO]\tModel saved in output/ResNet18\\best_model.\n",
      "2025-04-28 09:34:54 [INFO]\tCurrent evaluated best model on eval_dataset is epoch_5, acc1=0.9336538314819336\n",
      "2025-04-28 09:34:54 [INFO]\tModel saved in output/ResNet18\\epoch_5.\n",
      "2025-04-28 09:34:55 [INFO]\tModel[ResNet18] loaded.\n"
     ]
    },
    {
     "ename": "error",
     "evalue": "OpenCV(4.11.0) D:\\a\\opencv-python\\opencv-python\\opencv\\modules\\imgproc\\src\\color.cpp:199: error: (-215:Assertion failed) !_src.empty() in function 'cv::cvtColor'\n",
     "output_type": "error",
     "traceback": [
      "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[1;31merror\u001b[0m                                     Traceback (most recent call last)",
      "\u001b[1;32m<ipython-input-10-a374f1cce9bc>\u001b[0m in \u001b[0;36m<module>\u001b[1;34m()\u001b[0m\n\u001b[0;32m     10\u001b[0m \u001b[0mmodel\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mpdx\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mload_model\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m'output/ResNet18/best_model'\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m     11\u001b[0m \u001b[0mimage_name\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;34m'./data/garbage/paper/paper10.jpg'\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 12\u001b[1;33m \u001b[0mresult\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mmodel\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mpredict\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mimage_name\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m     13\u001b[0m \u001b[0mprint\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m'Predict Result:'\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mresult\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m     14\u001b[0m \u001b[0mnumber\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mresult\u001b[0m\u001b[1;33m[\u001b[0m\u001b[1;36m0\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m[\u001b[0m\u001b[1;34m'category'\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;32m~\\Anaconda3\\lib\\site-packages\\paddlex\\cv\\models\\classifier.py\u001b[0m in \u001b[0;36mpredict\u001b[1;34m(self, img_file, transforms, topk)\u001b[0m\n\u001b[0;32m    488\u001b[0m         \u001b[1;32melse\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    489\u001b[0m             \u001b[0mimages\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mimg_file\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 490\u001b[1;33m         \u001b[0mim\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_preprocess\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mimages\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mtransforms\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m    491\u001b[0m         \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mnet\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0meval\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    492\u001b[0m         \u001b[1;32mwith\u001b[0m \u001b[0mpaddle\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mno_grad\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;32m~\\Anaconda3\\lib\\site-packages\\paddlex\\cv\\models\\classifier.py\u001b[0m in \u001b[0;36m_preprocess\u001b[1;34m(self, images, transforms, to_tensor)\u001b[0m\n\u001b[0;32m    505\u001b[0m         \u001b[1;32mfor\u001b[0m \u001b[0mim\u001b[0m \u001b[1;32min\u001b[0m \u001b[0mimages\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    506\u001b[0m             \u001b[0msample\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;33m{\u001b[0m\u001b[1;34m'image'\u001b[0m\u001b[1;33m:\u001b[0m \u001b[0mim\u001b[0m\u001b[1;33m}\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 507\u001b[1;33m             \u001b[0mbatch_im\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mappend\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mtransforms\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0msample\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m    508\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    509\u001b[0m         \u001b[1;32mif\u001b[0m \u001b[0mto_tensor\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;32m~\\Anaconda3\\lib\\site-packages\\paddlex\\cv\\transforms\\operators.py\u001b[0m in \u001b[0;36m__call__\u001b[1;34m(self, sample)\u001b[0m\n\u001b[0;32m    117\u001b[0m             \u001b[1;32mdel\u001b[0m \u001b[0msample\u001b[0m\u001b[1;33m[\u001b[0m\u001b[1;34m'mask'\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    118\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 119\u001b[1;33m         \u001b[0msample\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mdecode_image\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0msample\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m    120\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    121\u001b[0m         \u001b[1;32mfor\u001b[0m \u001b[0mop\u001b[0m \u001b[1;32min\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mtransforms\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;32m~\\Anaconda3\\lib\\site-packages\\paddlex\\cv\\transforms\\operators.py\u001b[0m in \u001b[0;36m__call__\u001b[1;34m(self, sample)\u001b[0m\n\u001b[0;32m     80\u001b[0m             \u001b[0msample\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;33m[\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mapply\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0ms\u001b[0m\u001b[1;33m)\u001b[0m \u001b[1;32mfor\u001b[0m \u001b[0ms\u001b[0m \u001b[1;32min\u001b[0m \u001b[0msample\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m     81\u001b[0m         \u001b[1;32melse\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 82\u001b[1;33m             \u001b[0msample\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mapply\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0msample\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m     83\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m     84\u001b[0m         \u001b[1;32mreturn\u001b[0m \u001b[0msample\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;32m~\\Anaconda3\\lib\\site-packages\\paddlex\\cv\\transforms\\operators.py\u001b[0m in \u001b[0;36mapply\u001b[1;34m(self, sample)\u001b[0m\n\u001b[0;32m    187\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    188\u001b[0m         \"\"\"\n\u001b[1;32m--> 189\u001b[1;33m         \u001b[0msample\u001b[0m\u001b[1;33m[\u001b[0m\u001b[1;34m'image'\u001b[0m\u001b[1;33m]\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mapply_im\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0msample\u001b[0m\u001b[1;33m[\u001b[0m\u001b[1;34m'image'\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m    190\u001b[0m         \u001b[1;32mif\u001b[0m \u001b[1;34m'mask'\u001b[0m \u001b[1;32min\u001b[0m \u001b[0msample\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    191\u001b[0m             \u001b[0msample\u001b[0m\u001b[1;33m[\u001b[0m\u001b[1;34m'mask'\u001b[0m\u001b[1;33m]\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mapply_mask\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0msample\u001b[0m\u001b[1;33m[\u001b[0m\u001b[1;34m'mask'\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;32m~\\Anaconda3\\lib\\site-packages\\paddlex\\cv\\transforms\\operators.py\u001b[0m in \u001b[0;36mapply_im\u001b[1;34m(self, im_path)\u001b[0m\n\u001b[0;32m    162\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    163\u001b[0m         \u001b[1;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mto_rgb\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 164\u001b[1;33m             \u001b[0mimage\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mcv2\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mcvtColor\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mimage\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mcv2\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mCOLOR_BGR2RGB\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m    165\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    166\u001b[0m         \u001b[1;32mreturn\u001b[0m \u001b[0mimage\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;31merror\u001b[0m: OpenCV(4.11.0) D:\\a\\opencv-python\\opencv-python\\opencv\\modules\\imgproc\\src\\color.cpp:199: error: (-215:Assertion failed) !_src.empty() in function 'cv::cvtColor'\n"
     ]
    }
   ],
   "source": [
    "# NOTE(review): lr_decay_epochs=[80, 100, 150] all lie beyond num_epochs=5,\n",
    "# so the learning rate never decays during this run -- confirm whether a\n",
    "# decay schedule within the 5 epochs was intended.\n",
    "model.train(num_epochs=5,\n",
    "            train_dataset=train_dataset,\n",
    "            train_batch_size=16,\n",
    "            eval_dataset=val_dataset,\n",
    "            lr_decay_epochs=[80, 100, 150],\n",
    "            save_interval_epochs=1,\n",
    "            learning_rate=0.002,\n",
    "            save_dir='output/ResNet18',\n",
    "            use_vdl=True)\n",
    "\n",
    "# Reload the best checkpoint and run a sanity-check prediction.\n",
    "model = pdx.load_model('output/ResNet18/best_model')\n",
    "# Fix: images live under data_dir='garbage' (see the dataset definition),\n",
    "# not './data/garbage'. The old path made cv2 read an empty image, which\n",
    "# crashed cvtColor with the (-215) '!_src.empty()' assertion in the\n",
    "# traceback above.\n",
    "image_name = './garbage/paper/paper10.jpg'\n",
    "result = model.predict(image_name)\n",
    "print('Predict Result:',result)\n",
    "number = result[0]['category']\n",
    "number"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.0"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
