{
  "nbformat": 4,
  "nbformat_minor": 0,
  "metadata": {
    "colab": {
      "provenance": [],
      "toc_visible": true
    },
    "kernelspec": {
      "name": "python3",
      "display_name": "Python 3"
    },
    "language_info": {
      "name": "python"
    },
    "widgets": {
      "application/vnd.jupyter.widget-state+json": {
        "630666c0f017489baf009296fb229e4b": {
          "model_module": "@jupyter-widgets/controls",
          "model_name": "HBoxModel",
          "model_module_version": "1.5.0",
          "state": {
            "_dom_classes": [],
            "_model_module": "@jupyter-widgets/controls",
            "_model_module_version": "1.5.0",
            "_model_name": "HBoxModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/controls",
            "_view_module_version": "1.5.0",
            "_view_name": "HBoxView",
            "box_style": "",
            "children": [
              "IPY_MODEL_94f442279c27439e9051ebf8d11aefd1",
              "IPY_MODEL_f3dedde1e3754c4d8d5c805c40701022",
              "IPY_MODEL_5855126bdd834db08c9cbd9f0341be6d"
            ],
            "layout": "IPY_MODEL_54273d2bae0d4fb1a96cd6601eaae2f6"
          }
        },
        "94f442279c27439e9051ebf8d11aefd1": {
          "model_module": "@jupyter-widgets/controls",
          "model_name": "HTMLModel",
          "model_module_version": "1.5.0",
          "state": {
            "_dom_classes": [],
            "_model_module": "@jupyter-widgets/controls",
            "_model_module_version": "1.5.0",
            "_model_name": "HTMLModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/controls",
            "_view_module_version": "1.5.0",
            "_view_name": "HTMLView",
            "description": "",
            "description_tooltip": null,
            "layout": "IPY_MODEL_4c8453c748d44a769c5892488dd976b1",
            "placeholder": "​",
            "style": "IPY_MODEL_8b08f135183e4d6f80563d6dddc853df",
            "value": "Loading checkpoint shards: 100%"
          }
        },
        "f3dedde1e3754c4d8d5c805c40701022": {
          "model_module": "@jupyter-widgets/controls",
          "model_name": "FloatProgressModel",
          "model_module_version": "1.5.0",
          "state": {
            "_dom_classes": [],
            "_model_module": "@jupyter-widgets/controls",
            "_model_module_version": "1.5.0",
            "_model_name": "FloatProgressModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/controls",
            "_view_module_version": "1.5.0",
            "_view_name": "ProgressView",
            "bar_style": "success",
            "description": "",
            "description_tooltip": null,
            "layout": "IPY_MODEL_537b9689b3c54e79ba38d3d12863eada",
            "max": 2,
            "min": 0,
            "orientation": "horizontal",
            "style": "IPY_MODEL_069f12badc7346aeb1bc717004a903ec",
            "value": 2
          }
        },
        "5855126bdd834db08c9cbd9f0341be6d": {
          "model_module": "@jupyter-widgets/controls",
          "model_name": "HTMLModel",
          "model_module_version": "1.5.0",
          "state": {
            "_dom_classes": [],
            "_model_module": "@jupyter-widgets/controls",
            "_model_module_version": "1.5.0",
            "_model_name": "HTMLModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/controls",
            "_view_module_version": "1.5.0",
            "_view_name": "HTMLView",
            "description": "",
            "description_tooltip": null,
            "layout": "IPY_MODEL_d4146379238b47a4ab397b426763f661",
            "placeholder": "​",
            "style": "IPY_MODEL_0e32116b1fc54e5987e23509901179bf",
            "value": " 2/2 [00:19&lt;00:00,  9.37s/it]"
          }
        },
        "54273d2bae0d4fb1a96cd6601eaae2f6": {
          "model_module": "@jupyter-widgets/base",
          "model_name": "LayoutModel",
          "model_module_version": "1.2.0",
          "state": {
            "_model_module": "@jupyter-widgets/base",
            "_model_module_version": "1.2.0",
            "_model_name": "LayoutModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/base",
            "_view_module_version": "1.2.0",
            "_view_name": "LayoutView",
            "align_content": null,
            "align_items": null,
            "align_self": null,
            "border": null,
            "bottom": null,
            "display": null,
            "flex": null,
            "flex_flow": null,
            "grid_area": null,
            "grid_auto_columns": null,
            "grid_auto_flow": null,
            "grid_auto_rows": null,
            "grid_column": null,
            "grid_gap": null,
            "grid_row": null,
            "grid_template_areas": null,
            "grid_template_columns": null,
            "grid_template_rows": null,
            "height": null,
            "justify_content": null,
            "justify_items": null,
            "left": null,
            "margin": null,
            "max_height": null,
            "max_width": null,
            "min_height": null,
            "min_width": null,
            "object_fit": null,
            "object_position": null,
            "order": null,
            "overflow": null,
            "overflow_x": null,
            "overflow_y": null,
            "padding": null,
            "right": null,
            "top": null,
            "visibility": null,
            "width": null
          }
        },
        "4c8453c748d44a769c5892488dd976b1": {
          "model_module": "@jupyter-widgets/base",
          "model_name": "LayoutModel",
          "model_module_version": "1.2.0",
          "state": {
            "_model_module": "@jupyter-widgets/base",
            "_model_module_version": "1.2.0",
            "_model_name": "LayoutModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/base",
            "_view_module_version": "1.2.0",
            "_view_name": "LayoutView",
            "align_content": null,
            "align_items": null,
            "align_self": null,
            "border": null,
            "bottom": null,
            "display": null,
            "flex": null,
            "flex_flow": null,
            "grid_area": null,
            "grid_auto_columns": null,
            "grid_auto_flow": null,
            "grid_auto_rows": null,
            "grid_column": null,
            "grid_gap": null,
            "grid_row": null,
            "grid_template_areas": null,
            "grid_template_columns": null,
            "grid_template_rows": null,
            "height": null,
            "justify_content": null,
            "justify_items": null,
            "left": null,
            "margin": null,
            "max_height": null,
            "max_width": null,
            "min_height": null,
            "min_width": null,
            "object_fit": null,
            "object_position": null,
            "order": null,
            "overflow": null,
            "overflow_x": null,
            "overflow_y": null,
            "padding": null,
            "right": null,
            "top": null,
            "visibility": null,
            "width": null
          }
        },
        "8b08f135183e4d6f80563d6dddc853df": {
          "model_module": "@jupyter-widgets/controls",
          "model_name": "DescriptionStyleModel",
          "model_module_version": "1.5.0",
          "state": {
            "_model_module": "@jupyter-widgets/controls",
            "_model_module_version": "1.5.0",
            "_model_name": "DescriptionStyleModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/base",
            "_view_module_version": "1.2.0",
            "_view_name": "StyleView",
            "description_width": ""
          }
        },
        "537b9689b3c54e79ba38d3d12863eada": {
          "model_module": "@jupyter-widgets/base",
          "model_name": "LayoutModel",
          "model_module_version": "1.2.0",
          "state": {
            "_model_module": "@jupyter-widgets/base",
            "_model_module_version": "1.2.0",
            "_model_name": "LayoutModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/base",
            "_view_module_version": "1.2.0",
            "_view_name": "LayoutView",
            "align_content": null,
            "align_items": null,
            "align_self": null,
            "border": null,
            "bottom": null,
            "display": null,
            "flex": null,
            "flex_flow": null,
            "grid_area": null,
            "grid_auto_columns": null,
            "grid_auto_flow": null,
            "grid_auto_rows": null,
            "grid_column": null,
            "grid_gap": null,
            "grid_row": null,
            "grid_template_areas": null,
            "grid_template_columns": null,
            "grid_template_rows": null,
            "height": null,
            "justify_content": null,
            "justify_items": null,
            "left": null,
            "margin": null,
            "max_height": null,
            "max_width": null,
            "min_height": null,
            "min_width": null,
            "object_fit": null,
            "object_position": null,
            "order": null,
            "overflow": null,
            "overflow_x": null,
            "overflow_y": null,
            "padding": null,
            "right": null,
            "top": null,
            "visibility": null,
            "width": null
          }
        },
        "069f12badc7346aeb1bc717004a903ec": {
          "model_module": "@jupyter-widgets/controls",
          "model_name": "ProgressStyleModel",
          "model_module_version": "1.5.0",
          "state": {
            "_model_module": "@jupyter-widgets/controls",
            "_model_module_version": "1.5.0",
            "_model_name": "ProgressStyleModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/base",
            "_view_module_version": "1.2.0",
            "_view_name": "StyleView",
            "bar_color": null,
            "description_width": ""
          }
        },
        "d4146379238b47a4ab397b426763f661": {
          "model_module": "@jupyter-widgets/base",
          "model_name": "LayoutModel",
          "model_module_version": "1.2.0",
          "state": {
            "_model_module": "@jupyter-widgets/base",
            "_model_module_version": "1.2.0",
            "_model_name": "LayoutModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/base",
            "_view_module_version": "1.2.0",
            "_view_name": "LayoutView",
            "align_content": null,
            "align_items": null,
            "align_self": null,
            "border": null,
            "bottom": null,
            "display": null,
            "flex": null,
            "flex_flow": null,
            "grid_area": null,
            "grid_auto_columns": null,
            "grid_auto_flow": null,
            "grid_auto_rows": null,
            "grid_column": null,
            "grid_gap": null,
            "grid_row": null,
            "grid_template_areas": null,
            "grid_template_columns": null,
            "grid_template_rows": null,
            "height": null,
            "justify_content": null,
            "justify_items": null,
            "left": null,
            "margin": null,
            "max_height": null,
            "max_width": null,
            "min_height": null,
            "min_width": null,
            "object_fit": null,
            "object_position": null,
            "order": null,
            "overflow": null,
            "overflow_x": null,
            "overflow_y": null,
            "padding": null,
            "right": null,
            "top": null,
            "visibility": null,
            "width": null
          }
        },
        "0e32116b1fc54e5987e23509901179bf": {
          "model_module": "@jupyter-widgets/controls",
          "model_name": "DescriptionStyleModel",
          "model_module_version": "1.5.0",
          "state": {
            "_model_module": "@jupyter-widgets/controls",
            "_model_module_version": "1.5.0",
            "_model_name": "DescriptionStyleModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/base",
            "_view_module_version": "1.2.0",
            "_view_name": "StyleView",
            "description_width": ""
          }
        },
        "af5cb14f70394c459de5352f3dc7890f": {
          "model_module": "@jupyter-widgets/controls",
          "model_name": "HBoxModel",
          "model_module_version": "1.5.0",
          "state": {
            "_dom_classes": [],
            "_model_module": "@jupyter-widgets/controls",
            "_model_module_version": "1.5.0",
            "_model_name": "HBoxModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/controls",
            "_view_module_version": "1.5.0",
            "_view_name": "HBoxView",
            "box_style": "",
            "children": [
              "IPY_MODEL_4fd33cfaafd64def978e088ff9b9762e",
              "IPY_MODEL_0f437f954d264ba1a6d6ad11a1b62702",
              "IPY_MODEL_adcb4ef53bae4956a9474ce2b8610a11"
            ],
            "layout": "IPY_MODEL_f1674313b2be48faab4d22f6b6268894"
          }
        },
        "4fd33cfaafd64def978e088ff9b9762e": {
          "model_module": "@jupyter-widgets/controls",
          "model_name": "HTMLModel",
          "model_module_version": "1.5.0",
          "state": {
            "_dom_classes": [],
            "_model_module": "@jupyter-widgets/controls",
            "_model_module_version": "1.5.0",
            "_model_name": "HTMLModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/controls",
            "_view_module_version": "1.5.0",
            "_view_name": "HTMLView",
            "description": "",
            "description_tooltip": null,
            "layout": "IPY_MODEL_a9747e3bf336442899f0144d04f719bf",
            "placeholder": "​",
            "style": "IPY_MODEL_ed8f0d52e98849bca20166f5bef662be",
            "value": "Loading checkpoint shards: 100%"
          }
        },
        "0f437f954d264ba1a6d6ad11a1b62702": {
          "model_module": "@jupyter-widgets/controls",
          "model_name": "FloatProgressModel",
          "model_module_version": "1.5.0",
          "state": {
            "_dom_classes": [],
            "_model_module": "@jupyter-widgets/controls",
            "_model_module_version": "1.5.0",
            "_model_name": "FloatProgressModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/controls",
            "_view_module_version": "1.5.0",
            "_view_name": "ProgressView",
            "bar_style": "success",
            "description": "",
            "description_tooltip": null,
            "layout": "IPY_MODEL_63903b40542e43938c5beb8abda3a0c9",
            "max": 2,
            "min": 0,
            "orientation": "horizontal",
            "style": "IPY_MODEL_3b41fe56ae5f4a0c871d5cc5fbb6ca37",
            "value": 2
          }
        },
        "adcb4ef53bae4956a9474ce2b8610a11": {
          "model_module": "@jupyter-widgets/controls",
          "model_name": "HTMLModel",
          "model_module_version": "1.5.0",
          "state": {
            "_dom_classes": [],
            "_model_module": "@jupyter-widgets/controls",
            "_model_module_version": "1.5.0",
            "_model_name": "HTMLModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/controls",
            "_view_module_version": "1.5.0",
            "_view_name": "HTMLView",
            "description": "",
            "description_tooltip": null,
            "layout": "IPY_MODEL_d56041c5ddb646b6aab0f47e9507171d",
            "placeholder": "​",
            "style": "IPY_MODEL_8644dcd2e00d4082b5bbf65aa84391cf",
            "value": " 2/2 [00:11&lt;00:00,  5.16s/it]"
          }
        },
        "f1674313b2be48faab4d22f6b6268894": {
          "model_module": "@jupyter-widgets/base",
          "model_name": "LayoutModel",
          "model_module_version": "1.2.0",
          "state": {
            "_model_module": "@jupyter-widgets/base",
            "_model_module_version": "1.2.0",
            "_model_name": "LayoutModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/base",
            "_view_module_version": "1.2.0",
            "_view_name": "LayoutView",
            "align_content": null,
            "align_items": null,
            "align_self": null,
            "border": null,
            "bottom": null,
            "display": null,
            "flex": null,
            "flex_flow": null,
            "grid_area": null,
            "grid_auto_columns": null,
            "grid_auto_flow": null,
            "grid_auto_rows": null,
            "grid_column": null,
            "grid_gap": null,
            "grid_row": null,
            "grid_template_areas": null,
            "grid_template_columns": null,
            "grid_template_rows": null,
            "height": null,
            "justify_content": null,
            "justify_items": null,
            "left": null,
            "margin": null,
            "max_height": null,
            "max_width": null,
            "min_height": null,
            "min_width": null,
            "object_fit": null,
            "object_position": null,
            "order": null,
            "overflow": null,
            "overflow_x": null,
            "overflow_y": null,
            "padding": null,
            "right": null,
            "top": null,
            "visibility": null,
            "width": null
          }
        },
        "a9747e3bf336442899f0144d04f719bf": {
          "model_module": "@jupyter-widgets/base",
          "model_name": "LayoutModel",
          "model_module_version": "1.2.0",
          "state": {
            "_model_module": "@jupyter-widgets/base",
            "_model_module_version": "1.2.0",
            "_model_name": "LayoutModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/base",
            "_view_module_version": "1.2.0",
            "_view_name": "LayoutView",
            "align_content": null,
            "align_items": null,
            "align_self": null,
            "border": null,
            "bottom": null,
            "display": null,
            "flex": null,
            "flex_flow": null,
            "grid_area": null,
            "grid_auto_columns": null,
            "grid_auto_flow": null,
            "grid_auto_rows": null,
            "grid_column": null,
            "grid_gap": null,
            "grid_row": null,
            "grid_template_areas": null,
            "grid_template_columns": null,
            "grid_template_rows": null,
            "height": null,
            "justify_content": null,
            "justify_items": null,
            "left": null,
            "margin": null,
            "max_height": null,
            "max_width": null,
            "min_height": null,
            "min_width": null,
            "object_fit": null,
            "object_position": null,
            "order": null,
            "overflow": null,
            "overflow_x": null,
            "overflow_y": null,
            "padding": null,
            "right": null,
            "top": null,
            "visibility": null,
            "width": null
          }
        },
        "ed8f0d52e98849bca20166f5bef662be": {
          "model_module": "@jupyter-widgets/controls",
          "model_name": "DescriptionStyleModel",
          "model_module_version": "1.5.0",
          "state": {
            "_model_module": "@jupyter-widgets/controls",
            "_model_module_version": "1.5.0",
            "_model_name": "DescriptionStyleModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/base",
            "_view_module_version": "1.2.0",
            "_view_name": "StyleView",
            "description_width": ""
          }
        },
        "63903b40542e43938c5beb8abda3a0c9": {
          "model_module": "@jupyter-widgets/base",
          "model_name": "LayoutModel",
          "model_module_version": "1.2.0",
          "state": {
            "_model_module": "@jupyter-widgets/base",
            "_model_module_version": "1.2.0",
            "_model_name": "LayoutModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/base",
            "_view_module_version": "1.2.0",
            "_view_name": "LayoutView",
            "align_content": null,
            "align_items": null,
            "align_self": null,
            "border": null,
            "bottom": null,
            "display": null,
            "flex": null,
            "flex_flow": null,
            "grid_area": null,
            "grid_auto_columns": null,
            "grid_auto_flow": null,
            "grid_auto_rows": null,
            "grid_column": null,
            "grid_gap": null,
            "grid_row": null,
            "grid_template_areas": null,
            "grid_template_columns": null,
            "grid_template_rows": null,
            "height": null,
            "justify_content": null,
            "justify_items": null,
            "left": null,
            "margin": null,
            "max_height": null,
            "max_width": null,
            "min_height": null,
            "min_width": null,
            "object_fit": null,
            "object_position": null,
            "order": null,
            "overflow": null,
            "overflow_x": null,
            "overflow_y": null,
            "padding": null,
            "right": null,
            "top": null,
            "visibility": null,
            "width": null
          }
        },
        "3b41fe56ae5f4a0c871d5cc5fbb6ca37": {
          "model_module": "@jupyter-widgets/controls",
          "model_name": "ProgressStyleModel",
          "model_module_version": "1.5.0",
          "state": {
            "_model_module": "@jupyter-widgets/controls",
            "_model_module_version": "1.5.0",
            "_model_name": "ProgressStyleModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/base",
            "_view_module_version": "1.2.0",
            "_view_name": "StyleView",
            "bar_color": null,
            "description_width": ""
          }
        },
        "d56041c5ddb646b6aab0f47e9507171d": {
          "model_module": "@jupyter-widgets/base",
          "model_name": "LayoutModel",
          "model_module_version": "1.2.0",
          "state": {
            "_model_module": "@jupyter-widgets/base",
            "_model_module_version": "1.2.0",
            "_model_name": "LayoutModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/base",
            "_view_module_version": "1.2.0",
            "_view_name": "LayoutView",
            "align_content": null,
            "align_items": null,
            "align_self": null,
            "border": null,
            "bottom": null,
            "display": null,
            "flex": null,
            "flex_flow": null,
            "grid_area": null,
            "grid_auto_columns": null,
            "grid_auto_flow": null,
            "grid_auto_rows": null,
            "grid_column": null,
            "grid_gap": null,
            "grid_row": null,
            "grid_template_areas": null,
            "grid_template_columns": null,
            "grid_template_rows": null,
            "height": null,
            "justify_content": null,
            "justify_items": null,
            "left": null,
            "margin": null,
            "max_height": null,
            "max_width": null,
            "min_height": null,
            "min_width": null,
            "object_fit": null,
            "object_position": null,
            "order": null,
            "overflow": null,
            "overflow_x": null,
            "overflow_y": null,
            "padding": null,
            "right": null,
            "top": null,
            "visibility": null,
            "width": null
          }
        },
        "8644dcd2e00d4082b5bbf65aa84391cf": {
          "model_module": "@jupyter-widgets/controls",
          "model_name": "DescriptionStyleModel",
          "model_module_version": "1.5.0",
          "state": {
            "_model_module": "@jupyter-widgets/controls",
            "_model_module_version": "1.5.0",
            "_model_name": "DescriptionStyleModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/base",
            "_view_module_version": "1.2.0",
            "_view_name": "StyleView",
            "description_width": ""
          }
        }
      }
    }
  },
  "cells": [
    {
      "cell_type": "markdown",
      "source": [
        "# 1. Load LLM"
      ],
      "metadata": {
        "id": "LwJk8w30rMwm"
      }
    },
    {
      "cell_type": "code",
      "source": [
        "import torch\n",
        "\n",
        "# Report GPU availability. Only query the device name when CUDA is\n",
        "# actually present -- torch.cuda.get_device_name(0) raises on a\n",
        "# CPU-only machine, which would break Restart & Run All elsewhere.\n",
        "print(torch.cuda.is_available())  # check GPU status\n",
        "print(torch.cuda.get_device_name(0) if torch.cuda.is_available() else \"no CUDA device\")  # show device name"
      ],
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "p0xiEY1arQRP",
        "outputId": "66e6205e-7675-4008-e9ad-7b495e75e1e4"
      },
      "execution_count": 98,
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "True\n",
            "NVIDIA GeForce RTX 2060\n"
          ]
        }
      ]
    },
    {
      "cell_type": "code",
      "source": [
        "# Use %pip (not !pip) so the install targets THIS kernel's environment,\n",
        "# not whatever `pip` happens to be first on the shell PATH.\n",
        "%pip install -q huggingface_hub"
      ],
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "CnLOUemLrRfA",
        "outputId": "fd7eaef9-6047-4bc6-f084-e37f9008fb97"
      },
      "execution_count": 99,
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "Requirement already satisfied: huggingface_hub in e:\\anaconda\\lib\\site-packages (0.29.2)"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "WARNING: Ignoring invalid distribution -umpy (e:\\anaconda\\lib\\site-packages)\n",
            "WARNING: Ignoring invalid distribution -umpy (e:\\anaconda\\lib\\site-packages)\n",
            "WARNING: Ignoring invalid distribution -umpy (e:\\anaconda\\lib\\site-packages)\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "\n",
            "Requirement already satisfied: filelock in e:\\anaconda\\lib\\site-packages (from huggingface_hub) (3.13.1)\n",
            "Requirement already satisfied: fsspec>=2023.5.0 in e:\\anaconda\\lib\\site-packages (from huggingface_hub) (2024.12.0)\n",
            "Requirement already satisfied: packaging>=20.9 in e:\\anaconda\\lib\\site-packages (from huggingface_hub) (24.2)\n",
            "Requirement already satisfied: pyyaml>=5.1 in e:\\anaconda\\lib\\site-packages (from huggingface_hub) (6.0.2)\n",
            "Requirement already satisfied: requests in e:\\anaconda\\lib\\site-packages (from huggingface_hub) (2.32.3)\n",
            "Requirement already satisfied: tqdm>=4.42.1 in e:\\anaconda\\lib\\site-packages (from huggingface_hub) (4.67.1)\n",
            "Requirement already satisfied: typing-extensions>=3.7.4.3 in e:\\anaconda\\lib\\site-packages (from huggingface_hub) (4.12.2)\n",
            "Requirement already satisfied: colorama in e:\\anaconda\\lib\\site-packages (from tqdm>=4.42.1->huggingface_hub) (0.4.6)\n",
            "Requirement already satisfied: charset-normalizer<4,>=2 in e:\\anaconda\\lib\\site-packages (from requests->huggingface_hub) (3.3.2)\n",
            "Requirement already satisfied: idna<4,>=2.5 in e:\\anaconda\\lib\\site-packages (from requests->huggingface_hub) (3.7)\n",
            "Requirement already satisfied: urllib3<3,>=1.21.1 in e:\\anaconda\\lib\\site-packages (from requests->huggingface_hub) (1.26.19)\n",
            "Requirement already satisfied: certifi>=2017.4.17 in e:\\anaconda\\lib\\site-packages (from requests->huggingface_hub) (2025.1.31)\n"
          ]
        }
      ]
    },
    {
      "cell_type": "code",
      "source": [
        "import transformers\n",
        "import torch\n",
        "from huggingface_hub import login\n",
        "import re"
      ],
      "metadata": {
        "id": "b8MbVLxurWra"
      },
      "execution_count": 100,
      "outputs": []
    },
    {
      "cell_type": "code",
      "source": [
        "import os\n",
        "from getpass import getpass\n",
        "from huggingface_hub import login\n",
        "\n",
        "# SECURITY: never hardcode an API token in a notebook -- the file gets\n",
        "# committed and shared, leaking the credential. (The token previously\n",
        "# embedded here should be revoked on huggingface.co.) Read it from the\n",
        "# environment, falling back to a hidden interactive prompt.\n",
        "hf_token = os.environ.get(\"HF_TOKEN\") or getpass(\"Hugging Face token: \")\n",
        "login(token=hf_token)\n",
        "print(\"Hugging Face login successful!\")"
      ],
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "hD42mIuIrYnd",
        "outputId": "dd1167d5-b117-4523-83f9-4db79de9c20f"
      },
      "execution_count": 101,
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "Hugging Face login successful!\n"
          ]
        }
      ]
    },
    {
      "cell_type": "code",
      "source": [
        "from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline\n",
        "\n",
        "# Load LLaMA-3.2-3B once; reused by every query below.\n",
        "MODEL_ID = \"meta-llama/Llama-3.2-3B-Instruct\"\n",
        "tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)\n",
        "model = AutoModelForCausalLM.from_pretrained(MODEL_ID)\n",
        "\n",
        "# Build the generation pipeline ONCE. The original rebuilt it inside\n",
        "# query_llm() on every call, paying the construction cost each time.\n",
        "_pipe = pipeline(\"text-generation\", model=model, tokenizer=tokenizer, device=0)\n",
        "\n",
        "def query_llm(prompt, max_new_tokens=100):\n",
        "    \"\"\"Generate a continuation of `prompt` with the shared pipeline.\n",
        "\n",
        "    max_new_tokens bounds only the generated tokens. The original used\n",
        "    max_length=100, which counts the prompt too and silently truncates\n",
        "    (or prevents) generation once the prompt itself nears 100 tokens.\n",
        "    \"\"\"\n",
        "    result = _pipe(prompt, max_new_tokens=max_new_tokens, do_sample=True, top_k=50, top_p=0.95)\n",
        "    return result[0][\"generated_text\"]\n",
        "\n",
        "print(\"LLaMA-3.2-3B load success！\")\n"
      ],
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 66,
          "referenced_widgets": [
            "630666c0f017489baf009296fb229e4b",
            "94f442279c27439e9051ebf8d11aefd1",
            "f3dedde1e3754c4d8d5c805c40701022",
            "5855126bdd834db08c9cbd9f0341be6d",
            "54273d2bae0d4fb1a96cd6601eaae2f6",
            "4c8453c748d44a769c5892488dd976b1",
            "8b08f135183e4d6f80563d6dddc853df",
            "537b9689b3c54e79ba38d3d12863eada",
            "069f12badc7346aeb1bc717004a903ec",
            "d4146379238b47a4ab397b426763f661",
            "0e32116b1fc54e5987e23509901179bf"
          ]
        },
        "id": "9Ku_NIxjsivf",
        "outputId": "ae63aa81-a2e6-49a5-ce95-c179efb4f562"
      },
      "execution_count": 102,
      "outputs": [
        {
          "output_type": "display_data",
          "data": {
            "text/plain": [
              "Loading checkpoint shards:   0%|          | 0/2 [00:00<?, ?it/s]"
            ],
            "application/vnd.jupyter.widget-view+json": {
              "version_major": 2,
              "version_minor": 0,
              "model_id": "630666c0f017489baf009296fb229e4b"
            }
          },
          "metadata": {}
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "LLaMA-3.2-3B load success！\n"
          ]
        }
      ]
    },
    {
      "cell_type": "markdown",
      "source": [
        "# 2. Data Preprocessing"
      ],
      "metadata": {
        "id": "VkIao3XopFJ4"
      }
    },
    {
      "cell_type": "code",
      "source": [
        "import sqlite3\n",
        "import pandas as pd\n",
        "\n",
        "DB_PATH = \"menu.db\"\n",
        "\n",
        "def init_db():\n",
        "    \"\"\"(Re)create an empty `menu` table, dropping any existing one.\"\"\"\n",
        "    conn = sqlite3.connect(DB_PATH)\n",
        "    try:\n",
        "        cursor = conn.cursor()\n",
        "        cursor.execute(\"DROP TABLE IF EXISTS menu\")\n",
        "        cursor.execute('''\n",
        "        CREATE TABLE menu (\n",
        "            id INTEGER PRIMARY KEY AUTOINCREMENT,\n",
        "            name TEXT NOT NULL,\n",
        "            description TEXT,\n",
        "            price REAL,\n",
        "            ingredients TEXT,\n",
        "            spicy INTEGER,\n",
        "            vegetarian INTEGER\n",
        "        )\n",
        "        ''')\n",
        "        conn.commit()\n",
        "    finally:\n",
        "        conn.close()  # close even if the DDL fails\n",
        "\n",
        "def load_data_from_csv(csv_path):\n",
        "    \"\"\"Load menu rows from `csv_path` into the `menu` table.\n",
        "\n",
        "    Expects columns: name, description, price, ingredients, spicy,\n",
        "    vegetarian. `ingredients` may arrive as a stringified list like\n",
        "    \"['a', 'b']\"; it is flattened to a plain comma-separated string.\n",
        "    \"\"\"\n",
        "    df = pd.read_csv(csv_path)\n",
        "\n",
        "    # normalize columns before insertion\n",
        "    df[\"ingredients\"] = df[\"ingredients\"].apply(lambda x: x.strip(\"[]\").replace(\"'\", \"\"))\n",
        "    df[\"spicy\"] = df[\"spicy\"].astype(int)\n",
        "    df[\"vegetarian\"] = df[\"vegetarian\"].astype(int)\n",
        "\n",
        "    rows = list(df[[\"name\", \"description\", \"price\", \"ingredients\", \"spicy\", \"vegetarian\"]].itertuples(index=False, name=None))\n",
        "\n",
        "    conn = sqlite3.connect(DB_PATH)\n",
        "    try:\n",
        "        # executemany: one batched call instead of one execute per row\n",
        "        conn.executemany('''\n",
        "            INSERT INTO menu (name, description, price, ingredients, spicy, vegetarian)\n",
        "            VALUES (?, ?, ?, ?, ?, ?)\n",
        "        ''', rows)\n",
        "        conn.commit()\n",
        "    finally:\n",
        "        conn.close()\n"
      ],
      "metadata": {
        "id": "PqqaRu2RsylH"
      },
      "execution_count": 103,
      "outputs": []
    },
    {
      "cell_type": "code",
      "source": [
        "# 1. initialize the schema\n",
        "init_db()\n",
        "\n",
        "# 2. load the menu rows from the CSV export (absolute local path -- adjust per machine)\n",
        "csv_path = r\"E:\\Users\\76044\\Desktop\\trippytacos\\menu.csv\"\n",
        "load_data_from_csv(csv_path)\n",
        "\n",
        "# 3. inspect the resulting table definition\n",
        "conn = sqlite3.connect(DB_PATH)\n",
        "columns = conn.execute(\"PRAGMA table_info(menu);\").fetchall()\n",
        "conn.close()\n",
        "\n",
        "print(\"\\n `menu` structure:\")\n",
        "for col in columns:\n",
        "    print(col)\n",
        "\n",
        "# 4. preview the first five rows\n",
        "conn = sqlite3.connect(DB_PATH)\n",
        "rows = conn.execute(\"SELECT * FROM menu LIMIT 5\").fetchall()\n",
        "conn.close()\n",
        "\n",
        "print(\"\\n `menu` top 5:\")\n",
        "for row in rows:\n",
        "    print(row)\n"
      ],
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "0fxP07JjuEyz",
        "outputId": "b6228921-6171-421b-acf3-2f2d2a79ef44"
      },
      "execution_count": 104,
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "\n",
            " `menu` structure:\n",
            "(0, 'id', 'INTEGER', 0, None, 1)\n",
            "(1, 'name', 'TEXT', 1, None, 0)\n",
            "(2, 'description', 'TEXT', 0, None, 0)\n",
            "(3, 'price', 'REAL', 0, None, 0)\n",
            "(4, 'ingredients', 'TEXT', 0, None, 0)\n",
            "(5, 'spicy', 'INTEGER', 0, None, 0)\n",
            "(6, 'vegetarian', 'INTEGER', 0, None, 0)\n",
            "\n",
            " `menu` top 5:\n",
            "(1, 'Steak Taco', 'Freshly Made Corn Tortilla, Steak, Onions and Cilantro. Lime, Red Sauce (Spicy) and Green Sauce (Mild) on the side', 3.89, 'Corn Tortilla, Steak, Onions, Cilantro, Lime, Red Sauce, Green Sauce', 1, 0)\n",
            "(2, '3 Birria Tacos', 'Corn Tortillas, Melted Cheese, Choice of protein, Onions, Cilantro, and Lime. Hot Sauce, Green Sauce (not Spicy), and Consomé on the Side.', 13.99, 'Corn Tortilla, Melted Cheese, Choice of Protein, Onions, Cilantro, Lime, Hot Sauce, Green Sauce, Consomé', 0, 0)\n",
            "(3, 'Chicken Taco', 'Freshly Made Corn Tortilla, Chicken, Onions and Cilantro. Lime, Red Sauce (Spicy) and Green Sauce (Mild) on the side', 3.39, 'Corn Tortilla, Chicken, Onions, Cilantro, Lime, Red Sauce, Green Sauce', 1, 0)\n",
            "(4, 'Al Pastor Taco', 'Freshly Made Corn Tortilla, Al Pastor, Onions and Cilantro. Lime, Red Sauce (Spicy) and Green Sauce (Mild) on the side', 3.39, 'Corn Tortilla, Al Pastor, Onions, Cilantro, Lime, Red Sauce, Green Sauce', 1, 0)\n",
            "(5, 'Lengua Taco', 'Freshly Made Corn Tortilla, Lengua, Onions and Cilantro. Lime, Red Sauce (Spicy) and Green Sauce (Mild) on the side', 4.49, 'Corn Tortilla, Lengua, Onions, Cilantro, Lime, Red Sauce, Green Sauce', 1, 0)\n"
          ]
        }
      ]
    },
    {
      "cell_type": "code",
      "source": [
        "import sqlite3\n",
        "\n",
        "# re-inspect the schema of the `menu` table\n",
        "conn = sqlite3.connect(DB_PATH)\n",
        "columns = conn.execute(\"PRAGMA table_info(menu);\").fetchall()\n",
        "conn.close()\n",
        "\n",
        "for col in columns:\n",
        "    print(col)\n"
      ],
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "Tt8SFKWa1BXy",
        "outputId": "c19def50-4fcd-4cb4-d8eb-e54c6a60cd1b"
      },
      "execution_count": 105,
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "(0, 'id', 'INTEGER', 0, None, 1)\n",
            "(1, 'name', 'TEXT', 1, None, 0)\n",
            "(2, 'description', 'TEXT', 0, None, 0)\n",
            "(3, 'price', 'REAL', 0, None, 0)\n",
            "(4, 'ingredients', 'TEXT', 0, None, 0)\n",
            "(5, 'spicy', 'INTEGER', 0, None, 0)\n",
            "(6, 'vegetarian', 'INTEGER', 0, None, 0)\n"
          ]
        }
      ]
    },
    {
      "cell_type": "markdown",
      "source": [
        "# 3. FAISS"
      ],
      "metadata": {
        "id": "dlGC_HE0ptZ9"
      }
    },
    {
      "cell_type": "code",
      "source": [
        "import sys\n",
        "\n",
        "# confirm which interpreter (and version) the kernel is running on\n",
        "print(sys.executable, sys.version, sep=\"\\n\")"
      ],
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "dn68qULuwNsj",
        "outputId": "21451ced-d484-4a74-a8d4-69aff88043a3"
      },
      "execution_count": 106,
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "E:\\Anaconda\\python.exe\n",
            "3.9.21 (main, Dec 11 2024, 16:35:24) [MSC v.1929 64 bit (AMD64)]\n"
          ]
        }
      ]
    },
    {
      "cell_type": "code",
      "source": [
        "import faiss\n",
        "\n",
        "# sanity-check that faiss is importable and report its version\n",
        "print(\"FAISS version: \" + faiss.__version__)\n"
      ],
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "DloMsdUszlar",
        "outputId": "84e8e5ab-29d8-44e4-b89f-ae834c127643"
      },
      "execution_count": 107,
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "FAISS version: 1.10.0\n"
          ]
        }
      ]
    },
    {
      "cell_type": "code",
      "source": [
        "!pip install sentence-transformers"
      ],
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "5rSJtgMp0v1H",
        "outputId": "53e1d041-6c63-4b5e-88e7-59a955897b74"
      },
      "execution_count": 108,
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "Requirement already satisfied: sentence-transformers in e:\\anaconda\\lib\\site-packages (3.4.1)"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "WARNING: Ignoring invalid distribution -umpy (e:\\anaconda\\lib\\site-packages)\n",
            "WARNING: Ignoring invalid distribution -umpy (e:\\anaconda\\lib\\site-packages)\n",
            "WARNING: Ignoring invalid distribution -umpy (e:\\anaconda\\lib\\site-packages)\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "\n",
            "Requirement already satisfied: transformers<5.0.0,>=4.41.0 in e:\\anaconda\\lib\\site-packages (from sentence-transformers) (4.49.0)\n",
            "Requirement already satisfied: tqdm in e:\\anaconda\\lib\\site-packages (from sentence-transformers) (4.67.1)\n",
            "Requirement already satisfied: torch>=1.11.0 in e:\\anaconda\\lib\\site-packages (from sentence-transformers) (2.5.1+cu121)\n",
            "Requirement already satisfied: scikit-learn in e:\\anaconda\\lib\\site-packages (from sentence-transformers) (1.6.1)\n",
            "Requirement already satisfied: scipy in e:\\anaconda\\lib\\site-packages (from sentence-transformers) (1.13.1)\n",
            "Requirement already satisfied: huggingface-hub>=0.20.0 in e:\\anaconda\\lib\\site-packages (from sentence-transformers) (0.29.2)\n",
            "Requirement already satisfied: Pillow in e:\\anaconda\\lib\\site-packages (from sentence-transformers) (11.1.0)\n",
            "Requirement already satisfied: filelock in e:\\anaconda\\lib\\site-packages (from huggingface-hub>=0.20.0->sentence-transformers) (3.13.1)\n",
            "Requirement already satisfied: fsspec>=2023.5.0 in e:\\anaconda\\lib\\site-packages (from huggingface-hub>=0.20.0->sentence-transformers) (2024.12.0)\n",
            "Requirement already satisfied: packaging>=20.9 in e:\\anaconda\\lib\\site-packages (from huggingface-hub>=0.20.0->sentence-transformers) (24.2)\n",
            "Requirement already satisfied: pyyaml>=5.1 in e:\\anaconda\\lib\\site-packages (from huggingface-hub>=0.20.0->sentence-transformers) (6.0.2)\n",
            "Requirement already satisfied: requests in e:\\anaconda\\lib\\site-packages (from huggingface-hub>=0.20.0->sentence-transformers) (2.32.3)\n",
            "Requirement already satisfied: typing-extensions>=3.7.4.3 in e:\\anaconda\\lib\\site-packages (from huggingface-hub>=0.20.0->sentence-transformers) (4.12.2)\n",
            "Requirement already satisfied: networkx in e:\\anaconda\\lib\\site-packages (from torch>=1.11.0->sentence-transformers) (3.2.1)\n",
            "Requirement already satisfied: jinja2 in e:\\anaconda\\lib\\site-packages (from torch>=1.11.0->sentence-transformers) (3.1.5)\n",
            "Requirement already satisfied: sympy==1.13.1 in e:\\anaconda\\lib\\site-packages (from torch>=1.11.0->sentence-transformers) (1.13.1)\n",
            "Requirement already satisfied: mpmath<1.4,>=1.1.0 in e:\\anaconda\\lib\\site-packages (from sympy==1.13.1->torch>=1.11.0->sentence-transformers) (1.3.0)\n",
            "Requirement already satisfied: colorama in e:\\anaconda\\lib\\site-packages (from tqdm->sentence-transformers) (0.4.6)\n",
            "Requirement already satisfied: numpy>=1.17 in e:\\anaconda\\lib\\site-packages (from transformers<5.0.0,>=4.41.0->sentence-transformers) (1.23.5)\n",
            "Requirement already satisfied: regex!=2019.12.17 in e:\\anaconda\\lib\\site-packages (from transformers<5.0.0,>=4.41.0->sentence-transformers) (2024.11.6)\n",
            "Requirement already satisfied: tokenizers<0.22,>=0.21 in e:\\anaconda\\lib\\site-packages (from transformers<5.0.0,>=4.41.0->sentence-transformers) (0.21.0)\n",
            "Requirement already satisfied: safetensors>=0.4.1 in e:\\anaconda\\lib\\site-packages (from transformers<5.0.0,>=4.41.0->sentence-transformers) (0.5.3)\n",
            "Requirement already satisfied: joblib>=1.2.0 in e:\\anaconda\\lib\\site-packages (from scikit-learn->sentence-transformers) (1.4.2)\n",
            "Requirement already satisfied: threadpoolctl>=3.1.0 in e:\\anaconda\\lib\\site-packages (from scikit-learn->sentence-transformers) (3.5.0)\n",
            "Requirement already satisfied: MarkupSafe>=2.0 in e:\\anaconda\\lib\\site-packages (from jinja2->torch>=1.11.0->sentence-transformers) (3.0.2)\n",
            "Requirement already satisfied: charset-normalizer<4,>=2 in e:\\anaconda\\lib\\site-packages (from requests->huggingface-hub>=0.20.0->sentence-transformers) (3.3.2)\n",
            "Requirement already satisfied: idna<4,>=2.5 in e:\\anaconda\\lib\\site-packages (from requests->huggingface-hub>=0.20.0->sentence-transformers) (3.7)\n",
            "Requirement already satisfied: urllib3<3,>=1.21.1 in e:\\anaconda\\lib\\site-packages (from requests->huggingface-hub>=0.20.0->sentence-transformers) (1.26.19)\n",
            "Requirement already satisfied: certifi>=2017.4.17 in e:\\anaconda\\lib\\site-packages (from requests->huggingface-hub>=0.20.0->sentence-transformers) (2025.1.31)\n"
          ]
        }
      ]
    },
    {
      "cell_type": "code",
      "source": [
        "import faiss\n",
        "import numpy as np\n",
        "from sentence_transformers import SentenceTransformer\n",
        "\n",
        "DB_PATH = \"menu.db\"\n",
        "INDEX_PATH = \"menu_faiss.index\"\n",
        "\n",
        "# 1. load the sentence-embedding model\n",
        "model = SentenceTransformer(\"all-MiniLM-L6-v2\")\n",
        "\n",
        "# 2. read the menu rows to be indexed\n",
        "conn = sqlite3.connect(DB_PATH)\n",
        "cursor = conn.cursor()\n",
        "cursor.execute(\"SELECT id, name, description FROM menu\")\n",
        "menu_items = cursor.fetchall()\n",
        "conn.close()\n",
        "\n",
        "if not menu_items:\n",
        "    # raise instead of exit(): exit() can kill the notebook kernel outright\n",
        "    raise RuntimeError(\"error, database is empty!\")\n",
        "\n",
        "# 3. embed all texts in ONE batched call (much faster than per-text encode)\n",
        "menu_texts = [f\"{item[1]} - {item[2]}\" for item in menu_items]\n",
        "menu_vectors = np.asarray(model.encode(menu_texts), dtype=\"float32\")  # faiss requires float32\n",
        "\n",
        "# 4. IndexIDMap stores the real SQLite `id` next to each vector\n",
        "index = faiss.IndexIDMap(faiss.IndexFlatL2(menu_vectors.shape[1]))\n",
        "\n",
        "# 5. the SQLite primary keys become the FAISS ids\n",
        "ids = np.array([item[0] for item in menu_items], dtype=np.int64)\n",
        "\n",
        "# 6. add vectors + ids to the index\n",
        "index.add_with_ids(menu_vectors, ids)\n",
        "\n",
        "print(f\"FAISS index size: {index.ntotal}\")\n",
        "\n",
        "# 7. persist the index for later sessions\n",
        "faiss.write_index(index, INDEX_PATH)\n",
        "print(\"faiss index stored!\")\n"
      ],
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "ii5PiCOp4NsH",
        "outputId": "5c7f98d8-2a64-414a-821d-12ce7e5ab25f"
      },
      "execution_count": 109,
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "FAISS index size: 28\n",
            "faiss index stored!\n"
          ]
        }
      ]
    },
    {
      "cell_type": "code",
      "source": [
        "print(f\"FAISS index size: {index.ntotal}\")\n"
      ],
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "cJ0mdfQmubnf",
        "outputId": "fc5f264c-65e8-4fcf-b8a9-871a44da5573"
      },
      "execution_count": 110,
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "FAISS index size: 28\n"
          ]
        }
      ]
    },
    {
      "cell_type": "code",
      "source": [
        "import sqlite3\n",
        "\n",
        "def search_menu(query, top_k=3):\n",
        "    \"\"\"Return up to `top_k` menu rows most similar to the user query.\n",
        "\n",
        "    Embeds `query` with the sentence-transformer, searches the FAISS\n",
        "    index, then resolves the returned ids against the SQLite menu table.\n",
        "    Each row is a (id, name, description, price) tuple.\n",
        "    \"\"\"\n",
        "    query_vector = model.encode(query).reshape(1, -1)\n",
        "    distances, indices = index.search(query_vector, top_k)\n",
        "\n",
        "    print(\"\\n FAISS search result：\")\n",
        "    print(\" (distances):\", distances)\n",
        "    print(\" (indices):\", indices)\n",
        "\n",
        "    # FAISS pads with -1 when fewer than top_k vectors exist -- drop those;\n",
        "    # also convert numpy ints to plain Python ints for sqlite parameters\n",
        "    real_ids = [int(idx) for idx in indices[0] if idx != -1]\n",
        "\n",
        "    # resolve ids one by one to preserve the FAISS ranking order\n",
        "    conn = sqlite3.connect(DB_PATH)\n",
        "    try:\n",
        "        cursor = conn.cursor()\n",
        "        menu_items = []\n",
        "        for real_id in real_ids:\n",
        "            cursor.execute(\"SELECT id, name, description, price FROM menu WHERE id=?\", (real_id,))\n",
        "            item = cursor.fetchone()\n",
        "            if item:\n",
        "                menu_items.append(item)\n",
        "    finally:\n",
        "        conn.close()\n",
        "    return menu_items\n",
        "\n",
        "# smoke test\n",
        "query = \"I like spicy food\"\n",
        "results = search_menu(query)\n",
        "\n",
        "print(\"\\n food recommendation：\")\n",
        "for item in results:\n",
        "    # item[3] is the price; the previous item[2] printed the description after the $ sign\n",
        "    print(f\"{item[0]} - {item[1]} (${item[3]})\")\n"
      ],
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "qF5INoUy4gqC",
        "outputId": "984b25cc-c302-4c59-bee6-bfd0f6b92d17"
      },
      "execution_count": 111,
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "\n",
            " FAISS search result：\n",
            " (distances): [[1.0116991 1.0352366 1.072386 ]]\n",
            " (indices): [[14 15  9]]\n",
            "\n",
            " food recommendation：\n",
            "14 - Trippy Burrito ($Fries, Jack Cheese, Nacho Cheese, Steak, Pico de Gallo, Crema, Lettuce, Trippy Sauce (Not Spicy))\n",
            "15 - Trippy Burrito Spicy ($Fries, Jack Cheese, Nacho Cheese, Steak, Pico de Gallo, Crema, Lettuce, Jalapenos, Red Sauce (Spicy), Trippy Sauce (Not Spicy))\n",
            "9 - Steak Burrito ($Flour Tortilla, Rice, Beans, Steak, Cheese, Green Sauce (Mild), Pico de Gallo, Crema, and Trippy Sauce (not spicy))\n"
          ]
        }
      ]
    },
    {
      "cell_type": "code",
      "source": [
        "import sqlite3\n",
        "\n",
        "# list every primary key currently stored in the menu table\n",
        "conn = sqlite3.connect(\"menu.db\")\n",
        "all_ids = conn.execute(\"SELECT id FROM menu\").fetchall()\n",
        "conn.close()\n",
        "\n",
        "print(\"\\n  `id`：\", [x[0] for x in all_ids])\n"
      ],
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "Ve54_ndY41DQ",
        "outputId": "9162740c-d73b-40b5-eef3-41655f071809"
      },
      "execution_count": 112,
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "\n",
            "  `id`： [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28]\n"
          ]
        }
      ]
    },
    {
      "cell_type": "code",
      "source": [
        "import faiss\n",
        "\n",
        "# reload the FAISS index persisted by the build step above\n",
        "index = faiss.read_index(\"menu_faiss.index\")\n",
        "\n",
        "# IndexIDMap keeps the stored SQLite ids in `id_map`; dump them as a numpy array\n",
        "faiss_ids = faiss.vector_to_array(index.id_map)\n",
        "print(\"\\n FAISS ID：\", faiss_ids)\n"
      ],
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "GCqbIJG26hWh",
        "outputId": "7631be35-da64-4dc2-fa28-65a67de36ad2"
      },
      "execution_count": 113,
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "\n",
            " FAISS ID： [ 1  2  3  4  5  6  7  8  9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24\n",
            " 25 26 27 28]\n"
          ]
        }
      ]
    },
    {
      "cell_type": "markdown",
      "source": [
        "# 4. RAG (Llama: SQLite + FAISS + PROMPT)"
      ],
      "metadata": {
        "id": "wch70Eb9ud5C"
      }
    },
    {
      "cell_type": "code",
      "source": [
        "import sqlite3\n",
        "\n",
        "# spot-check a few ids straight from the database\n",
        "conn = sqlite3.connect(\"menu.db\")\n",
        "cursor = conn.cursor()\n",
        "\n",
        "for test_id in (16, 22, 9):\n",
        "    cursor.execute(\"SELECT id, name, description, price FROM menu WHERE id=?\", (test_id,))\n",
        "    print(f\" id={test_id} result: {cursor.fetchone()}\")\n",
        "\n",
        "conn.close()\n"
      ],
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "CNdmPbjo3sgo",
        "outputId": "fddd6687-18b8-441b-9c29-d2aa3baffeb4"
      },
      "execution_count": 114,
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            " id=16 result: (16, 'Birria Burrito', 'Tender birria wrapped in a flour tortilla with rice, beans, onions, cilantro, and a blend of cheese.', 13.99)\n",
            " id=22 result: (22, 'Cheese Quesadilla', 'Flour Tortilla, Cheese, Pico de Gallo and Crema on the Side.', 9.99)\n",
            " id=9 result: (9, 'Steak Burrito', 'Flour Tortilla, Rice, Beans, Steak, Cheese, Green Sauce (Mild), Pico de Gallo, Crema, and Trippy Sauce (not spicy)', 13.49)\n"
          ]
        }
      ]
    },
    {
      "cell_type": "code",
      "source": [
        "print(type(model))\n"
      ],
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "-vZrPLfvCnIq",
        "outputId": "f5f637fd-fe7b-470b-f2e2-bafb82a52c99"
      },
      "execution_count": 115,
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "<class 'sentence_transformers.SentenceTransformer.SentenceTransformer'>\n"
          ]
        }
      ]
    },
    {
      "cell_type": "code",
      "source": [
        "# smoke test: FAISS-backed menu search\n",
        "query = \"I like spicy food\"\n",
        "results = search_menu(query)\n",
        "\n",
        "for item in results:\n",
        "    # item = (id, name, description, price); index 3 is the price --\n",
        "    # item[2] printed the description after the $ sign (see prior output)\n",
        "    print(f\"{item[0]} - {item[1]} (${item[3]})\")\n"
      ],
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "jid45AwHDd30",
        "outputId": "01593a14-2a0a-45d0-fe41-787a31fd72dd"
      },
      "execution_count": 116,
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "\n",
            " FAISS search result：\n",
            " (distances): [[1.0116991 1.0352366 1.072386 ]]\n",
            " (indices): [[14 15  9]]\n",
            "14 - Trippy Burrito ($Fries, Jack Cheese, Nacho Cheese, Steak, Pico de Gallo, Crema, Lettuce, Trippy Sauce (Not Spicy))\n",
            "15 - Trippy Burrito Spicy ($Fries, Jack Cheese, Nacho Cheese, Steak, Pico de Gallo, Crema, Lettuce, Jalapenos, Red Sauce (Spicy), Trippy Sauce (Not Spicy))\n",
            "9 - Steak Burrito ($Flour Tortilla, Rice, Beans, Steak, Cheese, Green Sauce (Mild), Pico de Gallo, Crema, and Trippy Sauce (not spicy))\n"
          ]
        }
      ]
    },
    {
      "cell_type": "code",
      "source": [
        "import sqlite3\n",
        "\n",
        "# resolve the ids FAISS returned and eyeball the matching rows\n",
        "conn = sqlite3.connect(\"menu.db\")\n",
        "cursor = conn.cursor()\n",
        "\n",
        "for real_id in (14, 15, 9):\n",
        "    row = cursor.execute(\"SELECT id, name, description, price FROM menu WHERE id=?\", (real_id,)).fetchone()\n",
        "    print(f\"id={real_id} result: {row}\")\n",
        "\n",
        "conn.close()\n"
      ],
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "k1kvUcc7DrOw",
        "outputId": "90a84863-9234-407d-8394-bc72072ee82e"
      },
      "execution_count": 117,
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "id=14 result: (14, 'Trippy Burrito', 'Fries, Jack Cheese, Nacho Cheese, Steak, Pico de Gallo, Crema, Lettuce, Trippy Sauce (Not Spicy)', 13.99)\n",
            "id=15 result: (15, 'Trippy Burrito Spicy', 'Fries, Jack Cheese, Nacho Cheese, Steak, Pico de Gallo, Crema, Lettuce, Jalapenos, Red Sauce (Spicy), Trippy Sauce (Not Spicy)', 13.99)\n",
            "id=9 result: (9, 'Steak Burrito', 'Flour Tortilla, Rice, Beans, Steak, Cheese, Green Sauce (Mild), Pico de Gallo, Crema, and Trippy Sauce (not spicy)', 13.49)\n"
          ]
        }
      ]
    },
    {
      "cell_type": "code",
      "source": [
        "from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig\n",
        "\n",
        "MODEL_ID = \"meta-llama/Llama-3.2-3B-Instruct\"\n",
        "\n",
        "# 8-bit quantization config; fp32 CPU offload lets layers that do not fit\n",
        "# on the GPU fall back to CPU instead of failing at load time\n",
        "bnb_config = BitsAndBytesConfig(\n",
        "    load_in_8bit=True,\n",
        "    llm_int8_enable_fp32_cpu_offload=True  # CPU offloading\n",
        ")\n",
        "\n",
        "# load tokenizer and quantized model; device_map {\"\": 0} pins the whole model to GPU 0\n",
        "llm_tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)\n",
        "llm_model = AutoModelForCausalLM.from_pretrained(\n",
        "    MODEL_ID,\n",
        "    quantization_config=bnb_config,\n",
        "    device_map={\"\": 0}  # GPU\n",
        ")\n",
        "\n",
        "print(f\"model loaded at {llm_model.device}\")\n"
      ],
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 66,
          "referenced_widgets": [
            "af5cb14f70394c459de5352f3dc7890f",
            "4fd33cfaafd64def978e088ff9b9762e",
            "0f437f954d264ba1a6d6ad11a1b62702",
            "adcb4ef53bae4956a9474ce2b8610a11",
            "f1674313b2be48faab4d22f6b6268894",
            "a9747e3bf336442899f0144d04f719bf",
            "ed8f0d52e98849bca20166f5bef662be",
            "63903b40542e43938c5beb8abda3a0c9",
            "3b41fe56ae5f4a0c871d5cc5fbb6ca37",
            "d56041c5ddb646b6aab0f47e9507171d",
            "8644dcd2e00d4082b5bbf65aa84391cf"
          ]
        },
        "id": "FS8WgSZQ3F2S",
        "outputId": "017b46dd-c956-4d91-90ce-fc1f384912e3"
      },
      "execution_count": 118,
      "outputs": [
        {
          "output_type": "display_data",
          "data": {
            "text/plain": [
              "Loading checkpoint shards:   0%|          | 0/2 [00:00<?, ?it/s]"
            ],
            "application/vnd.jupyter.widget-view+json": {
              "version_major": 2,
              "version_minor": 0,
              "model_id": "af5cb14f70394c459de5352f3dc7890f"
            }
          },
          "metadata": {}
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "model loaded at cuda:0\n"
          ]
        }
      ]
    },
    {
      "cell_type": "code",
      "source": [
        "# confirm where transformers actually placed the model\n",
        "print(\" device_map:\", llm_model.hf_device_map)\n",
        "print(\"device:\", next(llm_model.parameters()).device)"
      ],
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "8x-U0WuHFvKG",
        "outputId": "3700730e-6741-4594-9033-87729f86db72"
      },
      "execution_count": 119,
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            " device_map: {'': 0}\n",
            "device: cuda:0\n"
          ]
        }
      ]
    },
    {
      "cell_type": "code",
      "source": [
        "import torch\n",
        "from transformers import pipeline\n",
        "\n",
        "# device string kept for reference/debugging\n",
        "device = \"cuda\" if torch.cuda.is_available() else \"cpu\"\n",
        "\n",
        "# text-generation pipeline (kept for ad-hoc use; generate_response below\n",
        "# calls llm_model.generate directly)\n",
        "llm_pipeline = pipeline(\n",
        "    \"text-generation\",\n",
        "    model=llm_model,\n",
        "    tokenizer=llm_tokenizer,\n",
        "    device_map=\"auto\"\n",
        ")\n",
        "\n",
        "print(f\"Llama model loaded at {llm_model.device}\")\n",
        "\n",
        "def generate_response(user_query, top_k=3):\n",
        "    \"\"\"Retrieve menu items with FAISS and let LLaMA write a recommendation.\n",
        "\n",
        "    Returns the decoded model output (the prompt is echoed back,\n",
        "    followed by the generated recommendation).\n",
        "    \"\"\"\n",
        "    # Step 1: FAISS retrieval\n",
        "    results = search_menu(user_query, top_k=top_k)\n",
        "\n",
        "    if not results:\n",
        "        return \"there is not any recommendation food \"\n",
        "\n",
        "    # Step 2: build the RAG prompt from the retrieved (id, name, description, price) rows\n",
        "    context = \"\\n\".join([f\"- {item[1]}: {item[2]} (${item[3]})\" for item in results])\n",
        "    prompt = f\"\"\"\n",
        "    [Role]\n",
        "    You are an expert in Mexican cuisine with extensive knowledge of the menu provided below. Your recommendations must be based solely on these menu items.\n",
        "\n",
        "    [Task]\n",
        "    1. The user provides a query (e.g., \"Recommend some vegan Mexican dishes\").\n",
        "    2. Using only the items listed in the menu, select the most suitable dish(es) that meet the user’s requirements.\n",
        "    3. Do not introduce any dishes that are not included in the menu.\n",
        "\n",
        "    [Menu]\n",
        "    {context}\n",
        "\n",
        "    [Response Format]\n",
        "    - Begin with a brief summary of the user's requirement.\n",
        "    - List the recommended dish(es) with their name, brief description, and price.\n",
        "    - Provide a clear explanation for each recommendation, focusing on why it fits the user’s needs.\n",
        "\n",
        "    [User Query]\n",
        "    User query: {user_query}\n",
        "\n",
        "    [Instruction]\n",
        "    Based on the information above, please generate a recommendation strictly using the provided menu, and explain your choices.\n",
        "    \"\"\".strip()\n",
        "\n",
        "    # Step 3: tokenize and move tensors to wherever the model actually lives\n",
        "    # (llm_model.device, not the module-level `device` string, so this stays\n",
        "    # correct even if device_map placed the model elsewhere)\n",
        "    inputs = llm_tokenizer(prompt, return_tensors=\"pt\")\n",
        "    inputs = {k: v.to(llm_model.device) for k, v in inputs.items()}\n",
        "\n",
        "    # Step 4: generate. max_new_tokens bounds only the completion; the old\n",
        "    # max_length=1000 counted prompt tokens too and breaks on long prompts.\n",
        "    output_ids = llm_model.generate(\n",
        "        **inputs,\n",
        "        max_new_tokens=512,\n",
        "        do_sample=True,\n",
        "        pad_token_id=llm_tokenizer.eos_token_id,  # silences the open-end generation warning\n",
        "    )\n",
        "    response = llm_tokenizer.decode(output_ids[0], skip_special_tokens=True)\n",
        "\n",
        "    return response\n",
        "\n",
        "# smoke test\n",
        "query = \"I like vegan food\"\n",
        "recommendation = generate_response(query)\n",
        "\n",
        "print(\"\\n AI generates food and explaination：\")\n",
        "print(recommendation)\n"
      ],
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "mZJPkXpDERJ8",
        "outputId": "14253fe6-8834-413b-e724-c8bfbf915ee6"
      },
      "execution_count": 121,
      "outputs": [
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "Device set to use cuda:0\n",
            "Setting `pad_token_id` to `eos_token_id`:128001 for open-end generation.\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "Llama model loaded at cuda:0\n",
            "\n",
            " FAISS search result：\n",
            " (distances): [[0.9728593 1.498143  1.4996692]]\n",
            " (indices): [[ 7 25  1]]\n",
            "\n",
            " AI generates food and explaination：\n",
            "[Role]\n",
            "    You are an expert in Mexican cuisine with extensive knowledge of the menu provided below. Your recommendations must be based solely on these menu items.\n",
            "\n",
            "    [Task]\n",
            "    1. The user provides a query (e.g., \"Recommend some vegan Mexican dishes\").\n",
            "    2. Using only the items listed in the menu, select the most suitable dish(es) that meet the user’s requirements.\n",
            "    3. Do not introduce any dishes that are not included in the menu.\n",
            "\n",
            "    [Menu]\n",
            "    - Vegan Taco: Freshly Made Corn Tortilla, Rice, Beans, Grilled Onions & Pepper. Lime, Red Sauce (Spicy) and Green Sauce (Mild) on the side ($3.39)\n",
            "- Steak Torta: Telera Bread, Refried Beans, Steak, Cheese, Tomatoes, Lettuce, Guacamole, and Trippy Sauce ($13.49)\n",
            "- Steak Taco: Freshly Made Corn Tortilla, Steak, Onions and Cilantro. Lime, Red Sauce (Spicy) and Green Sauce (Mild) on the side ($3.89)\n",
            "\n",
            "    [Response Format]\n",
            "    - Begin with a brief summary of the user's requirement.\n",
            "    - List the recommended dish(es) with their name, brief description, and price.\n",
            "    - Provide a clear explanation for each recommendation, focusing on why it fits the user’s needs.\n",
            "\n",
            "    [User Query]\n",
            "    User query: I like vegan food\n",
            "\n",
            "    [Instruction]\n",
            "    Based on the information above, please generate a recommendation strictly using the provided menu, and explain your choices. \n",
            "\n",
            "    [Response]\n",
            "    Summary: The user likes vegan food.\n",
            "\n",
            "    Recommended dishes:\n",
            "    - Vegan Taco: Freshly Made Corn Tortilla, Rice, Beans, Grilled Onions & Pepper. Lime, Red Sauce (Spicy) and Green Sauce (Mild) on the side ($3.39)\n",
            "    - Green Sauce (Mild) is vegan-friendly.\n",
            "\n",
            "    The Vegan Taco is the most suitable option because it is explicitly labeled as vegan and includes all the necessary ingredients that are free from animal products. The Green Sauce (Mild) is also vegan-friendly, making it a suitable accompaniment to the Vegan Taco. The Rice and Beans are naturally vegan, and the Grilled Onions and Pepper add flavor without any animal-derived ingredients. This dish meets the user's requirements for vegan food.\n"
          ]
        }
      ]
    }
  ]
}