{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 4,
   "id": "4084b0c8-aa9b-4804-ae8c-aeb2d6475ebd",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Looking in indexes: https://pypi.tuna.tsinghua.edu.cn/simple\n",
      "Collecting transformers\n",
      "  Downloading https://pypi.tuna.tsinghua.edu.cn/packages/db/88/1ef8a624a33d7fe460a686b9e0194a7916320fc0d67d4e38e570beeac039/transformers-4.46.0-py3-none-any.whl (10.0 MB)\n",
      "     ---------------------------------------- 0.0/10.0 MB ? eta -:--:--\n",
      "     - -------------------------------------- 0.5/10.0 MB 14.2 MB/s eta 0:00:01\n",
      "     ---- ----------------------------------- 1.1/10.0 MB 17.4 MB/s eta 0:00:01\n",
      "     -------- ------------------------------- 2.1/10.0 MB 16.7 MB/s eta 0:00:01\n",
      "     ------------- -------------------------- 3.3/10.0 MB 21.1 MB/s eta 0:00:01\n",
      "     ----------------- ---------------------- 4.4/10.0 MB 21.5 MB/s eta 0:00:01\n",
      "     ---------------------- ----------------- 5.7/10.0 MB 21.4 MB/s eta 0:00:01\n",
      "     ----------------------------- ---------- 7.4/10.0 MB 23.6 MB/s eta 0:00:01\n",
      "     ------------------------------------ --- 9.0/10.0 MB 26.3 MB/s eta 0:00:01\n",
      "     ------------------------------------- -- 9.3/10.0 MB 23.8 MB/s eta 0:00:01\n",
      "     --------------------------------------- 10.0/10.0 MB 23.8 MB/s eta 0:00:00\n",
      "Requirement already satisfied: filelock in d:\\python\\python312\\lib\\site-packages (from transformers) (3.13.1)\n",
      "Collecting huggingface-hub<1.0,>=0.23.2 (from transformers)\n",
      "  Downloading https://pypi.tuna.tsinghua.edu.cn/packages/d7/4d/017d8d7cff5100092da8ea19139bcb1965bbadcbb5ddd0480e2badc299e8/huggingface_hub-0.26.1-py3-none-any.whl (447 kB)\n",
      "     ---------------------------------------- 0.0/447.4 kB ? eta -:--:--\n",
      "     ------------------------------------- 447.4/447.4 kB 29.1 MB/s eta 0:00:00\n",
      "Requirement already satisfied: numpy>=1.17 in d:\\python\\python312\\lib\\site-packages (from transformers) (1.26.2)\n",
      "Requirement already satisfied: packaging>=20.0 in d:\\python\\python312\\lib\\site-packages (from transformers) (23.2)\n",
      "Requirement already satisfied: pyyaml>=5.1 in d:\\python\\python312\\lib\\site-packages (from transformers) (6.0.1)\n",
      "Collecting regex!=2019.12.17 (from transformers)\n",
      "  Downloading https://pypi.tuna.tsinghua.edu.cn/packages/6e/16/efc5f194778bf43e5888209e5cec4b258005d37c613b67ae137df3b89c53/regex-2024.9.11-cp312-cp312-win_amd64.whl (273 kB)\n",
      "     ---------------------------------------- 0.0/273.5 kB ? eta -:--:--\n",
      "     ---------------------------------------- 273.5/273.5 kB ? eta 0:00:00\n",
      "Requirement already satisfied: requests in d:\\python\\python312\\lib\\site-packages (from transformers) (2.31.0)\n",
      "Collecting safetensors>=0.4.1 (from transformers)\n",
      "  Downloading https://pypi.tuna.tsinghua.edu.cn/packages/f1/5f/ab6b6cec85b40789801f35b7d2fb579ae242d8193929974a106d5ff5c835/safetensors-0.4.5-cp312-none-win_amd64.whl (286 kB)\n",
      "     ---------------------------------------- 0.0/286.3 kB ? eta -:--:--\n",
      "     - -------------------------------------- 10.2/286.3 kB ? eta -:--:--\n",
      "     -------------------------------------- 286.3/286.3 kB 4.5 MB/s eta 0:00:00\n",
      "Collecting tokenizers<0.21,>=0.20 (from transformers)\n",
      "  Downloading https://pypi.tuna.tsinghua.edu.cn/packages/7e/ba/18bf6a7ad04f8225b71aa862b57188748d1d81e268de4a9aac1aed237246/tokenizers-0.20.1-cp312-none-win_amd64.whl (2.4 MB)\n",
      "     ---------------------------------------- 0.0/2.4 MB ? eta -:--:--\n",
      "     -- ------------------------------------- 0.1/2.4 MB 7.5 MB/s eta 0:00:01\n",
      "     --------------- ------------------------ 0.9/2.4 MB 11.9 MB/s eta 0:00:01\n",
      "     ---------------------------------------  2.4/2.4 MB 18.9 MB/s eta 0:00:01\n",
      "     ---------------------------------------- 2.4/2.4 MB 18.9 MB/s eta 0:00:00\n",
      "Collecting tqdm>=4.27 (from transformers)\n",
      "  Downloading https://pypi.tuna.tsinghua.edu.cn/packages/48/5d/acf5905c36149bbaec41ccf7f2b68814647347b72075ac0b1fe3022fdc73/tqdm-4.66.5-py3-none-any.whl (78 kB)\n",
      "     ---------------------------------------- 0.0/78.4 kB ? eta -:--:--\n",
      "     ---------------------------------------- 78.4/78.4 kB 4.5 MB/s eta 0:00:00\n",
      "Requirement already satisfied: fsspec>=2023.5.0 in d:\\python\\python312\\lib\\site-packages (from huggingface-hub<1.0,>=0.23.2->transformers) (2023.12.2)\n",
      "Requirement already satisfied: typing-extensions>=3.7.4.3 in d:\\python\\python312\\lib\\site-packages (from huggingface-hub<1.0,>=0.23.2->transformers) (4.9.0)\n",
      "Requirement already satisfied: colorama in d:\\python\\python312\\lib\\site-packages (from tqdm>=4.27->transformers) (0.4.6)\n",
      "Requirement already satisfied: charset-normalizer<4,>=2 in d:\\python\\python312\\lib\\site-packages (from requests->transformers) (3.3.2)\n",
      "Requirement already satisfied: idna<4,>=2.5 in d:\\python\\python312\\lib\\site-packages (from requests->transformers) (3.6)\n",
      "Requirement already satisfied: urllib3<3,>=1.21.1 in d:\\python\\python312\\lib\\site-packages (from requests->transformers) (2.1.0)\n",
      "Requirement already satisfied: certifi>=2017.4.17 in d:\\python\\python312\\lib\\site-packages (from requests->transformers) (2023.11.17)\n",
      "Installing collected packages: tqdm, safetensors, regex, huggingface-hub, tokenizers, transformers\n",
      "Successfully installed huggingface-hub-0.26.1 regex-2024.9.11 safetensors-0.4.5 tokenizers-0.20.1 tqdm-4.66.5 transformers-4.46.0\n",
      "Note: you may need to restart the kernel to use updated packages.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "[notice] A new release of pip is available: 23.2.1 -> 24.3\n",
      "[notice] To update, run: python.exe -m pip install --upgrade pip\n"
     ]
    }
   ],
   "source": [
    "# pip install transformers -i https://pypi.tuna.tsinghua.edu.cn/simple"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "ba605e92-f0d3-4bfd-9392-a41eced13f5b",
   "metadata": {},
   "outputs": [],
   "source": [
    "from transformers import pipeline  # use the GPT-2 model from Hugging Face for the text-generation task"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "id": "8eb6cafc-197e-47a1-a124-f74c43686d70",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "application/vnd.jupyter.widget-view+json": {
       "model_id": "a557eaa6a22a44c98422ba675293c2fc",
       "version_major": 2,
       "version_minor": 0
      },
      "text/plain": [
       "config.json:   0%|          | 0.00/665 [00:00<?, ?B/s]"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "D:\\Python\\Python312\\Lib\\site-packages\\huggingface_hub\\file_download.py:139: UserWarning: `huggingface_hub` cache-system uses symlinks by default to efficiently store duplicated files but your machine does not support them in C:\\Users\\Aelous\\.cache\\huggingface\\hub\\models--gpt2. Caching files will still work but in a degraded version that might require more space on your disk. This warning can be disabled by setting the `HF_HUB_DISABLE_SYMLINKS_WARNING` environment variable. For more details, see https://huggingface.co/docs/huggingface_hub/how-to-cache#limitations.\n",
      "To support symlinks on Windows, you either need to activate Developer Mode or to run Python as an administrator. In order to activate developer mode, see this article: https://docs.microsoft.com/en-us/windows/apps/get-started/enable-your-device-for-development\n",
      "  warnings.warn(message)\n"
     ]
    },
    {
     "data": {
      "application/vnd.jupyter.widget-view+json": {
       "model_id": "28edf2748edd4346a730c1b1f6d6fa89",
       "version_major": 2,
       "version_minor": 0
      },
      "text/plain": [
       "model.safetensors:   0%|          | 0.00/548M [00:00<?, ?B/s]"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "data": {
      "application/vnd.jupyter.widget-view+json": {
       "model_id": "a8b7a11403984934ac6218f8222d52ec",
       "version_major": 2,
       "version_minor": 0
      },
      "text/plain": [
       "generation_config.json:   0%|          | 0.00/124 [00:00<?, ?B/s]"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "data": {
      "application/vnd.jupyter.widget-view+json": {
       "model_id": "ce06ac6261b54d688ede6f287ad59492",
       "version_major": 2,
       "version_minor": 0
      },
      "text/plain": [
       "tokenizer_config.json:   0%|          | 0.00/26.0 [00:00<?, ?B/s]"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "data": {
      "application/vnd.jupyter.widget-view+json": {
       "model_id": "f0e6e72b9e514f20a4b08c4bf1aef76b",
       "version_major": 2,
       "version_minor": 0
      },
      "text/plain": [
       "vocab.json:   0%|          | 0.00/1.04M [00:00<?, ?B/s]"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "data": {
      "application/vnd.jupyter.widget-view+json": {
       "model_id": "f1920eb6d07747dab6a7a2f7f14b6156",
       "version_major": 2,
       "version_minor": 0
      },
      "text/plain": [
       "merges.txt:   0%|          | 0.00/456k [00:00<?, ?B/s]"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "data": {
      "application/vnd.jupyter.widget-view+json": {
       "model_id": "fe31685ce24b4d418adf8368f465a8bc",
       "version_major": 2,
       "version_minor": 0
      },
      "text/plain": [
       "tokenizer.json:   0%|          | 0.00/1.36M [00:00<?, ?B/s]"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "model_name = \"gpt2\"\n",
    "# Build a Hugging Face text-generation pipeline backed by GPT-2;\n",
    "# downloads the model weights on first run.\n",
    "llm = pipeline(\"text-generation\", model=model_name)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "id": "0627fd90-297e-43f3-b5c6-504f9a8ccb57",
   "metadata": {},
   "outputs": [],
   "source": [
    "class UAVSimulator:\n",
    "    \"\"\"A minimal UAV flight simulator tracking airborne state and position.\n",
    "\n",
    "    Position is an [x, y, z] list in meters; index 2 (z) is the altitude.\n",
    "    \"\"\"\n",
    "\n",
    "    def __init__(self):\n",
    "        self.is_flying = False     # True while the UAV is airborne\n",
    "        self.position = [0, 0, 0]  # [x, y, z] in meters\n",
    "\n",
    "    def takeoff(self, altitude=10):\n",
    "        \"\"\"Take off to `altitude` meters (default 10); no-op when already flying.\"\"\"\n",
    "        if not self.is_flying:\n",
    "            self.is_flying = True\n",
    "            self.position[2] = altitude\n",
    "            return f\"UAV has taken off and is at an altitude of {altitude} meters.\"\n",
    "        return \"UAV is already in the air.\"\n",
    "\n",
    "    def land(self):\n",
    "        \"\"\"Land the UAV (altitude reset to 0); no-op when already on the ground.\"\"\"\n",
    "        if self.is_flying:\n",
    "            self.is_flying = False\n",
    "            self.position[2] = 0\n",
    "            return \"UAV has landed safely.\"\n",
    "        return \"UAV is already on the ground.\"\n",
    "\n",
    "    def move(self, direction, distance):\n",
    "        \"\"\"Move `distance` meters along `direction` (forward/backward/left/right/up/down).\"\"\"\n",
    "        if not self.is_flying:\n",
    "            return \"UAV is on the ground. Take off first to move.\"\n",
    "\n",
    "        # Unit displacement vector for each supported direction.\n",
    "        direction_map = {\n",
    "            'forward': (1, 0, 0),\n",
    "            'backward': (-1, 0, 0),\n",
    "            'left': (0, -1, 0),\n",
    "            'right': (0, 1, 0),\n",
    "            'up': (0, 0, 1),\n",
    "            'down': (0, 0, -1)\n",
    "        }\n",
    "\n",
    "        if direction in direction_map:\n",
    "            self.position = [\n",
    "                self.position[i] + direction_map[direction][i] * distance\n",
    "                for i in range(3)\n",
    "            ]\n",
    "            return f\"UAV moved {direction} by {distance} meters.\"\n",
    "        return f\"Unknown direction '{direction}'.\"\n",
    "\n",
    "    def get_status(self):\n",
    "        \"\"\"Return a one-line human-readable status string.\"\"\"\n",
    "        return f\"UAV status: {'Flying' if self.is_flying else 'On ground'}, Position: {self.position}\""
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "id": "c5cb67eb-9d7f-46e1-b69f-a77f1d3bc9e5",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Use the GPT-2 model to parse the input command (simulating LLM-based\n",
    "# instruction parsing) and return an (action_type, direction, distance) tuple.\n",
    "def parse_command(input_text):\n",
    "    result = llm(input_text, max_length=50, num_return_sequences=1)\n",
    "    generated_text = result[0][\"generated_text\"]\n",
    "    print(f\"LLM Output: {generated_text}\")\n",
    "\n",
    "    # Tokenize on whitespace and strip punctuation so only whole words match:\n",
    "    # the previous substring search made \"landfall\" trigger the \"land\" action.\n",
    "    # A small synonym map lets \"upwards\"/\"downwards\" select a direction too.\n",
    "    synonyms = {'upward': 'up', 'upwards': 'up', 'downward': 'down', 'downwards': 'down'}\n",
    "    words = [w.strip(\".,!?;:\") for w in generated_text.lower().split()]\n",
    "    words = [synonyms.get(w, w) for w in words]\n",
    "\n",
    "    if \"takeoff\" in words or (\"take\" in words and \"off\" in words):\n",
    "        return \"takeoff\", None, None\n",
    "    if \"land\" in words:\n",
    "        return \"land\", None, None\n",
    "    if \"move\" in words:\n",
    "        for direction in ['forward', 'backward', 'left', 'right', 'up', 'down']:\n",
    "            if direction in words:\n",
    "                # Distance is the first integer after the direction word\n",
    "                # (the old fixed words[i + 2] lookup broke on phrasing);\n",
    "                # fall back to 1 meter when no number is present.\n",
    "                for word in words[words.index(direction) + 1:]:\n",
    "                    try:\n",
    "                        return \"move\", direction, int(word)\n",
    "                    except ValueError:\n",
    "                        continue\n",
    "                return \"move\", direction, 1\n",
    "    return None, None, None"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "8efbad96-f79b-4837-b7c6-ea06c3561e6d",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Welcome to the Drone Simulator!\n",
      "Drone status: On ground, Position: [0, 0, 0]\n"
     ]
    },
    {
     "name": "stdin",
     "output_type": "stream",
     "text": [
      "Enter command for the drone:  could you please take off?\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Truncation was not explicitly activated but `max_length` is provided a specific value, please use `truncation=True` to explicitly truncate examples to max length. Defaulting to 'longest_first' truncation strategy. If you encode pairs of sequences (GLUE-style) with the tokenizer you can select this strategy more precisely by providing a specific strategy to `truncation`.\n",
      "Setting `pad_token_id` to `eos_token_id`:None for open-end generation.\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "LLM Output: could you please take off?\n",
      "\n",
      "\n",
      "How about this picture that got shared yesterday?\n",
      "\n",
      "\n",
      "And of course, \"the world is still waiting\"!\n",
      "\n",
      "\n",
      "Thanks everyone!\n",
      "\n",
      "\n",
      "Reply · Report Post\n",
      "Drone has taken off and is at an altitude of 10 meters.\n",
      "Drone status: Flying, Position: [0, 0, 10]\n"
     ]
    },
    {
     "name": "stdin",
     "output_type": "stream",
     "text": [
      "Enter command for the drone:  please land\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Setting `pad_token_id` to `eos_token_id`:None for open-end generation.\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "LLM Output: please land with your name on a page as well?\n",
      "\n",
      "With all the changes coming to the desktop browser, it's time for Firefox to move away from the traditional browsing experience. As far as mobile websites go, the desktop browser can do just\n",
      "Drone has landed safely.\n",
      "Drone status: On ground, Position: [0, 0, 0]\n"
     ]
    },
    {
     "name": "stdin",
     "output_type": "stream",
     "text": [
      "Enter command for the drone:  Move the drone upwards by 3 meters.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Setting `pad_token_id` to `eos_token_id`:None for open-end generation.\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "LLM Output: Move the drone upwards by 3 meters.\n",
      "\n",
      "You can turn the drone off at any time, using the same techniques that you follow.\n",
      "\n",
      "When you aim the remote at something, the drone will fly into your hand.\n",
      "\n",
      "When it\n",
      "Unknown command. Please try again.\n",
      "Drone status: On ground, Position: [0, 0, 0]\n"
     ]
    },
    {
     "name": "stdin",
     "output_type": "stream",
     "text": [
      "Enter command for the drone:  I want the drone to land.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Setting `pad_token_id` to `eos_token_id`:None for open-end generation.\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "LLM Output: I want the drone to land. The question is: do you know enough to do the math?\" Coyle asked.\n",
      "\n",
      "I asked whether the government is collecting such data because a large percentage of UAVs are being used by the military to\n",
      "Drone is already on the ground.\n",
      "Drone status: On ground, Position: [0, 0, 0]\n"
     ]
    },
    {
     "name": "stdin",
     "output_type": "stream",
     "text": [
      "Enter command for the drone:  I want the drone to landfall.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Setting `pad_token_id` to `eos_token_id`:None for open-end generation.\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "LLM Output: I want the drone to landfall. The plane doesn't look like it has been used for a day or two for that matter. It's all too much to bear for us to have an entire plane be there at night. If we can't get\n",
      "Drone is already on the ground.\n",
      "Drone status: On ground, Position: [0, 0, 0]\n"
     ]
    },
    {
     "name": "stdin",
     "output_type": "stream",
     "text": [
      "Enter command for the drone:  I want the drone to fall.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Setting `pad_token_id` to `eos_token_id`:None for open-end generation.\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "LLM Output: I want the drone to fall.\n",
      "\n",
      "In late February 2014, the U.S. government released two videos showing an airplane crashing on a remote part of the planet. The video shows a drone being dragged at high speed to the edge of the\n",
      "Unknown command. Please try again.\n",
      "Drone status: On ground, Position: [0, 0, 0]\n"
     ]
    }
   ],
   "source": [
    "def main():\n",
    "    \"\"\"Interactive REPL: read a command, parse it via the LLM, drive the UAV.\"\"\"\n",
    "    UAV = UAVSimulator()\n",
    "    print(\"Welcome to the UAV Simulator!\")\n",
    "    print(UAV.get_status())\n",
    "\n",
    "    while True:\n",
    "        user_input = input(\"Enter command for the UAV: \")\n",
    "\n",
    "        # Check for exit/quit BEFORE calling the LLM: previously this check\n",
    "        # came after parsing, so quitting cost a model call and the model's\n",
    "        # free-form output could match takeoff/land/move first, making it\n",
    "        # impossible to leave the loop.\n",
    "        if user_input.lower() in [\"exit\", \"quit\"]:\n",
    "            print(\"Exiting the UAV Simulator. Goodbye!\")\n",
    "            break\n",
    "\n",
    "        action_type, direction, distance = parse_command(user_input)\n",
    "\n",
    "        if action_type == \"takeoff\":\n",
    "            print(UAV.takeoff())\n",
    "        elif action_type == \"land\":\n",
    "            print(UAV.land())\n",
    "        elif action_type == \"move\":\n",
    "            print(UAV.move(direction, distance))\n",
    "        else:\n",
    "            print(\"Unknown command. Please try again.\")\n",
    "\n",
    "        print(UAV.get_status())\n",
    "\n",
    "if __name__ == \"__main__\":\n",
    "    main()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "fef3414c-9acc-4a47-b859-3876bb4b210d",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Test examples:\n",
    "# \"UAV, could you please take off now?\"\n",
    "# \"Move the UAV upwards by 3 meters.\"\n",
    "# \"UAV, land immediately.\""
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.12.1"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
