{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### LMDeploy（模型部署工具）\n",
    "\n",
    "部署工具两大作用\n",
    "1. 模型部署\n",
    "2. 模型量化"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "#### 安装(CUDA >= 11.3)\n",
    "\n",
    "1. pip install lmdeploy (python3.8~3.12)\n",
    "\n",
    "2. git clone https://github.com/InternLM/lmdeploy.git\n",
    "\n",
    "3. cuda > 12\n",
    "\n",
    "4. pip install -e ."
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "#### 使用\n",
    "* 离线处理"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "vscode": {
     "languageId": "plaintext"
    }
   },
   "outputs": [],
   "source": [
    "import lmdeploy\n",
    "pipe = lmdeploy.pipeline('模型路径')\n",
    "responses = pipe(['你好','你是谁'])\n",
    "print(responses)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "* 选择引擎\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "vscode": {
     "languageId": "plaintext"
    }
   },
   "outputs": [],
   "source": [
    "from lmdeploy import pipeline,PytorchEngineConfig\n",
    "pipe = pipeline('模型路径',backend_config=PytorchEngineConfig(\n",
    "    max_batch_size=1,\n",
    "    enable_prefix_caching=True,\n",
    "    cache_max_entry_count=0.8,\n",
    "    session_len=8192\n",
    "))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "* LLM模型服务\n",
    "\n",
    "    lmdeploy serve api_server 模型路径（默认监听 23333 端口）"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "* python调用"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "vscode": {
     "languageId": "plaintext"
    }
   },
   "outputs": [],
   "source": [
    "from openai import OpenAI\n",
    "client = OpenAI(\n",
    "    api_key = 'YOUR_API_KEY', # optional (any placeholder works for a local server)\n",
    "    base_url = \"http://0.0.0.0:23333/v1\" # optional; lmdeploy serves on 23333 by default\n",
    ")\n",
    "model_name = client.models.list().data[0].id\n",
    "response = client.chat.completions.create(\n",
    "    model = model_name,\n",
    "    messages = [\n",
    "        {\"role\": \"system\", \"content\": \"Hello!\"},\n",
    "        {\"role\": \"user\", \"content\": \"Hello!\"}\n",
    "    ],\n",
    "    temperature = 0.8,\n",
    "    top_p=0.8\n",
    ")"
   ]
  }
 ],
 "metadata": {
  "language_info": {
   "name": "python"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
