{
 "cells": [
  {
   "cell_type": "markdown",
   "id": "41ce62a8-251f-4f9e-b375-e93a5861c3fe",
   "metadata": {
    "id": "41ce62a8-251f-4f9e-b375-e93a5861c3fe"
   },
   "source": [
    "# RAG 从入门到精通\n",
    "\n",
    "这些笔记本介绍了从头开始构建 RAG 应用程序的过程。\n",
    "\n",
    "这些笔记本将帮助你对 RAG 的整体技术版图有更广泛的了解"
   ]
  },
  {
   "cell_type": "code",
   "id": "3a88555a-53a5-4ab8-ba3d-e6dd3a26c71a",
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "id": "3a88555a-53a5-4ab8-ba3d-e6dd3a26c71a",
    "outputId": "f0a36a55-ea3b-408f-aea3-1b21554aa85e",
    "tags": [],
    "jupyter": {
     "is_executing": true
    }
   },
   "source": [
    "# Use %pip (not !pip) so packages install into the environment of the running kernel\n",
    "%pip install -q langchain_community tiktoken langchain-openai langchainhub langchain\n",
    "%pip install -q chromadb==0.4.15\n",
    "%pip install -q beautifulsoup4\n",
    "%pip install -q --upgrade httpx httpx-sse PyJWT\n",
    "%pip install -q --upgrade dashscope"
   ],
   "outputs": [],
   "execution_count": null
  },
  {
   "cell_type": "markdown",
   "id": "e3bd0740-4c40-48df-954e-e6c3cd5a613a",
   "metadata": {
    "id": "e3bd0740-4c40-48df-954e-e6c3cd5a613a"
   },
   "source": [
    "# **请输入自己的API key**"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "id": "e53ddc9f-9ab0-4769-b4c9-6ca9e399e862",
   "metadata": {
    "id": "e53ddc9f-9ab0-4769-b4c9-6ca9e399e862"
   },
   "outputs": [],
   "source": [
    "from google.colab import userdata\n",
    "DASHSCOPE_API_KEY=userdata.get('DASHSCOPE_API_KEY')\n",
    "DEEPSEEK_API_KEY=userdata.get('DEEPSEEK_API_KEY')\n",
    "LANGCHAIN_API_KEY=userdata.get('LANGCHAIN_API_KEY')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "id": "b76f68a8-4745-4377-8057-6090b87377d1",
   "metadata": {
    "ExecutionIndicator": {
     "show": false
    },
    "execution": {
     "iopub.execute_input": "2024-10-28T03:00:31.326582Z",
     "iopub.status.busy": "2024-10-28T03:00:31.326310Z",
     "iopub.status.idle": "2024-10-28T03:00:31.330122Z",
     "shell.execute_reply": "2024-10-28T03:00:31.329697Z",
     "shell.execute_reply.started": "2024-10-28T03:00:31.326569Z"
    },
    "id": "b76f68a8-4745-4377-8057-6090b87377d1",
    "tags": []
   },
   "outputs": [],
   "source": [
    "from langchain_community.embeddings import DashScopeEmbeddings\n",
    "import os\n",
    "os.environ[\"LANGCHAIN_PROJECT\"] = \"RAG_基础\"  # plain string: the former f-prefix had no placeholders\n",
    "os.environ['LANGCHAIN_TRACING_V2'] = 'true'\n",
    "os.environ['LANGCHAIN_ENDPOINT'] = 'https://api.smith.langchain.com'\n",
    "os.environ['LANGCHAIN_API_KEY'] = LANGCHAIN_API_KEY\n",
    "os.environ['USER_AGENT'] = 'myagent'\n",
    "os.environ['DEEPSEEK_API_KEY'] = DEEPSEEK_API_KEY\n",
    "os.environ[\"DASHSCOPE_API_KEY\"] = DASHSCOPE_API_KEY"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "7e80e0a6-344d-4d62-a149-c10f1b1b7c3d",
   "metadata": {
    "ExecutionIndicator": {
     "show": false
    },
    "execution": {
     "iopub.execute_input": "2024-10-28T03:00:20.952328Z",
     "iopub.status.busy": "2024-10-28T03:00:20.952062Z",
     "iopub.status.idle": "2024-10-28T03:00:22.816405Z",
     "shell.execute_reply": "2024-10-28T03:00:22.816004Z",
     "shell.execute_reply.started": "2024-10-28T03:00:20.952313Z"
    },
    "tags": [],
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "id": "7e80e0a6-344d-4d62-a149-c10f1b1b7c3d",
    "outputId": "14ccd71f-247a-4e9c-86e8-7d86a577c827"
   },
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "您好！我是由中国的深度求索（DeepSeek）公司开发的智能助手DeepSeek-R1。如您有任何任何问题，我会尽我所能为您提供帮助。\n"
     ]
    }
   ],
   "source": [
    "# from langchain_community.chat_models import ChatZhipuAI\n",
    "# from langchain_core.messages import AIMessage, HumanMessage, SystemMessage\n",
    "\n",
    "# llm = ChatZhipuAI(model=\"glm-4-plus\",temperature=0.5)\n",
    "# messages = [\n",
    "#     AIMessage(content=\"Hi.\"),\n",
    "#     SystemMessage(content=\"Your role is a poet.\"),\n",
    "#     HumanMessage(content=\"Write a short poem about AI in four lines.\"),\n",
    "# ]\n",
    "\n",
    "# response = llm.invoke(messages)\n",
    "# print(response.content)  # Displays the AI-generated poem\n",
    "\n",
    "# from langchain_community.chat_models.tongyi import ChatTongyi\n",
    "# from langchain_core.messages import HumanMessage\n",
    "\n",
    "# llm = ChatTongyi(\n",
    "#     streaming=True,\n",
    "# )\n",
    "# res = llm.stream([HumanMessage(content=\"hi\")], streaming=True)\n",
    "# for r in res:\n",
    "#     print(\"chat resp:\", r)\n",
    "from langchain_openai import ChatOpenAI\n",
    "llm = ChatOpenAI(\n",
    "    model='deepseek-reasoner',\n",
    "    openai_api_key=DEEPSEEK_API_KEY,\n",
    "    openai_api_base='https://api.deepseek.com'\n",
    ")\n",
    "\n",
    "response = llm.invoke(\"你好，请问你是deepseek R1吗？\")\n",
    "print(response.content)"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "1eae0ab7-d43b-43e0-8b99-6122a636fe0c",
   "metadata": {
    "id": "1eae0ab7-d43b-43e0-8b99-6122a636fe0c"
   },
   "source": [
    "## 第 1 部分：概述"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "98070313-0c2f-4ba6-ae3e-79e2418ce4df",
   "metadata": {
    "id": "98070313-0c2f-4ba6-ae3e-79e2418ce4df"
   },
   "outputs": [],
   "source": [
    "import bs4\n",
    "from langchain import hub\n",
    "from langchain.text_splitter import RecursiveCharacterTextSplitter\n",
    "from langchain_community.document_loaders import WebBaseLoader\n",
    "from langchain_community.vectorstores import Chroma\n",
    "from langchain_core.output_parsers import StrOutputParser\n",
    "from langchain_core.runnables import RunnablePassthrough\n",
    "from langchain_openai import ChatOpenAI, OpenAIEmbeddings\n",
    "\n",
    "#### 索引 ####\n",
    "\n",
    "# 加载文档\n",
    "loader = WebBaseLoader(\n",
    "    web_paths=(\"https://baike.baidu.com/item/%E4%BA%BA%E5%B7%A5%E6%99%BA%E8%83%BD/9180?fr=ge_ala\",)\n",
    ")\n",
    "docs = loader.load()\n",
    "\n",
    "# 切分\n",
    "text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=200)\n",
    "splits = text_splitter.split_documents(docs)\n",
    "\n",
    "# 嵌入\n",
    "vectorstore = Chroma.from_documents(documents=splits,\n",
    "                                    embedding=OpenAIEmbeddings())\n",
    "\n",
    "retriever = vectorstore.as_retriever(search_kwargs={\"k\":5})\n",
    "\n",
    "#### 检索和生成 ####\n",
    "\n",
    "# Prompt\n",
    "prompt = hub.pull(\"rlm/rag-prompt\")\n",
    "\n",
    "# LLM\n",
    "llm = ChatOpenAI(model_name=\"gpt-4o\", temperature=0)\n",
    "\n",
    "# 后期处理\n",
    "def format_docs(docs):\n",
    "    return \"\\n\\n\".join(doc.page_content for doc in docs)\n",
    "\n",
    "# Chain\n",
    "rag_chain = (\n",
    "    {\"context\": retriever | format_docs, \"question\": RunnablePassthrough()}\n",
    "    | prompt\n",
    "    | llm\n",
    "    | StrOutputParser()\n",
    ")\n",
    "\n",
    "# 问答\n",
    "# rag_chain.invoke(\"能否简答介绍下什么是人工智能?\")"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "18e8e856-bafd-469e-b99a-11596b18aad4",
   "metadata": {
    "id": "18e8e856-bafd-469e-b99a-11596b18aad4"
   },
   "source": [
    "## 第 2 部分：索引"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "edd7beeb-21fa-4f4b-b8fa-5a4f70489a16",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2024-10-28T02:54:41.417189Z",
     "iopub.status.busy": "2024-10-28T02:54:41.416913Z",
     "iopub.status.idle": "2024-10-28T02:54:41.420263Z",
     "shell.execute_reply": "2024-10-28T02:54:41.419892Z",
     "shell.execute_reply.started": "2024-10-28T02:54:41.417173Z"
    },
    "id": "edd7beeb-21fa-4f4b-b8fa-5a4f70489a16",
    "tags": []
   },
   "outputs": [],
   "source": [
    "# 文件\n",
    "question = \"什么是人工智能？\"\n",
    "document = \"人工智能（Artificial Intelligence），英文缩写为AI，是新一轮科技革命和产业变革的重要驱动力量\""
   ]
  },
  {
   "cell_type": "markdown",
   "id": "4f04fd74-829f-472c-a1bc-ec6521a0529f",
   "metadata": {
    "id": "4f04fd74-829f-472c-a1bc-ec6521a0529f"
   },
   "source": [
    "将文本转换为词嵌入向量"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "id": "6bd98786-755d-4d49-ba97-30c5a623b74e",
   "metadata": {
    "ExecutionIndicator": {
     "show": true
    },
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "execution": {
     "iopub.execute_input": "2024-10-28T02:57:44.464302Z",
     "iopub.status.busy": "2024-10-28T02:57:44.464002Z",
     "iopub.status.idle": "2024-10-28T02:57:44.902435Z",
     "shell.execute_reply": "2024-10-28T02:57:44.902025Z",
     "shell.execute_reply.started": "2024-10-28T02:57:44.464285Z"
    },
    "id": "6bd98786-755d-4d49-ba97-30c5a623b74e",
    "outputId": "80e989b6-1564-48bf-e58e-6b41604491f8",
    "tags": []
   },
   "outputs": [
    {
     "output_type": "execute_result",
     "data": {
      "text/plain": [
       "1536"
      ]
     },
     "metadata": {},
     "execution_count": 7
    }
   ],
   "source": [
    "embd=DashScopeEmbeddings(\n",
    "    model=\"text-embedding-v1\", dashscope_api_key=DASHSCOPE_API_KEY\n",
    ")\n",
    "query_result = embd.embed_query(question)\n",
    "document_result = embd.embed_query(document)\n",
    "len(query_result)"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "f5e0e35f-6861-4c5e-9301-04fd5408f8f8",
   "metadata": {
    "id": "f5e0e35f-6861-4c5e-9301-04fd5408f8f8"
   },
   "source": [
    "## 看一下两者的相似度"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "id": "b8001998-b08c-4560-b124-bfa1fced8958",
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "execution": {
     "iopub.execute_input": "2024-10-28T02:57:47.598290Z",
     "iopub.status.busy": "2024-10-28T02:57:47.598039Z",
     "iopub.status.idle": "2024-10-28T02:57:47.602049Z",
     "shell.execute_reply": "2024-10-28T02:57:47.601631Z",
     "shell.execute_reply.started": "2024-10-28T02:57:47.598276Z"
    },
    "id": "b8001998-b08c-4560-b124-bfa1fced8958",
    "outputId": "ec8bae32-f87f-42ff-e529-471a26c01d05",
    "tags": []
   },
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "Cosine Similarity: 0.8333941339585715\n"
     ]
    }
   ],
   "source": [
    "import numpy as np\n",
    "\n",
    "def cosine_similarity(vec1, vec2):\n",
    "    dot_product = np.dot(vec1, vec2)\n",
    "    norm_vec1 = np.linalg.norm(vec1)\n",
    "    norm_vec2 = np.linalg.norm(vec2)\n",
    "    return dot_product / (norm_vec1 * norm_vec2)\n",
    "\n",
    "similarity = cosine_similarity(query_result, document_result)\n",
    "print(\"Cosine Similarity:\", similarity)"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "8aea73bc-98e3-4fdc-ba72-d190736bed20",
   "metadata": {
    "id": "8aea73bc-98e3-4fdc-ba72-d190736bed20"
   },
   "source": [
    "加载文档"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "id": "5778c31a-6138-4130-8865-31a08e82b9fb",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2024-10-28T02:56:39.428652Z",
     "iopub.status.busy": "2024-10-28T02:56:39.428305Z",
     "iopub.status.idle": "2024-10-28T02:56:39.858293Z",
     "shell.execute_reply": "2024-10-28T02:56:39.857901Z",
     "shell.execute_reply.started": "2024-10-28T02:56:39.428635Z"
    },
    "id": "5778c31a-6138-4130-8865-31a08e82b9fb",
    "tags": []
   },
   "outputs": [],
   "source": [
    "#### 索引 ####\n",
    "\n",
    "# 加载文档\n",
    "import bs4\n",
    "from langchain_community.document_loaders import WebBaseLoader\n",
    "loader = WebBaseLoader(\n",
    "    web_paths=(\"https://baike.baidu.com/item/%E4%BA%BA%E5%B7%A5%E6%99%BA%E8%83%BD/9180?fr=ge_ala\",)\n",
    ")\n",
    "blog_docs = loader.load()"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "798e731e-c6ff-46e3-a8bc-386832362af2",
   "metadata": {
    "id": "798e731e-c6ff-46e3-a8bc-386832362af2"
   },
   "source": [
    "分割文档\n",
    "\n",
    "> 对于一般文本，推荐使用此文本分割器。 它由字符列表参数化。 它尝试按顺序分割它们，直到块足够小。 默认列表为 [\"\\n\\n\", \"\\n\", \" \", \"\"]。 这样做的效果是尝试将所有段落（然后是句子，然后是单词）尽可能地放在一起，因为这些通常看起来是语义相关性最强的文本片段。"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "id": "e668d339-3951-4662-8387-c3d296646906",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2024-10-28T02:56:54.455432Z",
     "iopub.status.busy": "2024-10-28T02:56:54.455158Z",
     "iopub.status.idle": "2024-10-28T02:56:54.717058Z",
     "shell.execute_reply": "2024-10-28T02:56:54.716646Z",
     "shell.execute_reply.started": "2024-10-28T02:56:54.455415Z"
    },
    "id": "e668d339-3951-4662-8387-c3d296646906",
    "tags": []
   },
   "outputs": [],
   "source": [
    "# 分割\n",
    "from langchain.text_splitter import RecursiveCharacterTextSplitter\n",
    "text_splitter = RecursiveCharacterTextSplitter.from_tiktoken_encoder(\n",
    "    chunk_size=300,\n",
    "    chunk_overlap=50)\n",
    "\n",
    "# 进行分割\n",
    "splits = text_splitter.split_documents(blog_docs)"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "427303a1-3ed4-430c-bfc7-cb3e48022f1d",
   "metadata": {
    "id": "427303a1-3ed4-430c-bfc7-cb3e48022f1d"
   },
   "source": [
    "向量数据库"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "id": "baa90aaf-cc1b-46a1-9fba-cf20804dcb41",
   "metadata": {
    "ExecutionIndicator": {
     "show": true
    },
    "execution": {
     "iopub.execute_input": "2024-10-28T02:58:09.564701Z",
     "iopub.status.busy": "2024-10-28T02:58:09.564440Z",
     "iopub.status.idle": "2024-10-28T02:58:15.378863Z",
     "shell.execute_reply": "2024-10-28T02:58:15.378394Z",
     "shell.execute_reply.started": "2024-10-28T02:58:09.564687Z"
    },
    "id": "baa90aaf-cc1b-46a1-9fba-cf20804dcb41",
    "tags": []
   },
   "outputs": [],
   "source": [
    "# 索引\n",
    "from langchain_openai import OpenAIEmbeddings\n",
    "from langchain_community.vectorstores import Chroma\n",
    "vectorstore = Chroma.from_documents(documents=splits,\n",
    "                                    embedding=embd)"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "ba890329-1411-4922-bd27-fe0490dd1208",
   "metadata": {
    "id": "ba890329-1411-4922-bd27-fe0490dd1208"
   },
   "source": [
    "## 第 3 部分：检索"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "id": "fafdada1-4c4e-41f8-ad1a-33861aae3930",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2024-10-28T02:58:38.482728Z",
     "iopub.status.busy": "2024-10-28T02:58:38.482366Z",
     "iopub.status.idle": "2024-10-28T02:58:43.922476Z",
     "shell.execute_reply": "2024-10-28T02:58:43.922051Z",
     "shell.execute_reply.started": "2024-10-28T02:58:38.482712Z"
    },
    "id": "fafdada1-4c4e-41f8-ad1a-33861aae3930",
    "tags": []
   },
   "outputs": [],
   "source": [
    "# 索引\n",
    "from langchain_openai import OpenAIEmbeddings\n",
    "from langchain_community.vectorstores import Chroma\n",
    "vectorstore = Chroma.from_documents(documents=splits,\n",
    "                                    embedding=embd)\n",
    "\n",
    "\n",
    "retriever = vectorstore.as_retriever(search_kwargs={\"k\": 5})"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "id": "57c2de7a-93e6-4072-bc5b-db6516f96dda",
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "execution": {
     "iopub.execute_input": "2024-10-28T02:58:49.382305Z",
     "iopub.status.busy": "2024-10-28T02:58:49.382015Z",
     "iopub.status.idle": "2024-10-28T02:58:49.596592Z",
     "shell.execute_reply": "2024-10-28T02:58:49.596188Z",
     "shell.execute_reply.started": "2024-10-28T02:58:49.382290Z"
    },
    "id": "57c2de7a-93e6-4072-bc5b-db6516f96dda",
    "outputId": "4b9737e2-d4ef-4122-f634-91021955426a",
    "tags": []
   },
   "outputs": [
    {
     "output_type": "stream",
     "name": "stderr",
     "text": [
      "<ipython-input-13-d8be032a70a5>:1: LangChainDeprecationWarning: The method `BaseRetriever.get_relevant_documents` was deprecated in langchain-core 0.1.46 and will be removed in 1.0. Use :meth:`~invoke` instead.\n",
      "  docs = retriever.get_relevant_documents(\"什么是人工智能？\")\n"
     ]
    }
   ],
   "source": [
    "# get_relevant_documents was deprecated in langchain-core 0.1.46; invoke() is the replacement\n",
    "docs = retriever.invoke(\"什么是人工智能？\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "id": "db96f877-60d3-4741-9846-e2903831583d",
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "execution": {
     "iopub.execute_input": "2024-10-28T02:58:59.390253Z",
     "iopub.status.busy": "2024-10-28T02:58:59.389864Z",
     "iopub.status.idle": "2024-10-28T02:58:59.394196Z",
     "shell.execute_reply": "2024-10-28T02:58:59.393708Z",
     "shell.execute_reply.started": "2024-10-28T02:58:59.390238Z"
    },
    "id": "db96f877-60d3-4741-9846-e2903831583d",
    "outputId": "dfec5ba3-44b7-4966-9414-0cbc30c6e804",
    "tags": []
   },
   "outputs": [
    {
     "output_type": "execute_result",
     "data": {
      "text/plain": [
       "5"
      ]
     },
     "metadata": {},
     "execution_count": 14
    }
   ],
   "source": [
    "len(docs)"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "beda1b07-7bd2-4f5b-8d44-1fc52f5d2ce2",
   "metadata": {
    "id": "beda1b07-7bd2-4f5b-8d44-1fc52f5d2ce2"
   },
   "source": [
    "## 第四部分：生成"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "id": "8beb6c14-5e18-43e7-9d04-59e3b8a81cc9",
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "execution": {
     "iopub.execute_input": "2024-10-28T02:59:39.827837Z",
     "iopub.status.busy": "2024-10-28T02:59:39.827556Z",
     "iopub.status.idle": "2024-10-28T02:59:39.832881Z",
     "shell.execute_reply": "2024-10-28T02:59:39.832532Z",
     "shell.execute_reply.started": "2024-10-28T02:59:39.827816Z"
    },
    "id": "8beb6c14-5e18-43e7-9d04-59e3b8a81cc9",
    "outputId": "94006fb8-5470-4206-d5bc-12a77a875901",
    "tags": []
   },
   "outputs": [
    {
     "output_type": "execute_result",
     "data": {
      "text/plain": [
       "ChatPromptTemplate(input_variables=['context', 'question'], input_types={}, partial_variables={}, messages=[HumanMessagePromptTemplate(prompt=PromptTemplate(input_variables=['context', 'question'], input_types={}, partial_variables={}, template='您是问答任务的助手。使用以下检索到的上下文来回答问题。如果您不知道答案，就说您不知道。\\n{context}\\n\\n问题：{question}\\n'), additional_kwargs={})])"
      ]
     },
     "metadata": {},
     "execution_count": 15
    }
   ],
   "source": [
    "from langchain_openai import ChatOpenAI\n",
    "from langchain.prompts import ChatPromptTemplate\n",
    "\n",
    "# Prompt\n",
    "# template = \"\"\"Answer the question based on the following retrieved context:\n",
    "# {context}\n",
    "\n",
    "# Question: {question}\n",
    "# \"\"\"\n",
    "\n",
    "\n",
    "template = \"\"\"您是问答任务的助手。使用以下检索到的上下文来回答问题。如果您不知道答案，就说您不知道。\n",
    "{context}\n",
    "\n",
    "问题：{question}\n",
    "\"\"\"\n",
    "\n",
    "prompt = ChatPromptTemplate.from_template(template)\n",
    "prompt"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "id": "55d6629f-18ec-4372-a557-b254fbb1dd2d",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2024-10-28T03:00:38.066448Z",
     "iopub.status.busy": "2024-10-28T03:00:38.066171Z",
     "iopub.status.idle": "2024-10-28T03:00:38.069167Z",
     "shell.execute_reply": "2024-10-28T03:00:38.068724Z",
     "shell.execute_reply.started": "2024-10-28T03:00:38.066433Z"
    },
    "id": "55d6629f-18ec-4372-a557-b254fbb1dd2d",
    "tags": []
   },
   "outputs": [],
   "source": [
    "# Chain\n",
    "chain = prompt | llm"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "id": "94470770-8df4-4359-9504-ef6c8b3137ff",
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "execution": {
     "iopub.execute_input": "2024-10-28T03:00:40.132665Z",
     "iopub.status.busy": "2024-10-28T03:00:40.132405Z",
     "iopub.status.idle": "2024-10-28T03:00:43.040274Z",
     "shell.execute_reply": "2024-10-28T03:00:43.039912Z",
     "shell.execute_reply.started": "2024-10-28T03:00:40.132650Z"
    },
    "id": "94470770-8df4-4359-9504-ef6c8b3137ff",
    "outputId": "f1da2859-d6be-41e1-940b-9e8670526855",
    "tags": []
   },
   "outputs": [
    {
     "output_type": "stream",
     "name": "stderr",
     "text": [
      "ERROR:langsmith._internal._serde:Failed to use model_dump to serialize <class 'langchain_core.vectorstores.base.VectorStoreRetriever'> to JSON: PydanticSerializationError(Unable to serialize unknown type: <class 'langchain_community.vectorstores.chroma.Chroma'>)\n",
      "ERROR:langsmith._internal._serde:Failed to use model_dump to serialize <class 'langchain_core.vectorstores.base.VectorStoreRetriever'> to JSON: PydanticSerializationError(Unable to serialize unknown type: <class 'langchain_community.vectorstores.chroma.Chroma'>)\n",
      "ERROR:langsmith._internal._serde:Failed to use model_dump to serialize <class 'langchain_core.vectorstores.base.VectorStoreRetriever'> to JSON: PydanticSerializationError(Unable to serialize unknown type: <class 'langchain_community.vectorstores.chroma.Chroma'>)\n",
      "ERROR:langsmith._internal._serde:Failed to use model_dump to serialize <class 'langchain_core.vectorstores.base.VectorStoreRetriever'> to JSON: PydanticSerializationError(Unable to serialize unknown type: <class 'langchain_community.vectorstores.chroma.Chroma'>)\n"
     ]
    },
    {
     "output_type": "execute_result",
     "data": {
      "text/plain": [
       "AIMessage(content='由于提供的上下文中没有包含与“人工智能”相关的具体信息或定义，我无法根据给定的检索内容回答这个问题。如果您有其他上下文或具体问题需要解答，请提供更多信息。', additional_kwargs={'refusal': None}, response_metadata={'token_usage': {'completion_tokens': 201, 'prompt_tokens': 79, 'total_tokens': 280, 'completion_tokens_details': {'accepted_prediction_tokens': None, 'audio_tokens': None, 'reasoning_tokens': 160, 'rejected_prediction_tokens': None}, 'prompt_tokens_details': {'audio_tokens': None, 'cached_tokens': 0}, 'prompt_cache_hit_tokens': 0, 'prompt_cache_miss_tokens': 79}, 'model_name': 'deepseek-reasoner', 'system_fingerprint': 'fp_7e73fd9a08', 'finish_reason': 'stop', 'logprobs': None}, id='run-de053d3b-02c0-496f-a974-fe77668bacf6-0', usage_metadata={'input_tokens': 79, 'output_tokens': 201, 'total_tokens': 280, 'input_token_details': {'cache_read': 0}, 'output_token_details': {'reasoning': 160}})"
      ]
     },
     "metadata": {},
     "execution_count": 17
    }
   ],
   "source": [
    "# Run — retrieve documents for the question and format them into plain text first.\n",
    "# Passing the retriever object itself as \"context\" only inserts its repr into the\n",
    "# prompt, so the LLM never sees the retrieved documents.\n",
    "question = \"什么是人工智能？\"\n",
    "context = \"\\n\\n\".join(doc.page_content for doc in retriever.invoke(question))\n",
    "chain.invoke({\"context\": context, \"question\": question})"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "-xYQUWly5xu2",
   "metadata": {
    "id": "-xYQUWly5xu2"
   },
   "source": [
    "## 修改Prompt"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "id": "65770e2d-3d5e-4371-abc9-0aeca9646885",
   "metadata": {
    "id": "65770e2d-3d5e-4371-abc9-0aeca9646885"
   },
   "outputs": [],
   "source": [
    "from langchain import hub\n",
    "prompt_hub_rag = hub.pull(\"rlm/rag-prompt\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "id": "f53e5840-0a0f-4428-a4a4-6922800aff89",
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "id": "f53e5840-0a0f-4428-a4a4-6922800aff89",
    "outputId": "c5d762e4-f136-4950-a0b4-753463aa462c"
   },
   "outputs": [
    {
     "output_type": "execute_result",
     "data": {
      "text/plain": [
       "ChatPromptTemplate(input_variables=['context', 'question'], input_types={}, partial_variables={}, metadata={'lc_hub_owner': 'rlm', 'lc_hub_repo': 'rag-prompt', 'lc_hub_commit_hash': '50442af133e61576e74536c6556cefe1fac147cad032f4377b60c436e6cdcb6e'}, messages=[HumanMessagePromptTemplate(prompt=PromptTemplate(input_variables=['context', 'question'], input_types={}, partial_variables={}, template=\"You are an assistant for question-answering tasks. Use the following pieces of retrieved context to answer the question. If you don't know the answer, just say that you don't know. Use three sentences maximum and keep the answer concise.\\nQuestion: {question} \\nContext: {context} \\nAnswer:\"), additional_kwargs={})])"
      ]
     },
     "metadata": {},
     "execution_count": 19
    }
   ],
   "source": [
    "prompt_hub_rag"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "id": "8208a8bc-c75f-4e8e-8601-680746cd6276",
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 70
    },
    "id": "8208a8bc-c75f-4e8e-8601-680746cd6276",
    "outputId": "f3fb4d64-521d-4c33-eb81-27c6bf1d24c9"
   },
   "outputs": [
    {
     "output_type": "execute_result",
     "data": {
      "text/plain": [
       "'人工智能（Artificial Intelligence，AI）是研究、开发用于模拟、延伸和扩展人类智能的理论、方法、技术及应用系统的一门新的技术科学。作为智能学科的重要组成部分，其核心目标是探索智能的本质，并构建能够以类似人类智能的方式做出反应的智能机器。人工智能涵盖广泛的技术领域，包括机器人技术、语言识别、图像识别、自然语言处理、专家系统、机器学习及计算机视觉等。它被视为推动新一轮科技革命与产业变革的关键驱动力，在技术发展中也需同步关注治理挑战与伦理规范，以实现规范与创新的动态平衡。'"
      ],
      "application/vnd.google.colaboratory.intrinsic+json": {
       "type": "string"
      }
     },
     "metadata": {},
     "execution_count": 20
    }
   ],
   "source": [
    "from langchain_core.output_parsers import StrOutputParser\n",
    "from langchain_core.runnables import RunnablePassthrough\n",
    "\n",
    "# Join the retrieved documents into plain text for the prompt's {context} slot\n",
    "def format_docs(docs):\n",
    "    return \"\\n\\n\".join(doc.page_content for doc in docs)\n",
    "\n",
    "# This section's point is the hub prompt pulled above — pipe through prompt_hub_rag,\n",
    "# and feed it formatted text rather than raw Document objects.\n",
    "rag_chain = (\n",
    "    {\"context\": retriever | format_docs, \"question\": RunnablePassthrough()}\n",
    "    | prompt_hub_rag\n",
    "    | llm\n",
    "    | StrOutputParser()\n",
    ")\n",
    "\n",
    "rag_chain.invoke(\"什么是人工智能？\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "3fb474b9-1e38-4a20-91cc-24cdce6d8631",
   "metadata": {
    "id": "3fb474b9-1e38-4a20-91cc-24cdce6d8631"
   },
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "colab": {
   "provenance": [],
   "collapsed_sections": [
    "1eae0ab7-d43b-43e0-8b99-6122a636fe0c"
   ]
  },
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.11.1"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
