{
 "cells": [
  {
   "cell_type": "markdown",
   "id": "632c3c66",
   "metadata": {},
   "source": [
    "# 加载文档"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "d34a5b90",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Use %pip (not !pip) so packages are installed into the active kernel's environment.\n",
    "%pip install -q langchain-openai langchainhub\n",
    "%pip install -q chromadb==0.4.15\n",
    "%pip install -q --upgrade httpx httpx-sse PyJWT\n",
    "%pip install -q --upgrade dashscope"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "id": "1f7b7ab1",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "OPENAI_BASE_URL: ········\n"
     ]
    }
   ],
   "source": [
    "import os, getpass\n",
    "\n",
    "def _set_env(var: str):\n",
    "    if not os.environ.get(var):\n",
    "        os.environ[var] = getpass.getpass(f\"{var}: \")\n",
    "    \n",
    "_set_env(\"DEEPSEEK_API_KEY\")\n",
    "_set_env(\"OPENAI_API_KEY\")\n",
    "_set_env(\"OPENAI_BASE_URL\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 32,
   "id": "112793ea",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "sk-****************************\n",
      "https://api.fe8.cn/v1\n"
     ]
    }
   ],
   "source": [
    "# Never print secrets in clear text: cell outputs are saved inside the .ipynb\n",
    "# file and leak when the notebook is shared or committed.\n",
    "print(os.environ[\"DEEPSEEK_API_KEY\"][:5] + \"***\")\n",
    "print(os.environ[\"OPENAI_BASE_URL\"])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "id": "3a009792",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "你好！我是 **DeepSeek-R1**，由深度求索（DeepSeek）研发的智能助手。我的知识更新到 **2024年7月**，支持中文和英文对话，可以处理上传的文本文件（如PDF、Word、Excel等），帮助你阅读、总结、翻译或解答问题。\n",
      "\n",
      "如果你之前用过 DeepSeek 的其他模型，比如 DeepSeek-Coder 或 DeepSeek-V2，我是它的新一代版本——**DeepSeek-R1**，理解力更强、逻辑更清晰，也更擅长处理复杂任务 😊\n",
      "\n",
      "有什么我可以帮你的吗？\n"
     ]
    }
   ],
   "source": [
    "from langchain_openai import ChatOpenAI\n",
    "\n",
    "# Read the key from the environment (set earlier via getpass / .env).\n",
    "# The bare name DEEPSEEK_API_KEY is only bound in a *later* cell, so\n",
    "# referencing it here raises NameError under Restart Kernel -> Run All.\n",
    "llm = ChatOpenAI(\n",
    "    model='deepseek-reasoner',\n",
    "    openai_api_key=os.environ[\"DEEPSEEK_API_KEY\"],\n",
    "    openai_api_base='https://api.deepseek.com'\n",
    ")\n",
    "\n",
    "response = llm.invoke(\"你好，请问你是deepseek R1吗？\")\n",
    "print(response.content)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 48,
   "id": "20962b7d",
   "metadata": {},
   "outputs": [],
   "source": [
    "import os\n",
    "from openai import OpenAI\n",
    "from dotenv import load_dotenv,find_dotenv\n",
    "load_dotenv(find_dotenv())  # 加载 .env 文件中的环境变量\n",
    "\n",
    "DASHSCOPE_API_KEY = os.environ.get('DASHSCOPE_API_KEY')\n",
    "DEEPSEEK_API_KEY = os.environ.get('DEEPSEEK_API_KEY')\n",
    "LANGCHAIN_API_KEY= os.environ.get('LANGCHAIN_API_KEY')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "id": "4a323e97",
   "metadata": {
    "scrolled": false
   },
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "D:\\CacheData\\anaconda\\envs\\python310\\lib\\site-packages\\langsmith\\client.py:280: LangSmithMissingAPIKeyWarning: API key must be provided when using hosted LangSmith API\n",
      "  warnings.warn(\n"
     ]
    }
   ],
   "source": [
    "import bs4\n",
    "from langchain import hub\n",
    "from langchain.text_splitter import RecursiveCharacterTextSplitter\n",
    "from langchain_community.document_loaders import WebBaseLoader\n",
    "from langchain_community.vectorstores import Chroma\n",
    "from langchain_core.output_parsers import StrOutputParser\n",
    "from langchain_core.runnables import RunnablePassthrough\n",
    "from langchain_openai import ChatOpenAI, OpenAIEmbeddings\n",
    "\n",
    "#### 索引 ####\n",
    "\n",
    "# 加载文档\n",
    "loader = WebBaseLoader(\n",
    "    web_paths=(\"https://baike.baidu.com/item/%E4%BA%BA%E5%B7%A5%E6%99%BA%E8%83%BD/9180?fr=ge_ala\",)\n",
    ")\n",
    "docs = loader.load()\n",
    "\n",
    "# 切分\n",
    "text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=200)\n",
    "splits = text_splitter.split_documents(docs)\n",
    "\n",
    "# 嵌入\n",
    "vectorstore = Chroma.from_documents(documents=splits,\n",
    "                                    embedding=OpenAIEmbeddings())\n",
    "\n",
    "retriever = vectorstore.as_retriever(search_kwargs={\"k\":5})\n",
    "\n",
    "#### 检索和生成 ####\n",
    "\n",
    "# Prompt\n",
    "prompt = hub.pull(\"rlm/rag-prompt\")\n",
    "\n",
    "# LLM\n",
    "llm = ChatOpenAI(model_name=\"gpt-4o\", temperature=0)\n",
    "\n",
    "# 后期处理\n",
    "def format_docs(docs):\n",
    "    return \"\\n\\n\".join(doc.page_content for doc in docs)\n",
    "\n",
    "# Chain\n",
    "rag_chain = (\n",
    "    {\"context\": retriever | format_docs, \"question\": RunnablePassthrough()}\n",
    "    | prompt\n",
    "    | llm\n",
    "    | StrOutputParser()\n",
    ")\n",
    "\n",
    "# 问答\n",
    "# rag_chain.invoke(\"能否简答介绍下什么是人工智能?\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 35,
   "id": "0263fcdd",
   "metadata": {
    "scrolled": false
   },
   "outputs": [],
   "source": [
    "# 文件\n",
    "question = \"什么是人工智能？\"\n",
    "document = \"人工智能（Artificial Intelligence），英文缩写为AI，是新一轮科技革命和产业变革的重要驱动力量\""
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 36,
   "id": "523f3757",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "1536"
      ]
     },
     "execution_count": 36,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# DashScopeEmbeddings is never imported anywhere else in this notebook, so\n",
    "# import it here to keep the cell runnable on a fresh kernel.\n",
    "from langchain_community.embeddings import DashScopeEmbeddings\n",
    "\n",
    "embd = DashScopeEmbeddings(\n",
    "    model=\"text-embedding-v1\", dashscope_api_key=DASHSCOPE_API_KEY\n",
    ")\n",
    "query_result = embd.embed_query(question)\n",
    "document_result = embd.embed_query(document)\n",
    "len(query_result)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 37,
   "id": "7c7a2a01",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Cosine Similarity: 0.8333941339585715\n"
     ]
    }
   ],
   "source": [
    "import numpy as np\n",
    "\n",
    "def cosine_similarity(vec1, vec2):\n",
    "    dot_product = np.dot(vec1, vec2)\n",
    "    norm_vec1 = np.linalg.norm(vec1)\n",
    "    norm_vec2 = np.linalg.norm(vec2)\n",
    "    return dot_product / (norm_vec1 * norm_vec2)\n",
    "\n",
    "similarity = cosine_similarity(query_result, document_result)\n",
    "print(\"Cosine Similarity:\", similarity)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 38,
   "id": "c86f0483",
   "metadata": {},
   "outputs": [],
   "source": [
    "#### 索引 ####\n",
    "\n",
    "# 加载文档\n",
    "import bs4\n",
    "from langchain_community.document_loaders import WebBaseLoader\n",
    "loader = WebBaseLoader(\n",
    "    web_paths=(\"https://baike.baidu.com/item/%E4%BA%BA%E5%B7%A5%E6%99%BA%E8%83%BD/9180?fr=ge_ala\",)\n",
    ")\n",
    "blog_docs = loader.load()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 39,
   "id": "67498461",
   "metadata": {},
   "outputs": [],
   "source": [
    "# 分割\n",
    "from langchain.text_splitter import RecursiveCharacterTextSplitter\n",
    "text_splitter = RecursiveCharacterTextSplitter.from_tiktoken_encoder(\n",
    "    chunk_size=300,\n",
    "    chunk_overlap=50)\n",
    "\n",
    "# 进行分割\n",
    "splits = text_splitter.split_documents(blog_docs)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 40,
   "id": "e047e2ef",
   "metadata": {},
   "outputs": [],
   "source": [
    "# 索引\n",
    "from langchain_openai import OpenAIEmbeddings\n",
    "from langchain_community.vectorstores import Chroma\n",
    "vectorstore = Chroma.from_documents(documents=splits,\n",
    "                                    embedding=embd)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 41,
   "id": "d1147dde",
   "metadata": {},
   "outputs": [],
   "source": [
    "# 索引\n",
    "from langchain_openai import OpenAIEmbeddings\n",
    "from langchain_community.vectorstores import Chroma\n",
    "vectorstore = Chroma.from_documents(documents=splits,\n",
    "                                    embedding=embd)\n",
    "\n",
    "\n",
    "retriever = vectorstore.as_retriever(search_kwargs={\"k\": 5})"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 42,
   "id": "ca765dcc",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "C:\\Users\\MI\\AppData\\Local\\Temp\\ipykernel_15312\\3239286720.py:1: LangChainDeprecationWarning: The method `BaseRetriever.get_relevant_documents` was deprecated in langchain-core 0.1.46 and will be removed in 1.0. Use :meth:`~invoke` instead.\n",
      "  docs = retriever.get_relevant_documents(\"什么是人工智能？\")\n"
     ]
    }
   ],
   "source": [
    "# get_relevant_documents() is deprecated since langchain-core 0.1.46 (see the\n",
    "# warning this cell emitted); invoke() is the supported replacement.\n",
    "docs = retriever.invoke(\"什么是人工智能？\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 43,
   "id": "5658001b",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "5"
      ]
     },
     "execution_count": 43,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "len(docs)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 44,
   "id": "4e383667",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "ChatPromptTemplate(input_variables=['context', 'question'], input_types={}, partial_variables={}, messages=[HumanMessagePromptTemplate(prompt=PromptTemplate(input_variables=['context', 'question'], input_types={}, partial_variables={}, template='您是问答任务的助手。使用以下检索到的上下文来回答问题。如果您不知道答案，就说您不知道。\\n{context}\\n\\n问题：{question}\\n'), additional_kwargs={})])"
      ]
     },
     "execution_count": 44,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "from langchain_openai import ChatOpenAI\n",
    "from langchain.prompts import ChatPromptTemplate\n",
    "\n",
    "# Prompt\n",
    "# template = \"\"\"Answer the question based on the following retrieved context:\n",
    "# {context}\n",
    "\n",
    "# Question: {question}\n",
    "# \"\"\"\n",
    "\n",
    "\n",
    "template = \"\"\"您是问答任务的助手。使用以下检索到的上下文来回答问题。如果您不知道答案，就说您不知道。\n",
    "{context}\n",
    "\n",
    "问题：{question}\n",
    "\"\"\"\n",
    "\n",
    "prompt = ChatPromptTemplate.from_template(template)\n",
    "prompt"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 45,
   "id": "4c1ff612",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Chain\n",
    "chain = prompt | llm"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 46,
   "id": "d4e81fd8",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "AIMessage(content='人工智能（AI）是计算机科学的一个领域，致力于创建能够执行通常需要人类智能的任务的系统。这些任务包括但不限于学习、推理、问题解决、感知、语言理解和生成、以及运动能力。人工智能技术可以应用于各种领域，如自动驾驶汽车、语音识别、图像识别、自然语言处理和机器人技术等。AI系统通常通过算法和模型来处理数据，从中学习并做出决策。', additional_kwargs={'refusal': None}, response_metadata={'token_usage': {'completion_tokens': 106, 'prompt_tokens': 87, 'total_tokens': 193, 'completion_tokens_details': None, 'prompt_tokens_details': None}, 'model_name': 'gpt-4o-2024-08-06', 'system_fingerprint': 'fp_ee1d74bde0', 'id': 'chatcmpl-Bcvs3m6WtzMqTNfhejSevPGpz5gqJ', 'service_tier': None, 'finish_reason': 'stop', 'logprobs': None}, id='run--d425379c-bea4-4bb9-a089-a2aff694482e-0', usage_metadata={'input_tokens': 87, 'output_tokens': 106, 'total_tokens': 193, 'input_token_details': {}, 'output_token_details': {}})"
      ]
     },
     "execution_count": 46,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Run. Retrieve documents first: `chain = prompt | llm` is a plain chain, so\n",
    "# passing the retriever object as \"context\" would stringify the retriever's\n",
    "# repr into the prompt instead of the retrieved document text.\n",
    "context_docs = retriever.invoke(\"什么是人工智能？\")\n",
    "context_text = \"\\n\\n\".join(doc.page_content for doc in context_docs)\n",
    "chain.invoke({\"context\": context_text, \"question\": \"什么是人工智能？\"})"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 49,
   "id": "07cd8542",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "D:\\CacheData\\anaconda\\envs\\python310\\lib\\site-packages\\langsmith\\client.py:280: LangSmithMissingAPIKeyWarning: API key must be provided when using hosted LangSmith API\n",
      "  warnings.warn(\n"
     ]
    }
   ],
   "source": [
    "from langchain import hub\n",
    "prompt_hub_rag = hub.pull(\"rlm/rag-prompt\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 50,
   "id": "4eac8cf7",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "ChatPromptTemplate(input_variables=['context', 'question'], input_types={}, partial_variables={}, metadata={'lc_hub_owner': 'rlm', 'lc_hub_repo': 'rag-prompt', 'lc_hub_commit_hash': '50442af133e61576e74536c6556cefe1fac147cad032f4377b60c436e6cdcb6e'}, messages=[HumanMessagePromptTemplate(prompt=PromptTemplate(input_variables=['context', 'question'], input_types={}, partial_variables={}, template=\"You are an assistant for question-answering tasks. Use the following pieces of retrieved context to answer the question. If you don't know the answer, just say that you don't know. Use three sentences maximum and keep the answer concise.\\nQuestion: {question} \\nContext: {context} \\nAnswer:\"), additional_kwargs={})])"
      ]
     },
     "execution_count": 50,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "prompt_hub_rag"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 52,
   "id": "98b5661b",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "'我不知道。'"
      ]
     },
     "execution_count": 52,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "from langchain_core.output_parsers import StrOutputParser\n",
    "from langchain_core.runnables import RunnablePassthrough\n",
    "\n",
    "# Pipe the retriever through format_docs (defined earlier) so {context} gets\n",
    "# the joined page_content rather than the repr of a list of Document objects —\n",
    "# the raw-list prompt is the likely reason the model answered \"我不知道。\".\n",
    "rag_chain = (\n",
    "    {\"context\": retriever | format_docs, \"question\": RunnablePassthrough()}\n",
    "    | prompt\n",
    "    | llm\n",
    "    | StrOutputParser()\n",
    ")\n",
    "\n",
    "rag_chain.invoke(\"什么是人工智能？\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "04e451fb",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3.10",
   "language": "python",
   "name": "python310"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.16"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
