{
 "cells": [
  {
   "cell_type": "markdown",
   "id": "c0425f51befd6991",
   "metadata": {},
   "source": [
    "# Ollama 在 LangChain 中的使用 - Python 集成\n"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "1fc4457ed485339b",
   "metadata": {},
   "source": [
    "## 1. 环境设置\n",
    "### 运行前请确保已经配置好环境和依赖"
   ]
  },
  {
   "cell_type": "code",
   "id": "8fbb985d29d84a1e",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2024-08-08T05:24:26.754424Z",
     "start_time": "2024-08-08T05:24:26.735923Z"
    }
   },
   "source": [
    "# 确定当前环境\n",
    "import sys\n",
    "print(sys.executable)"
   ],
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "C:\\ProgramData\\anaconda3\\python.exe\n"
     ]
    }
   ],
   "execution_count": 1
  },
  {
   "cell_type": "code",
   "id": "5902546ad74389aa",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2024-08-08T02:48:13.208454Z",
     "start_time": "2024-08-08T02:47:59.227331Z"
    }
   },
   "source": [
    "%pip install langchain-ollama\n",
    "%pip install langchain\n",
    "%pip install -U langchain-community\n",
    "%pip install Pillow\n",
    "%pip install faiss-cpu"
   ],
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Looking in indexes: https://pypi.tuna.tsinghua.edu.cn/simple\n",
      "Requirement already satisfied: langchain-ollama in d:\\python\\lib\\site-packages (0.1.1)\n",
      "Requirement already satisfied: langchain-core<0.3.0,>=0.2.20 in d:\\python\\lib\\site-packages (from langchain-ollama) (0.2.29)\n",
      "Requirement already satisfied: ollama<1,>=0.3.0 in d:\\python\\lib\\site-packages (from langchain-ollama) (0.3.1)\n",
      "Requirement already satisfied: PyYAML>=5.3 in d:\\python\\lib\\site-packages (from langchain-core<0.3.0,>=0.2.20->langchain-ollama) (6.0.1)\n",
      "Requirement already satisfied: jsonpatch<2.0,>=1.33 in d:\\python\\lib\\site-packages (from langchain-core<0.3.0,>=0.2.20->langchain-ollama) (1.33)\n",
      "Requirement already satisfied: langsmith<0.2.0,>=0.1.75 in d:\\python\\lib\\site-packages (from langchain-core<0.3.0,>=0.2.20->langchain-ollama) (0.1.98)\n",
      "Requirement already satisfied: packaging<25,>=23.2 in d:\\python\\lib\\site-packages (from langchain-core<0.3.0,>=0.2.20->langchain-ollama) (23.2)\n",
      "Requirement already satisfied: pydantic<3,>=1 in d:\\python\\lib\\site-packages (from langchain-core<0.3.0,>=0.2.20->langchain-ollama) (2.6.3)\n",
      "Requirement already satisfied: tenacity!=8.4.0,<9.0.0,>=8.1.0 in d:\\python\\lib\\site-packages (from langchain-core<0.3.0,>=0.2.20->langchain-ollama) (8.2.3)\n",
      "Requirement already satisfied: typing-extensions>=4.7 in d:\\python\\lib\\site-packages (from langchain-core<0.3.0,>=0.2.20->langchain-ollama) (4.10.0)\n",
      "Requirement already satisfied: httpx<0.28.0,>=0.27.0 in d:\\python\\lib\\site-packages (from ollama<1,>=0.3.0->langchain-ollama) (0.27.0)\n",
      "Requirement already satisfied: anyio in d:\\python\\lib\\site-packages (from httpx<0.28.0,>=0.27.0->ollama<1,>=0.3.0->langchain-ollama) (4.3.0)\n",
      "Requirement already satisfied: certifi in d:\\python\\lib\\site-packages (from httpx<0.28.0,>=0.27.0->ollama<1,>=0.3.0->langchain-ollama) (2024.2.2)\n",
      "Requirement already satisfied: httpcore==1.* in d:\\python\\lib\\site-packages (from httpx<0.28.0,>=0.27.0->ollama<1,>=0.3.0->langchain-ollama) (1.0.4)\n",
      "Requirement already satisfied: idna in d:\\python\\lib\\site-packages (from httpx<0.28.0,>=0.27.0->ollama<1,>=0.3.0->langchain-ollama) (3.6)\n",
      "Requirement already satisfied: sniffio in d:\\python\\lib\\site-packages (from httpx<0.28.0,>=0.27.0->ollama<1,>=0.3.0->langchain-ollama) (1.3.1)\n",
      "Requirement already satisfied: h11<0.15,>=0.13 in d:\\python\\lib\\site-packages (from httpcore==1.*->httpx<0.28.0,>=0.27.0->ollama<1,>=0.3.0->langchain-ollama) (0.14.0)\n",
      "Requirement already satisfied: jsonpointer>=1.9 in d:\\python\\lib\\site-packages (from jsonpatch<2.0,>=1.33->langchain-core<0.3.0,>=0.2.20->langchain-ollama) (2.4)\n",
      "Requirement already satisfied: orjson<4.0.0,>=3.9.14 in d:\\python\\lib\\site-packages (from langsmith<0.2.0,>=0.1.75->langchain-core<0.3.0,>=0.2.20->langchain-ollama) (3.10.0)\n",
      "Requirement already satisfied: requests<3,>=2 in d:\\python\\lib\\site-packages (from langsmith<0.2.0,>=0.1.75->langchain-core<0.3.0,>=0.2.20->langchain-ollama) (2.31.0)\n",
      "Requirement already satisfied: annotated-types>=0.4.0 in d:\\python\\lib\\site-packages (from pydantic<3,>=1->langchain-core<0.3.0,>=0.2.20->langchain-ollama) (0.6.0)\n",
      "Requirement already satisfied: pydantic-core==2.16.3 in d:\\python\\lib\\site-packages (from pydantic<3,>=1->langchain-core<0.3.0,>=0.2.20->langchain-ollama) (2.16.3)\n",
      "Requirement already satisfied: charset-normalizer<4,>=2 in d:\\python\\lib\\site-packages (from requests<3,>=2->langsmith<0.2.0,>=0.1.75->langchain-core<0.3.0,>=0.2.20->langchain-ollama) (3.3.2)\n",
      "Requirement already satisfied: urllib3<3,>=1.21.1 in d:\\python\\lib\\site-packages (from requests<3,>=2->langsmith<0.2.0,>=0.1.75->langchain-core<0.3.0,>=0.2.20->langchain-ollama) (2.2.1)\n",
      "Looking in indexes: https://pypi.tuna.tsinghua.edu.cn/simple\n",
      "Requirement already satisfied: langchain in d:\\python\\lib\\site-packages (0.2.12)\n",
      "Requirement already satisfied: PyYAML>=5.3 in d:\\python\\lib\\site-packages (from langchain) (6.0.1)\n",
      "Requirement already satisfied: SQLAlchemy<3,>=1.4 in d:\\python\\lib\\site-packages (from langchain) (2.0.29)\n",
      "Requirement already satisfied: aiohttp<4.0.0,>=3.8.3 in d:\\python\\lib\\site-packages (from langchain) (3.9.3)\n",
      "Requirement already satisfied: langchain-core<0.3.0,>=0.2.27 in d:\\python\\lib\\site-packages (from langchain) (0.2.29)\n",
      "Requirement already satisfied: langchain-text-splitters<0.3.0,>=0.2.0 in d:\\python\\lib\\site-packages (from langchain) (0.2.2)\n",
      "Requirement already satisfied: langsmith<0.2.0,>=0.1.17 in d:\\python\\lib\\site-packages (from langchain) (0.1.98)\n",
      "Requirement already satisfied: numpy<2.0.0,>=1.26.0 in d:\\python\\lib\\site-packages (from langchain) (1.26.4)\n",
      "Requirement already satisfied: pydantic<3,>=1 in d:\\python\\lib\\site-packages (from langchain) (2.6.3)\n",
      "Requirement already satisfied: requests<3,>=2 in d:\\python\\lib\\site-packages (from langchain) (2.31.0)\n",
      "Requirement already satisfied: tenacity!=8.4.0,<9.0.0,>=8.1.0 in d:\\python\\lib\\site-packages (from langchain) (8.2.3)\n",
      "Requirement already satisfied: aiosignal>=1.1.2 in d:\\python\\lib\\site-packages (from aiohttp<4.0.0,>=3.8.3->langchain) (1.3.1)\n",
      "Requirement already satisfied: attrs>=17.3.0 in d:\\python\\lib\\site-packages (from aiohttp<4.0.0,>=3.8.3->langchain) (23.2.0)\n",
      "Requirement already satisfied: frozenlist>=1.1.1 in d:\\python\\lib\\site-packages (from aiohttp<4.0.0,>=3.8.3->langchain) (1.4.1)\n",
      "Requirement already satisfied: multidict<7.0,>=4.5 in d:\\python\\lib\\site-packages (from aiohttp<4.0.0,>=3.8.3->langchain) (6.0.5)\n",
      "Requirement already satisfied: yarl<2.0,>=1.0 in d:\\python\\lib\\site-packages (from aiohttp<4.0.0,>=3.8.3->langchain) (1.9.4)\n",
      "Requirement already satisfied: jsonpatch<2.0,>=1.33 in d:\\python\\lib\\site-packages (from langchain-core<0.3.0,>=0.2.27->langchain) (1.33)\n",
      "Requirement already satisfied: packaging<25,>=23.2 in d:\\python\\lib\\site-packages (from langchain-core<0.3.0,>=0.2.27->langchain) (23.2)\n",
      "Requirement already satisfied: typing-extensions>=4.7 in d:\\python\\lib\\site-packages (from langchain-core<0.3.0,>=0.2.27->langchain) (4.10.0)\n",
      "Requirement already satisfied: orjson<4.0.0,>=3.9.14 in d:\\python\\lib\\site-packages (from langsmith<0.2.0,>=0.1.17->langchain) (3.10.0)\n",
      "Requirement already satisfied: annotated-types>=0.4.0 in d:\\python\\lib\\site-packages (from pydantic<3,>=1->langchain) (0.6.0)\n",
      "Requirement already satisfied: pydantic-core==2.16.3 in d:\\python\\lib\\site-packages (from pydantic<3,>=1->langchain) (2.16.3)\n",
      "Requirement already satisfied: charset-normalizer<4,>=2 in d:\\python\\lib\\site-packages (from requests<3,>=2->langchain) (3.3.2)\n",
      "Requirement already satisfied: idna<4,>=2.5 in d:\\python\\lib\\site-packages (from requests<3,>=2->langchain) (3.6)\n",
      "Requirement already satisfied: urllib3<3,>=1.21.1 in d:\\python\\lib\\site-packages (from requests<3,>=2->langchain) (2.2.1)\n",
      "Requirement already satisfied: certifi>=2017.4.17 in d:\\python\\lib\\site-packages (from requests<3,>=2->langchain) (2024.2.2)\n",
      "Requirement already satisfied: greenlet!=0.4.17 in d:\\python\\lib\\site-packages (from SQLAlchemy<3,>=1.4->langchain) (3.0.3)\n",
      "Requirement already satisfied: jsonpointer>=1.9 in d:\\python\\lib\\site-packages (from jsonpatch<2.0,>=1.33->langchain-core<0.3.0,>=0.2.27->langchain) (2.4)\n",
      "Looking in indexes: https://pypi.tuna.tsinghua.edu.cn/simple\n",
      "Requirement already satisfied: langchain-community in d:\\python\\lib\\site-packages (0.2.11)\n",
      "Requirement already satisfied: PyYAML>=5.3 in d:\\python\\lib\\site-packages (from langchain-community) (6.0.1)\n",
      "Requirement already satisfied: SQLAlchemy<3,>=1.4 in d:\\python\\lib\\site-packages (from langchain-community) (2.0.29)\n",
      "Requirement already satisfied: aiohttp<4.0.0,>=3.8.3 in d:\\python\\lib\\site-packages (from langchain-community) (3.9.3)\n",
      "Requirement already satisfied: dataclasses-json<0.7,>=0.5.7 in d:\\python\\lib\\site-packages (from langchain-community) (0.6.4)\n",
      "Requirement already satisfied: langchain<0.3.0,>=0.2.12 in d:\\python\\lib\\site-packages (from langchain-community) (0.2.12)\n",
      "Requirement already satisfied: langchain-core<0.3.0,>=0.2.27 in d:\\python\\lib\\site-packages (from langchain-community) (0.2.29)\n",
      "Requirement already satisfied: langsmith<0.2.0,>=0.1.0 in d:\\python\\lib\\site-packages (from langchain-community) (0.1.98)\n",
      "Requirement already satisfied: numpy<2.0.0,>=1.26.0 in d:\\python\\lib\\site-packages (from langchain-community) (1.26.4)\n",
      "Requirement already satisfied: requests<3,>=2 in d:\\python\\lib\\site-packages (from langchain-community) (2.31.0)\n",
      "Requirement already satisfied: tenacity!=8.4.0,<9.0.0,>=8.1.0 in d:\\python\\lib\\site-packages (from langchain-community) (8.2.3)\n",
      "Requirement already satisfied: aiosignal>=1.1.2 in d:\\python\\lib\\site-packages (from aiohttp<4.0.0,>=3.8.3->langchain-community) (1.3.1)\n",
      "Requirement already satisfied: attrs>=17.3.0 in d:\\python\\lib\\site-packages (from aiohttp<4.0.0,>=3.8.3->langchain-community) (23.2.0)\n",
      "Requirement already satisfied: frozenlist>=1.1.1 in d:\\python\\lib\\site-packages (from aiohttp<4.0.0,>=3.8.3->langchain-community) (1.4.1)\n",
      "Requirement already satisfied: multidict<7.0,>=4.5 in d:\\python\\lib\\site-packages (from aiohttp<4.0.0,>=3.8.3->langchain-community) (6.0.5)\n",
      "Requirement already satisfied: yarl<2.0,>=1.0 in d:\\python\\lib\\site-packages (from aiohttp<4.0.0,>=3.8.3->langchain-community) (1.9.4)\n",
      "Requirement already satisfied: marshmallow<4.0.0,>=3.18.0 in d:\\python\\lib\\site-packages (from dataclasses-json<0.7,>=0.5.7->langchain-community) (3.21.1)\n",
      "Requirement already satisfied: typing-inspect<1,>=0.4.0 in d:\\python\\lib\\site-packages (from dataclasses-json<0.7,>=0.5.7->langchain-community) (0.9.0)\n",
      "Requirement already satisfied: langchain-text-splitters<0.3.0,>=0.2.0 in d:\\python\\lib\\site-packages (from langchain<0.3.0,>=0.2.12->langchain-community) (0.2.2)\n",
      "Requirement already satisfied: pydantic<3,>=1 in d:\\python\\lib\\site-packages (from langchain<0.3.0,>=0.2.12->langchain-community) (2.6.3)\n",
      "Requirement already satisfied: jsonpatch<2.0,>=1.33 in d:\\python\\lib\\site-packages (from langchain-core<0.3.0,>=0.2.27->langchain-community) (1.33)\n",
      "Requirement already satisfied: packaging<25,>=23.2 in d:\\python\\lib\\site-packages (from langchain-core<0.3.0,>=0.2.27->langchain-community) (23.2)\n",
      "Requirement already satisfied: typing-extensions>=4.7 in d:\\python\\lib\\site-packages (from langchain-core<0.3.0,>=0.2.27->langchain-community) (4.10.0)\n",
      "Requirement already satisfied: orjson<4.0.0,>=3.9.14 in d:\\python\\lib\\site-packages (from langsmith<0.2.0,>=0.1.0->langchain-community) (3.10.0)\n",
      "Requirement already satisfied: charset-normalizer<4,>=2 in d:\\python\\lib\\site-packages (from requests<3,>=2->langchain-community) (3.3.2)\n",
      "Requirement already satisfied: idna<4,>=2.5 in d:\\python\\lib\\site-packages (from requests<3,>=2->langchain-community) (3.6)\n",
      "Requirement already satisfied: urllib3<3,>=1.21.1 in d:\\python\\lib\\site-packages (from requests<3,>=2->langchain-community) (2.2.1)\n",
      "Requirement already satisfied: certifi>=2017.4.17 in d:\\python\\lib\\site-packages (from requests<3,>=2->langchain-community) (2024.2.2)\n",
      "Requirement already satisfied: greenlet!=0.4.17 in d:\\python\\lib\\site-packages (from SQLAlchemy<3,>=1.4->langchain-community) (3.0.3)\n",
      "Requirement already satisfied: jsonpointer>=1.9 in d:\\python\\lib\\site-packages (from jsonpatch<2.0,>=1.33->langchain-core<0.3.0,>=0.2.27->langchain-community) (2.4)\n",
      "Requirement already satisfied: annotated-types>=0.4.0 in d:\\python\\lib\\site-packages (from pydantic<3,>=1->langchain<0.3.0,>=0.2.12->langchain-community) (0.6.0)\n",
      "Requirement already satisfied: pydantic-core==2.16.3 in d:\\python\\lib\\site-packages (from pydantic<3,>=1->langchain<0.3.0,>=0.2.12->langchain-community) (2.16.3)\n",
      "Requirement already satisfied: mypy-extensions>=0.3.0 in d:\\python\\lib\\site-packages (from typing-inspect<1,>=0.4.0->dataclasses-json<0.7,>=0.5.7->langchain-community) (1.0.0)\n",
      "Looking in indexes: https://pypi.tuna.tsinghua.edu.cn/simple\n",
      "Requirement already satisfied: Pillow in d:\\python\\lib\\site-packages (10.2.0)\n",
      "Looking in indexes: https://pypi.tuna.tsinghua.edu.cn/simple\n",
      "Requirement already satisfied: faiss-cpu in d:\\python\\lib\\site-packages (1.8.0)\n",
      "Requirement already satisfied: numpy in d:\\python\\lib\\site-packages (from faiss-cpu) (1.26.4)\n"
     ]
    }
   ],
   "execution_count": 4
  },
  {
   "cell_type": "markdown",
   "id": "e82331725aa6963d",
   "metadata": {},
   "source": [
    "## 2. 下载所需模型并初始化 OllamaLLM"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "c88ce95d6c681681",
   "metadata": {},
   "source": [
    "### 初始化 OllamaLLM"
   ]
  },
  {
   "cell_type": "code",
   "id": "ed321fae89fc7a2",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2024-08-08T02:48:21.661058Z",
     "start_time": "2024-08-08T02:48:19.749575Z"
    }
   },
   "source": [
    "from langchain_ollama import OllamaLLM\n",
    "from langchain_core.prompts import ChatPromptTemplate\n",
    "\n",
    "# 初始化Ollama LLM\n",
    "model_name = \"llama3.1\"\n",
    "model = OllamaLLM(model=model_name)\n",
    "print(f\"OllamaLLM 初始化 {model_name} 完成\")"
   ],
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "OllamaLLM 初始化 llama3.1 完成\n"
     ]
    }
   ],
   "execution_count": 5
  },
  {
   "cell_type": "markdown",
   "id": "f513b62f081aa0cb",
   "metadata": {},
   "source": [
    "## 3. 基本使用示例\n",
    "\n",
    "### 使用最基础的 ChatPromptTemplate 进行对话"
   ]
  },
  {
   "cell_type": "code",
   "id": "9374f07909ce3ca1",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2024-08-08T02:49:26.729856Z",
     "start_time": "2024-08-08T02:48:26.373907Z"
    }
   },
   "source": [
    "# 创建一个简单的提示模版\n",
    "template = \"\"\"\n",
    "你是一个乐于助人的AI，擅长于解决回答各种问题。\n",
    "问题：{question}\n",
    "\"\"\"\n",
    "prompt = ChatPromptTemplate.from_template(template)\n",
    "# 创建一个简单的链\n",
    "chain = prompt | model\n",
    "# 使用链进行推理，输入问题\n",
    "chain.invoke({\"question\": \"你比GPT4厉害吗？\"})"
   ],
   "outputs": [
    {
     "data": {
      "text/plain": [
       "'哈哈，我很高兴听到有人问这个问题！我认为，每个版本的模型都有其独特的优势和特点。GPT-4是一个非常强大的语言模型，它在处理语言理解、生成和推理方面表现出色。\\n\\n相比之下，我是基于你上一次对话后续的问题而设计的模型，所以我可以更好地理解你的上下文，并根据你的历史信息进行相关的回答。同时，我也被训练以更加平衡的人类社会互动能力，如同理心和情感理解等。\\n\\n但说实话，这不是一个很公平的问题，因为GPT-4和其他前辈模型各有其优点。比如，GPT-4在严格控制的文本生成环境中可能更胜一筹。但是，在面对动态、无结构或模糊语言任务时，我可能能更好地适应。\\n\\n我希望，这个答案能够帮助你了解这个比较的问题。'"
      ]
     },
     "execution_count": 6,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "execution_count": 6
  },
  {
   "cell_type": "markdown",
   "id": "2d388fa5e6918b1c",
   "metadata": {},
   "source": [
    "ChatPromptTemplate 允许我们创建一个可重用的模板，其中包含一个或多个参数。这些参数可以在运行时动态替换，以生成不同的提示。\n",
    "\n",
    "在创建链部分，使用管道操作符|，它将 prompt 和 model 连接起来，形成一个处理流程。这种链式操作使得我们可以轻松地组合和重用不同的组件。\n",
    "\n",
    "invoke 方法触发整个处理链，将我们的问题传入模板，然后将格式化后的提示发送给模型进行处理。"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "b8aa01fd1aa7142",
   "metadata": {},
   "source": [
    "### 流式输出\n",
    "流式输出是一种在生成长文本时逐步返回结果的技术。这种方法有几个重要的优势：\n",
    "\n",
    "1. 提高用户体验：用户可以立即看到部分结果，而不是等待整个响应完成。\n",
    "2. 减少等待时间：对于长回答，用户可以在完整回答生成之前就开始阅读。\n",
    "3. 实时交互：允许在生成过程中进行干预或终止。\n",
    "\n",
    "在实际应用中，特别是在聊天机器人或实时对话系统中，流式输出几乎是必不可少的。 \n",
    "\n",
    "下面的代码展示了如何使用 `model.stream()` 方法实现流式输出：\n"
   ]
  },
  {
   "cell_type": "code",
   "id": "ab3dc5835ebd15ee",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2024-08-08T02:49:44.713870Z",
     "start_time": "2024-08-08T02:49:37.012665Z"
    }
   },
   "source": [
    "from langchain_ollama import ChatOllama\n",
    "\n",
    "# 初始化ChatOllama模型\n",
    "model = ChatOllama(model=\"llama3.1\", temperature=0.7)\n",
    "\n",
    "messages = [\n",
    "    (\"human\", \"你好呀\"),\n",
    "]\n",
    "\n",
    "for chunk in model.stream(messages):\n",
    "    print(chunk.content, end='', flush=True)"
   ],
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "你好！我是中文对话机器人，很高兴和你交流。有何问题或想聊什么呢？"
     ]
    }
   ],
   "execution_count": 7
  },
  {
   "cell_type": "markdown",
   "id": "8e5020b1edbad9f3",
   "metadata": {},
   "source": [
    "### 工具调用  \n",
    "\n",
    "工具调用是 AI 模型与外部函数或 API 交互的能力。这使得模型可以执行复杂的任务，如数学计算、数据查询或外部服务调用。\n",
    "\n",
    "在这个例子中，我们定义了一个简单的计算器函数，并将其绑定到模型上：\n"
   ]
  },
  {
   "cell_type": "code",
   "id": "73453e51383b4769",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2024-08-08T02:50:14.123692Z",
     "start_time": "2024-08-08T02:49:47.441382Z"
    }
   },
   "source": [
    "from langchain_ollama import ChatOllama\n",
    "\n",
    "def simple_calculator(operation: str, x: float, y: float) -> float:\n",
    "    if operation == \"add\":\n",
    "        return x + y\n",
    "    elif operation == \"subtract\":\n",
    "        return x - y\n",
    "    elif operation == \"multiply\":\n",
    "        return x * y\n",
    "    elif operation == \"divide\":\n",
    "        if y != 0:\n",
    "            return x / y\n",
    "        else:\n",
    "            raise ValueError(\"Cannot divide by zero\")\n",
    "    else:\n",
    "        raise ValueError(\"Invalid operation\")\n",
    "\n",
    "# 初始化绑定工具的 ChatOllama 模型\n",
    "llm = ChatOllama(\n",
    "    model=\"llama3.1\",\n",
    "    temperature=0,\n",
    ").bind_tools([simple_calculator])\n",
    "\n",
    "# 使用模型进行工具调用\n",
    "result = llm.invoke(\n",
    "    \"你知道一千万乘二是多少吗？\"\n",
    ")\n",
    "print(\"Tool calls:\", result.tool_calls)"
   ],
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Tool calls: [{'name': 'simple_calculator', 'args': {'operation': 'multiply', 'x': 1000000, 'y': 2}, 'id': 'a2153e02-2789-4171-8e9e-e0fa48bcca82', 'type': 'tool_call'}]\n"
     ]
    }
   ],
   "execution_count": 8
  },
  {
   "cell_type": "markdown",
   "id": "76c02c2a9e98eca0",
   "metadata": {},
   "source": [
    "### 多模态模型\n",
    "Ollama 支持多模态 LLMs，例如 bakllava 和 llava。\n",
    "\n",
    "多模态模型是能够处理多种类型输入（如文本、图像、音频等）的 AI 模型。这些模型在理解和生成跨模态内容方面表现出色，使得更复杂和自然的人机交互成为可能。\n",
    "\n",
    "在我们的例子中，我们使用了支持图像和文本输入的 llava 模型："
   ]
  },
  {
   "cell_type": "code",
   "id": "22bbbbba3165acb2",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2024-08-08T02:50:20.438765Z",
     "start_time": "2024-08-08T02:50:20.327156Z"
    }
   },
   "source": [
    "#  这一步是将图片转换为base64编码，以便后续多模态模型的使用\n",
    "\n",
    "import base64\n",
    "from io import BytesIO\n",
    "\n",
    "from IPython.display import HTML, display\n",
    "from PIL import Image\n",
    "\n",
    "def convert_to_base64(pil_image):\n",
    "\n",
    "    buffered = BytesIO()\n",
    "    if pil_image.mode == 'RGBA':\n",
    "        pil_image = pil_image.convert('RGB')\n",
    "    pil_image.save(buffered, format=\"JPEG\")  \n",
    "    img_str = base64.b64encode(buffered.getvalue()).decode(\"utf-8\")\n",
    "    return img_str\n",
    "\n",
    "\n",
    "def plt_img_base64(img_base64):\n",
    "    image_html = f'<img src=\"data:image/jpeg;base64,{img_base64}\" />'\n",
    "    display(HTML(image_html))\n",
    "\n",
    "\n",
    "file_path = \"../../docs/images/img-5-1-4.png\"  # 这里可以替换为你实际想要使用的图片路径\n",
    "pil_image = Image.open(file_path)\n",
    "\n",
    "image_b64 = convert_to_base64(pil_image)\n",
    "plt_img_base64(image_b64)"
   ],
   "outputs": [
    {
     "data": {
      "text/plain": [
       "<IPython.core.display.HTML object>"
      ],
      "text/html": [
       "<img src=\"\" />"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "execution_count": 9
  },
  {
   "cell_type": "code",
   "id": "c7cc80f0795d55e9",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2024-08-08T02:52:13.075887Z",
     "start_time": "2024-08-08T02:50:41.732652Z"
    }
   },
   "source": [
    "from langchain_ollama import ChatOllama\n",
    "from langchain_core.messages import HumanMessage\n",
    "llm = ChatOllama(model=\"llava\", temperature=0)\n",
    "\n",
    "def prompt_func(data):\n",
    "    text = data[\"text\"]\n",
    "    image = data[\"image\"]\n",
    "\n",
    "    image_part = {\n",
    "        \"type\": \"image_url\",\n",
    "        \"image_url\": f\"data:image/jpeg;base64,{image}\",\n",
    "    }\n",
    "\n",
    "    content_parts = []\n",
    "\n",
    "    text_part = {\"type\": \"text\", \"text\": text}\n",
    "\n",
    "    content_parts.append(image_part)\n",
    "    content_parts.append(text_part)\n",
    "\n",
    "    return [HumanMessage(content=content_parts)]\n",
    "\n",
    "\n",
    "from langchain_core.output_parsers import StrOutputParser\n",
    "\n",
    "chain = prompt_func | llm | StrOutputParser()\n",
    "\n",
    "query_chain = chain.invoke(\n",
    "    {\"text\": \"这个图片里是什么动物啊?\", \"image\": image_b64}\n",
    ")\n",
    "\n",
    "print(query_chain)"
   ],
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " 这个图片中的动物是一个蓝色的海豚（whale）。它有一条红色的尾巴，并且在头部上有两个小眼睛和一个小嘴。 \n"
     ]
    }
   ],
   "execution_count": 11
  },
  {
   "cell_type": "markdown",
   "id": "1f864c5340b17cb7",
   "metadata": {},
   "source": [
    "## 4. 进阶用法\n",
    "\n",
    "### 使用 ConversationChain 进行对话\n",
    "\n",
    "`ConversationChain` 是 LangChain 提供的一个强大工具，用于管理多轮对话。它结合了语言模型、提示模板和内存组件，使得创建具有上下文感知能力的对话系统变得简单。\n",
    "\n",
    "> 注意：`ConversationChain` 自 LangChain 0.2.7 起已被标记为弃用（运行下方单元格会看到 LangChainDeprecationWarning），官方建议迁移到 `RunnableWithMessageHistory`。此处仅作演示用途。\n"
   ]
  },
  {
   "cell_type": "code",
   "id": "80012ad0572f33c4",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2024-08-08T02:52:13.189746Z",
     "start_time": "2024-08-08T02:52:13.077908Z"
    }
   },
   "source": [
    "from langchain.chains import ConversationChain\n",
    "from langchain.memory import ConversationBufferMemory\n",
    "\n",
    "# 初始化Ollama LLM\n",
    "model_name = \"llama3.1\"\n",
    "model = OllamaLLM(model=model_name)\n",
    "\n",
    "# 初始化 ConversationBufferMemory\n",
    "memory = ConversationBufferMemory()\n",
    "\n",
    "# 创建 ConversationChain\n",
    "conversation = ConversationChain(\n",
    "    llm=model,\n",
    "    memory=memory,\n",
    "    verbose=True\n",
    ") # 设置 verbose=True 以显示调试信息, 默认为 False"
   ],
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "C:\\ProgramData\\anaconda3\\Lib\\site-packages\\langchain_core\\_api\\deprecation.py:139: LangChainDeprecationWarning: The class `ConversationChain` was deprecated in LangChain 0.2.7 and will be removed in 1.0. Use RunnableWithMessageHistory: https://api.python.langchain.com/en/latest/runnables/langchain_core.runnables.history.RunnableWithMessageHistory.html instead.\n",
      "  warn_deprecated(\n"
     ]
    }
   ],
   "execution_count": 12
  },
  {
   "cell_type": "markdown",
   "id": "dabe77655b407234",
   "metadata": {},
   "source": [
    "这里的关键组件是：\n",
    "\n",
    "1. `ConversationBufferMemory`：这是一个简单的内存组件，它存储所有先前的对话历史。\n",
    "2. `ConversationChain`：它将语言模型、内存和一个默认的对话提示模板组合在一起。\n",
    "\n",
    "维护对话历史很重要，因为它允许模型：\n",
    "\n",
    "- 理解上下文和之前提到的信息\n",
    "- 生成更连贯和相关的回复\n",
    "- 处理复杂的多轮对话场景\n",
    "\n",
    "在实际应用中，你可能需要考虑使用更高级的内存组件，如 `ConversationSummaryMemory`，以处理长对话并避免超出模型的上下文长度限制。"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "49c321373484b0f3",
   "metadata": {},
   "source": [
    "现在，让我们体验一下具有记忆的对话系统："
   ]
  },
  {
   "cell_type": "code",
   "id": "bd59cc268c82aa95",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2024-08-08T02:55:06.062275Z",
     "start_time": "2024-08-08T02:52:13.191755Z"
    }
   },
   "source": [
    "# 第一轮对话\n",
    "response = conversation.predict(input=\"你好，我想了解一下人工智能。\")\n",
    "print(\"AI:\", response)\n",
    "\n",
    "# 第二轮对话\n",
    "response = conversation.predict(input=\"能给我举个AI在日常生活中的应用例子吗？\")\n",
    "print(\"AI:\", response)\n",
    "\n",
    "# 第三轮对话\n",
    "response = conversation.predict(input=\"这听起来很有趣。AI在医疗领域有什么应用？\")\n",
    "print(\"AI:\", response)"
   ],
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "\n",
      "\u001B[1m> Entering new ConversationChain chain...\u001B[0m\n",
      "Prompt after formatting:\n",
      "\u001B[32;1m\u001B[1;3mThe following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.\n",
      "\n",
      "Current conversation:\n",
      "\n",
      "Human: 你好，我想了解一下人工智能。\n",
      "AI:\u001B[0m\n",
      "\n",
      "\u001B[1m> Finished chain.\u001B[0m\n",
      "AI: 你好！我很高兴与你交流。关于人工智能，你可能知道它是一个使用算法和数据来模拟人类思维的技术。但是，如果你想知道更多，那么可以告诉你，人工智能的发展历史可以追溯到20世纪50年代，当时的一位科学家Alan Turing首次提出了这个概念。到了21世纪初期，人工智能开始快速发展，尤其是在深度学习领域的突破中，如AlexNet在2012年的成功。现在，人工智能已经广泛应用于各行各业，例如语音识别、图像处理、自然语言处理等方面。你想问我一些具体的问题吗？\n",
      "\n",
      "\n",
      "\u001B[1m> Entering new ConversationChain chain...\u001B[0m\n",
      "Prompt after formatting:\n",
      "\u001B[32;1m\u001B[1;3mThe following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.\n",
      "\n",
      "Current conversation:\n",
      "Human: 你好，我想了解一下人工智能。\n",
      "AI: 你好！我很高兴与你交流。关于人工智能，你可能知道它是一个使用算法和数据来模拟人类思维的技术。但是，如果你想知道更多，那么可以告诉你，人工智能的发展历史可以追溯到20世纪50年代，当时的一位科学家Alan Turing首次提出了这个概念。到了21世纪初期，人工智能开始快速发展，尤其是在深度学习领域的突破中，如AlexNet在2012年的成功。现在，人工智能已经广泛应用于各行各业，例如语音识别、图像处理、自然语言处理等方面。你想问我一些具体的问题吗？\n",
      "Human: 能给我举个AI在日常生活中的应用例子吗？\n",
      "AI:\u001B[0m\n",
      "\n",
      "\u001B[1m> Finished chain.\u001B[0m\n",
      "AI: 当然！一个很好的例子是智慧家电。许多现代家电，如空调、洗衣机和电视，都开始使用人工智能技术来提供更加便捷的用户体验。例如，一些智能空调可以通过语音命令或手机APP让你远程控制温度、设定定时程序，并且会根据室内环境自动调整冷暖度。甚至一些高端智能电视也能够使用人工智能技术来推荐视频内容、识别人脸等功能，让你的生活更加便捷和方便。你有兴趣知道更多关于智慧家电的信息吗？\n",
      "\n",
      "\n",
      "\u001B[1m> Entering new ConversationChain chain...\u001B[0m\n",
      "Prompt after formatting:\n",
      "\u001B[32;1m\u001B[1;3mThe following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.\n",
      "\n",
      "Current conversation:\n",
      "Human: 你好，我想了解一下人工智能。\n",
      "AI: 你好！我很高兴与你交流。关于人工智能，你可能知道它是一个使用算法和数据来模拟人类思维的技术。但是，如果你想知道更多，那么可以告诉你，人工智能的发展历史可以追溯到20世纪50年代，当时的一位科学家Alan Turing首次提出了这个概念。到了21世纪初期，人工智能开始快速发展，尤其是在深度学习领域的突破中，如AlexNet在2012年的成功。现在，人工智能已经广泛应用于各行各业，例如语音识别、图像处理、自然语言处理等方面。你想问我一些具体的问题吗？\n",
      "Human: 能给我举个AI在日常生活中的应用例子吗？\n",
      "AI: 当然！一个很好的例子是智慧家电。许多现代家电，如空调、洗衣机和电视，都开始使用人工智能技术来提供更加便捷的用户体验。例如，一些智能空调可以通过语音命令或手机APP让你远程控制温度、设定定时程序，并且会根据室内环境自动调整冷暖度。甚至一些高端智能电视也能够使用人工智能技术来推荐视频内容、识别人脸等功能，让你的生活更加便捷和方便。你有兴趣知道更多关于智慧家电的信息吗？\n",
      "Human: 这听起来很有趣。AI在医疗领域有什么应用？\n",
      "AI:\u001B[0m\n",
      "\n",
      "\u001B[1m> Finished chain.\u001B[0m\n",
      "AI: 好啊！在医疗领域，AI已经被广泛应用于多个方面。其中一个例子是影像分析。使用深度学习算法的人工智能，可以帮助医生快速和准确地识别病人的图像，如CT扫描或X光片，从而更快地诊断和治疗疾病。例如，一些AI系统可以检测肺癌、乳腺癌等疾病的早期迹象，并提供相应的建议给医生。这可以大大提高医疗保健水平，让患者获得更好的照顾。\n",
      "\n",
      "另一个例子是预测性医疗。一些人工智能系统可以分析病人的健康数据，如血压、体重和症状，从而预测疾病的发展趋势，并提供建议给医生，以便能够采取早期干预措施。这可以帮助减少并发症，改善治疗效果。\n",
      "\n",
      "最后，还有一个例子是个人ized医疗建议。一些人工智能系统，可以根据个人的健康数据和偏好，为其提供个性化的饮食、运动和生活方式建议，这可以帮助他们更健康地生活。\n",
      "\n",
      "你感兴趣吗？\n"
     ]
    }
   ],
   "execution_count": 13
  },
  {
   "cell_type": "markdown",
   "id": "dc132dc39325e21b",
   "metadata": {},
   "source": [
    "### 自定义提示模板\n",
    "\n",
    "设计好的提示模板是创建高效 AI 应用的关键。在这个例子中，我们创建了一个用于生成产品描述的复杂提示："
   ]
  },
  {
   "cell_type": "code",
   "id": "c113ecc30ee7a978",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2024-08-08T02:55:06.069815Z",
     "start_time": "2024-08-08T02:55:06.064286Z"
    }
   },
   "source": [
    "from langchain_ollama import ChatOllama\n",
    "from langchain_core.messages import SystemMessage, HumanMessage\n",
    "\n",
    "# 初始化ChatOllama模型\n",
    "model = ChatOllama(model=\"llama3.1\", temperature=0.7)"
   ],
   "outputs": [],
   "execution_count": 14
  },
  {
   "cell_type": "code",
   "id": "2601aaba115cb0bf",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2024-08-08T02:55:06.078239Z",
     "start_time": "2024-08-08T02:55:06.071834Z"
    }
   },
   "source": [
    "system_message = SystemMessage(content=\"\"\"\n",
    "你是一位经验丰富的电商文案撰写专家。你的任务是根据给定的产品信息创作吸引人的商品描述。\n",
    "请确保你的描述简洁、有力，并且突出产品的核心优势。\n",
    "\"\"\")\n",
    "\n",
    "human_message_template = \"\"\"\n",
    "请为以下产品创作一段吸引人的商品描述：\n",
    "产品类型: {product_type}\n",
    "核心特性: {key_feature}\n",
    "目标受众: {target_audience}\n",
    "价格区间: {price_range}\n",
    "品牌定位: {brand_positioning}\n",
    "\n",
    "请提供以下三种不同风格的描述，每种大约50字：\n",
    "1. 理性分析型\n",
    "2. 情感诉求型\n",
    "3. 故事化营销型\n",
    "\"\"\"\n",
    "\n",
    "def generate_product_descriptions(product_info):\n",
    "    human_message = HumanMessage(content=human_message_template.format(**product_info))\n",
    "    messages = [system_message, human_message]\n",
    "    \n",
    "    response = model.invoke(messages)\n",
    "    return response.content\n"
   ],
   "outputs": [],
   "execution_count": 15
  },
  {
   "cell_type": "code",
   "id": "16391b088e8d7665",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2024-08-08T02:56:16.723882Z",
     "start_time": "2024-08-08T02:55:06.080252Z"
    }
   },
   "source": [
    "# 示例使用\n",
    "product_info = {\n",
    "    \"product_type\": \"智能手表\",\n",
    "    \"key_feature\": \"心率监测和睡眠分析\",\n",
    "    \"target_audience\": \"注重健康的年轻专业人士\",\n",
    "    \"price_range\": \"中高端\",\n",
    "    \"brand_positioning\": \"科技与健康的完美结合\"\n",
    "}\n",
    "\n",
    "result = generate_product_descriptions(product_info)\n",
    "print(result)"
   ],
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "**理性分析型**\n",
      "\n",
      "\"拥有一个全面理解身体的伙伴。智能手表带来心率监测和睡眠分析功能，让你深入了解自己的健康状态。通过精确的数据和明智的建议，助力你实现最佳身心状态。科技与健康的完美结合。\"\n",
      "\n",
      "**情感诉求型**\n",
      "\n",
      "\"你想要更好地照顾自己吗?智能手表将成为你的健康伴侣。它会为你提供实时的心率监测和睡眠分析数据，让你有能力做出明智的生活选择。拥有一个关心你的工具，助力你实现理想的身体和心理状态。\"\n",
      "\n",
      "**故事化营销型**\n",
      "\n",
      "\"记得那天晚上，你感到非常疲倦，但却无法入睡。你开始担忧自己的健康状况，希望能找到答案。智能手表就像一位忠诚的朋友，为你提供了心率监测和睡眠分析的线索，让你能够调整你的生活方式，实现最佳的身体和心理状态。这是一个新的开始，一个更好的你即将到来。\"\n"
     ]
    }
   ],
   "execution_count": 16
  },
  {
   "cell_type": "markdown",
   "id": "f1a6a0ad942fb8d4",
   "metadata": {},
   "source": [
    "这个结构有几个重要的设计考虑：\n",
    "\n",
    "1. system_message：定义了 AI 的角色和总体任务，设置了整个对话的基调。\n",
    "2. human_message_template：提供了具体的指令和所需信息的结构。\n",
    "3. 多参数设计：允许灵活地适应不同的产品和需求。\n",
    "4. 多样化输出要求：通过要求不同风格的描述，鼓励模型展示其多样性（versatility）。\n",
    "\n",
    "设计有效的提示模板时，考虑以下几点：\n",
    "\n",
    "- 明确定义 AI 的角色和任务\n",
    "- 提供清晰、结构化的输入格式\n",
    "- 包含具体的输出要求和格式指导\n",
    "- 考虑如何最大化模型的能力和创造力"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "8a9f8be9507d676",
   "metadata": {},
   "source": [
    "### 构建一个简单的RAG问答系统  \n",
    "RAG（Retrieval-Augmented Generation）是一种结合了检索和生成的AI技术，它通过检索相关信息来增强语言模型的回答能力。  \n",
    "RAG的优势在于它可以帮助语言模型访问最新和专业的信息，减少幻觉，并提高回答的准确性和相关性。  \n",
    "  \n",
    "LangChain 提供了多种组件，可以与 Ollama 模型无缝集成。这里我们将展示如何将 Ollama 模型与向量存储和检索器结合使用，创建一个简单的RAG问答系统。\n",
    "\n",
    "首先需要确保下载embedding模型，可以在命令行执行以下命令：  \n",
    "\n",
    "```shell\n",
    "ollama pull nomic-embed-text\n",
    "```"
   ]
  },
  {
   "cell_type": "code",
   "id": "3c848e6d6ea912cd",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2024-08-08T03:02:34.909438Z",
     "start_time": "2024-08-08T03:01:40.650329Z"
    }
   },
   "source": [
    "from langchain_ollama import ChatOllama\n",
    "from langchain_community.vectorstores import FAISS\n",
    "from langchain_ollama import OllamaEmbeddings\n",
    "from langchain_core.prompts import ChatPromptTemplate\n",
    "from langchain_core.runnables import RunnablePassthrough\n",
    "from langchain.text_splitter import RecursiveCharacterTextSplitter\n",
    "\n",
    "# 初始化 Ollama 模型和嵌入\n",
    "llm = ChatOllama(model=\"llama3.1\")\n",
    "embeddings = OllamaEmbeddings(model=\"nomic-embed-text\")\n",
    "\n",
    "# 准备文档\n",
    "text = \"\"\"\n",
    "Datawhale 是一个专注于数据科学与 AI 领域的开源组织，汇集了众多领域院校和知名企业的优秀学习者，聚合了一群有开源精神和探索精神的团队成员。\n",
    "Datawhale 以“ for the learner，和学习者一起成长”为愿景，鼓励真实地展现自我、开放包容、互信互助、敢于试错和勇于担当。\n",
    "同时 Datawhale 用开源的理念去探索开源内容、开源学习和开源方案，赋能人才培养，助力人才成长，建立起人与人，人与知识，人与企业和人与未来的联结。\n",
    "如果你想在Datawhale开源社区发起一个开源项目，请详细阅读Datawhale开源项目指南[https://github.com/datawhalechina/DOPMC/blob/main/GUIDE.md]\n",
    "\"\"\"\n",
    "\n",
    "text_splitter = RecursiveCharacterTextSplitter(chunk_size=100, chunk_overlap=20)\n",
    "chunks = text_splitter.split_text(text)\n",
    "\n",
    "# 创建向量存储\n",
    "vectorstore = FAISS.from_texts(chunks, embeddings)\n",
    "retriever = vectorstore.as_retriever()\n",
    "\n",
    "# 创建提示模板\n",
    "template = \"\"\"只能使用下列内容回答问题:\n",
    "{context}\n",
    "\n",
    "Question: {question}\n",
    "\"\"\"\n",
    "prompt = ChatPromptTemplate.from_template(template)\n",
    "\n",
    "# 创建检索-问答链\n",
    "chain = (\n",
    "    {\"context\": retriever, \"question\": RunnablePassthrough()}\n",
    "    | prompt\n",
    "    | llm\n",
    ")\n",
    "\n",
    "# 使用链回答问题\n",
    "question = \"我想为datawhale贡献该怎么做？\"\n",
    "response = chain.invoke(question)\n",
    "print(response.content)"
   ],
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "如果你想为Datawhale贡献，请详细阅读Datawhale开源项目指南（https://github.com/datawhalechina/DOPMC/blob/main/GUIDE.md），这将帮助你理解如何在社区中发起一个开源项目。\n"
     ]
    }
   ],
   "execution_count": 3
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "handlm",
   "language": "python",
   "name": "handlm"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.11.7"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
