{
 "cells": [
  {
   "cell_type": "markdown",
   "id": "2ed7178e-1a9c-4784-9ae8-3dd125db0146",
   "metadata": {},
   "source": [
    "# Mistral AI Chat - ChatMistralAI\n",
    "\n",
    "https://python.langchain.com/docs/integrations/chat/mistralai\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "id": "945cd540-b243-407e-b5bc-347839ea75c7",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Name: langchain-mistralai\n",
      "Version: 0.0.5\n",
      "Summary: An integration package connecting Mistral and LangChain\n",
      "Home-page: https://github.com/langchain-ai/langchain\n",
      "Author: \n",
      "Author-email: \n",
      "License: MIT\n",
      "Location: /opt/conda/lib/python3.11/site-packages\n",
      "Requires: langchain-core, mistralai, tokenizers\n",
      "Required-by: \n"
     ]
    }
   ],
   "source": [
    "!pip show langchain_mistralai"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "53b0140b-97b6-4339-8a1b-a6740ad4107d",
   "metadata": {},
   "source": [
    "## Create ChatMistralAI "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "id": "a9cfc7ca-4d18-47d9-9803-cfedb09e3f09",
   "metadata": {},
   "outputs": [],
   "source": [
    "import os\n",
    "from dotenv import load_dotenv\n",
    "from langchain_mistralai.chat_models import ChatMistralAI\n",
    "\n",
    "load_dotenv()\n",
    "\n",
    "chat = ChatMistralAI(mistral_api_key=os.environ[\"MISTRAL_API_KEY\"], model=\"open-mistral-7b\")\n",
    "\n",
    "# class ChatMistralAI(BaseChatModel):\n",
    "#     \"\"\"A chat model that uses the MistralAI API.\"\"\"\n",
    "\n",
    "#     client: MistralClient = Field(default=None)  #: :meta private:\n",
    "#     async_client: MistralAsyncClient = Field(default=None)  #: :meta private:\n",
    "#     mistral_api_key: Optional[SecretStr] = None\n",
    "#     endpoint: str = DEFAULT_MISTRAL_ENDPOINT\n",
    "#     max_retries: int = 5\n",
    "#     timeout: int = 120\n",
    "#     max_concurrent_requests: int = 64\n",
    "\n",
    "#     model: str = \"mistral-small\"\n",
    "#     temperature: float = 0.7\n",
    "#     max_tokens: Optional[int] = None\n",
    "#     top_p: float = 1\n",
    "#     \"\"\"Decode using nucleus sampling: consider the smallest set of tokens whose\n",
    "#        probability sum is at least top_p. Must be in the closed interval [0.0, 1.0].\"\"\"\n",
    "#     random_seed: Optional[int] = None\n",
    "#     safe_mode: bool = False"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "aa4820c0-de54-41ca-8454-90ee67357956",
   "metadata": {},
   "source": [
    "## Create Retriever backed by LanceDB vectorstore\n",
    "\n",
    "Data has already been embedded into LanceDB; please refer to `ollama-embeding.ipynb` for details."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "99cc4cee-396e-414d-9004-f7aef1a7f6f4",
   "metadata": {},
   "outputs": [],
   "source": [
     "%%time\n",
     "\n",
     "# NOTE(review): this cell reads `embeddings`, which is only defined later in\n",
     "# the Retrieval section (embeddings = utility.get_embeddings()). On a fresh\n",
     "# Restart & Run All this cell raises NameError — run the embeddings cell\n",
     "# first, or move it above this one. It also duplicates the retriever cell\n",
     "# in the Retrieval section; consider keeping only one copy.\n",
     "import pyarrow as pa\n",
     "import lancedb\n",
     "from langchain_community.vectorstores import LanceDB\n",
     "\n",
     "db = lancedb.connect(\"lancedb\")\n",
     "# connect to the existing table\n",
     "language = db.open_table('language')\n",
     "\n",
     "language_vectorstore = LanceDB(language, embeddings)\n",
     "\n",
     "retriever = language_vectorstore.as_retriever(search_kwargs={\"k\": 1})"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "d161b155-1832-4800-aa90-2001b59b2cfa",
   "metadata": {},
   "source": [
    "## Load Instructions/Prompts"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "id": "534ff961-7a09-4287-954d-458d8209e6af",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 1/1 [00:00<00:00, 564.59it/s]\n"
     ]
    }
   ],
   "source": [
    "from langchain_community.document_loaders import DirectoryLoader, TextLoader\n",
    "\n",
    "loader = DirectoryLoader('structurizr/llm', glob=\"instruction01.txt\", show_progress=True, use_multithreading=True, loader_cls=TextLoader)\n",
    "\n",
    "instructions = loader.load()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "00f7037d-81fc-4710-920a-7387d07ae1b4",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "You are an expert on structurizr which is a domain specific language (DSL) for software architecture modeling and documenation.\n",
      "You will be provided a keyword in structurizr DSL, you need to provide the keyword gramma precisely and consistently.\n",
      "You need to check the permited children of the keyword.\n",
      "You must strictly adhere to the keyword gramma to generate the output.\n",
      "You need to embed the permited children of the keyword in the gramma.\n",
      "You do not need to provide the permited children out of the gramma.\n",
      "You need to provide the gramma, you do not need to provide any explanations.\n",
      "You do not need to privde code examples or explanations. \n",
      "\n"
     ]
    }
   ],
   "source": [
    "print(instruction[0].page_content)"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "1c427e7c-9334-4d33-926c-979cf41cc306",
   "metadata": {},
   "source": [
    "## Retrieval\n",
    "\n",
     "Please refer to `ollama-embeding.ipynb` for embedding details."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "6d751631-d27e-4c50-b05c-dea0b637d4d0",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "CPU times: user 71.6 ms, sys: 3.46 ms, total: 75 ms\n",
      "Wall time: 78.9 ms\n"
     ]
    }
   ],
   "source": [
    "%%time\n",
    "\n",
    "import utility\n",
    "embeddings = utility.get_embeddings()\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "id": "4b7234f9-2c1f-4fba-a95d-22839abfe82f",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "CPU times: user 271 ms, sys: 45.9 ms, total: 317 ms\n",
      "Wall time: 666 ms\n"
     ]
    }
   ],
   "source": [
    "%%time\n",
    "\n",
    "import pyarrow as pa\n",
    "import lancedb\n",
    "from langchain_community.vectorstores import LanceDB\n",
    "\n",
    "db = lancedb.connect(\"lancedb\")\n",
    "\n",
    "language = db.open_table('language')\n",
    "\n",
    "language_vectorstore = LanceDB(language, embeddings)\n",
    "\n",
    "retriever = language_vectorstore.as_retriever(search_kwargs={\"k\": 1})"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "73c8e292-7b1e-48db-ade9-ae7becec7750",
   "metadata": {},
   "source": [
    "## ChatMessage"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "id": "10495493-1dde-461c-aeda-a03fd55c62f1",
   "metadata": {},
   "outputs": [],
   "source": [
    "keyword = \"workspace\"\n",
    "\n",
    "docs = retriever.get_relevant_documents(keyword)\n",
    "\n",
    "language_gramma = f\"The gramma of {keyword} is provided here: {docs[0].page_content}\" "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "id": "d71b852d-6990-42e5-a675-78c5ded97aee",
   "metadata": {},
   "outputs": [],
   "source": [
    "from langchain_core.messages import HumanMessage,SystemMessage, ToolMessage, ChatMessage\n",
    "\n",
    "messages = [\n",
    "    SystemMessage(content=instruction[0].page_content),\n",
    "    SystemMessage(content=language_gramma),\n",
    "    HumanMessage(content=keyword)\n",
    "]"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "37af6422-e697-4642-a853-d1de96538493",
   "metadata": {},
   "source": [
    "## Invoke"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "id": "31c4b3c6-a009-42da-abcc-67e99946ab7f",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "content='```\\nworkspace [name] [description] {\\n  ( name: <name> )?\\n  ( description: <description> )?\\n  ( properties { /* ... */ } )?\\n  ( !docs: [view](#view)] )?\\n  ( !adrs: [adrs](#adrs)] )?\\n  ( !identifiers: [identifier](#identifier)]* )?\\n  ( !impliedRelationships: [relationship](#relationship)* )?\\n  model { /* ... */ }\\n  views { /* ... */ }\\n  ( configuration { /* ... */ } )?\\n}\\n\\nworkspace extends <file|url> {\\n  extends: <file|url>\\n}\\n```' response_metadata={'finish_reason': 'stop'}\n",
      "CPU times: user 94.6 ms, sys: 5.11 ms, total: 99.7 ms\n",
      "Wall time: 3.76 s\n"
     ]
    }
   ],
   "source": [
    "%%time\n",
    "\n",
    "chat_response = chat.invoke(messages)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "id": "23dabfc7-0762-4486-ac9f-af73b7b59957",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "```\n",
      "workspace [name] [description] {\n",
      "  ( name: <name> )?\n",
      "  ( description: <description> )?\n",
      "  ( properties { /* ... */ } )?\n",
      "  ( !docs: [view](#view)] )?\n",
      "  ( !adrs: [adrs](#adrs)] )?\n",
      "  ( !identifiers: [identifier](#identifier)]* )?\n",
      "  ( !impliedRelationships: [relationship](#relationship)* )?\n",
      "  model { /* ... */ }\n",
      "  views { /* ... */ }\n",
      "  ( configuration { /* ... */ } )?\n",
      "}\n",
      "\n",
      "workspace extends <file|url> {\n",
      "  extends: <file|url>\n",
      "}\n",
      "```\n"
     ]
    }
   ],
   "source": [
    "print(chat_response.content, flush=True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "id": "b0b68c28-1d5c-4dec-aee3-fca05b0090a2",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "{'finish_reason': 'stop'}\n"
     ]
    }
   ],
   "source": [
    "print(chat_response.response_metadata, flush=True)"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "fca4c997-f9da-4049-b11b-c6160f86b298",
   "metadata": {},
   "source": [
    "## Chaining"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "id": "ae047388-b2ca-4560-bb4e-463708a372eb",
   "metadata": {},
   "outputs": [],
   "source": [
    "from langchain_core.prompts import ChatPromptTemplate\n",
    "\n",
    "# prompt = ChatPromptTemplate.from_template(\"Tell me a joke about {topic}\")\n",
    "\n",
    "prompt = ChatPromptTemplate.from_messages(messages)\n",
    "\n",
    "chain = prompt | chat"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "id": "dcc438dc-54f8-475c-aa6d-9c5e3e5d50a8",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "CPU times: user 40.6 ms, sys: 11.3 ms, total: 51.9 ms\n",
      "Wall time: 8.47 s\n"
     ]
    }
   ],
   "source": [
     "%%time\n",
     "\n",
     "# NOTE(review): `chain` built in the previous cell is never exercised — this\n",
     "# re-invokes `chat` directly, so the \"Chaining\" section doesn't actually test\n",
     "# the chain. Switching to chain.invoke({}) may fail here because the grammar\n",
     "# text contains literal `{`/`}`, which ChatPromptTemplate parses as template\n",
     "# variables — escape them (`{{`/`}}`) before building the prompt from messages.\n",
     "chat_response = chat.invoke(messages)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "id": "9778bc75-83a1-461e-a549-cb4b629688fd",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "```\n",
      "workspace [name] [description] {\n",
      "  (name: <name>)\n",
      "  (description: <description>)\n",
      "  (properties {\n",
      "    ...\n",
      "  })\n",
      "  (!docs {\n",
      "    ...\n",
      "  })\n",
      "  (!adrs {\n",
      "    ...\n",
      "  })\n",
      "  (!identifiers {\n",
      "    ...\n",
      "  })\n",
      "  (!impliedRelationships {\n",
      "    ...\n",
      "  })\n",
      "  model {\n",
      "    ...\n",
      "  }\n",
      "  views {\n",
      "    ...\n",
      "  }\n",
      "  configuration {\n",
      "    ...\n",
      "  }\n",
      "}\n",
      "\n",
      "workspace extends <file|url> {\n",
      "  (extends: <file|url>)\n",
      "  (workspace-contents {\n",
      "    ...\n",
      "  })\n",
      "}\n",
      "```\n"
     ]
    }
   ],
   "source": [
    "print(chat_response.content, flush=True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "b89f5a67-ff55-4f52-a335-f1f787377ddc",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.11.6"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
