{
 "cells": [
  {
   "cell_type": "markdown",
   "id": "f59565a0-62c0-4048-8a7c-e60fba161cd2",
   "metadata": {
    "tags": []
   },
   "source": [
    "# Chat Engine - Best Mode"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "433ea1f0-86e8-4912-8db8-dfe3d6a00b6d",
   "metadata": {},
   "source": [
    "The default chat engine mode is \"best\", which uses the \"openai\" mode if you are using an OpenAI model that supports the latest function calling API, otherwise uses the \"react\" mode"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "9b036854-2947-4ce9-b662-a17494f659b0",
   "metadata": {
    "tags": []
   },
   "source": [
    "### Get started in 5 lines of code"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "600cf600-c0d4-4f15-9424-0a2b07c34e77",
   "metadata": {},
   "source": [
    "Load data and build index"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "id": "e633c998-2d63-41c5-a678-46d7683e324a",
   "metadata": {
    "tags": []
   },
   "outputs": [],
   "source": [
    "from llama_index import VectorStoreIndex, SimpleDirectoryReader, ServiceContext\n",
    "from llama_index.llms import OpenAI, Anthropic\n",
    "\n",
    "service_context = ServiceContext.from_defaults(llm=OpenAI(model=\"gpt-4\"))\n",
    "data = SimpleDirectoryReader(input_dir=\"../data/paul_graham/\").load_data()\n",
    "index = VectorStoreIndex.from_documents(data, service_context=service_context)"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "6bba0968-611f-465b-b98e-73eb8a5bc4e4",
   "metadata": {
    "tags": []
   },
   "source": [
    "Configure chat engine"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "id": "938190e1-bb7c-4dda-895e-f8351026f8d9",
   "metadata": {
    "tags": []
   },
   "outputs": [],
   "source": [
    "chat_engine = index.as_chat_engine(chat_mode=\"best\", verbose=True)"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "b13bb12d-b583-4f35-bede-6d95a023a2fd",
   "metadata": {},
   "source": [
    "Chat with your data"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "id": "301220a3-0369-4104-bbbb-7bd084b793fc",
   "metadata": {
    "tags": []
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "=== Calling Function ===\n",
      "Calling function: query_engine_tool with args: {\n",
      "  \"input\": \"What are the first programs Paul Graham tried writing?\"\n",
      "}\n",
      "Got output: The first programs Paul Graham tried writing were on the IBM 1401 that their school district used for \"data processing.\" The language he used was an early version of Fortran.\n",
      "========================\n"
     ]
    }
   ],
   "source": [
    "response = chat_engine.chat(\"What are the first programs Paul Graham tried writing?\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "id": "50051bbf-ef40-4be0-bb74-b42e944a4c38",
   "metadata": {
    "tags": []
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "The first programs Paul Graham tried writing were on the IBM 1401 using an early version of Fortran.\n"
     ]
    }
   ],
   "source": [
    "print(response)"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.9.16"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
