{
 "cells": [
  {
   "cell_type": "markdown",
   "id": "6961a9ea-6ffc-41f5-9ac8-c0ff248c6056",
   "metadata": {},
   "source": [
    "# 实现GraphRAG\n",
    "\n",
    "Implementing GraphRAG for Query-Focused Summarization\n",
    "\n",
    "https://dev.to/stephenc222/implementing-graphrag-for-query-focused-summarization-47ib"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 85,
   "id": "1ab7825a-4cf9-448e-8fd9-bf428270c87d",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Requirement already satisfied: networkx in /Users/laobao/opt/anaconda3/envs/llama-index/lib/python3.10/site-packages (3.2.1)\n"
     ]
    }
   ],
   "source": [
    "# !pip install openai networkx leidenalg cdlib python-igraph python-dotenv\n",
    "!pip install networkx"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "f18907fb-8af7-4e10-b125-b33bf248da39",
   "metadata": {},
   "source": [
    "# 实现步骤"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 103,
   "id": "b17e4ba0-7af9-432b-bfca-3bd079897e50",
   "metadata": {},
   "outputs": [],
   "source": [
    "from openai import OpenAI\n",
    "import networkx as nx\n",
    "from cdlib import algorithms\n",
    "import os"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "3b481a69-792c-4f2c-b016-88c2e70087df",
   "metadata": {},
   "source": [
    "## 0. 读取文档\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 104,
   "id": "6012a718-1fa4-47ab-b7bd-002bec487c20",
   "metadata": {},
   "outputs": [],
   "source": [
    "\n",
    "# Function to read the content of each document from the example_text directory\n",
    "def read_documents_from_files():\n",
    "    documents = []\n",
    "    directory = \"data\"\n",
    "    for filename in os.listdir(directory):\n",
    "        if filename.endswith(\".txt\"):\n",
    "            file_path = os.path.join(directory, filename)\n",
    "            with open(file_path, 'r', encoding='utf-8') as file:\n",
    "                documents.append(file.read())\n",
    "    return documents\n",
    "\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 105,
   "id": "e63deb78-1764-42b5-945c-bbca06b2e886",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Read documents and store them in the DOCUMENTS list\n",
    "DOCUMENTS = read_documents_from_files()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 106,
   "id": "9eaab55c-c4e7-42e8-9192-86e0754e356d",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "'### Document 1: Climate Change Impacts on Global Agriculture\\n\\nClimate change is a significant enviro'"
      ]
     },
     "execution_count": 106,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# 测试一下，显示文档1的前100个字符\n",
    "DOCUMENTS[0][:100]"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "583ef3ca-1a40-48e1-972f-ae9570027b8d",
   "metadata": {},
   "source": [
    "## 1. 文本分块\n",
    "Source Documents → Text Chunks"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 107,
   "id": "d6179bc6-44a6-490e-bd19-531b9461d3b0",
   "metadata": {},
   "outputs": [],
   "source": [
    "def split_documents_into_chunks(documents, chunk_size=600, overlap_size=100):\n",
    "    chunks = []\n",
    "    for document in documents:\n",
    "        for i in range(0, len(document), chunk_size - overlap_size):\n",
    "            chunk = document[i:i + chunk_size]\n",
    "            chunks.append(chunk)\n",
    "    return chunks"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "5e6e9033-5925-477f-b1e0-c27bfbc521b1",
   "metadata": {},
   "source": [
    "## 2. 抽取实体和关系\n",
    "Text Chunks → Element Instances"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 108,
   "id": "81b121f5-3985-4d12-975c-b0271a3ea99b",
   "metadata": {},
   "outputs": [],
   "source": [
    "def extract_elements_from_chunks(chunks,model=\"openai/deepseek-chat\"):\n",
    "    elements = []\n",
    "    for index, chunk in enumerate(chunks):\n",
    "        response = client.chat.completions.create(\n",
    "            # model=\"gpt-4\",\n",
    "            model = model,\n",
    "            messages=[\n",
    "                {\"role\": \"system\", \"content\": \"Extract entities and relationships from the following text.\"},\n",
    "                {\"role\": \"user\", \"content\": chunk}\n",
    "            ]\n",
    "        )\n",
    "        entities_and_relations = response.choices[0].message.content\n",
    "        elements.append(entities_and_relations)\n",
    "    return elements"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "b307bcae-ced6-49ca-afdc-5a3ae54319b0",
   "metadata": {},
   "source": [
    "## 3. Element Instances → Element Summaries\n",
    "We summarize the extracted entities and relationships into a structured format."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 109,
   "id": "10ae287e-956b-44ee-b7da-6093d73d4841",
   "metadata": {},
   "outputs": [],
   "source": [
    "def summarize_elements(elements,model=\"openai/deepseek-chat\"):\n",
    "    summaries = []\n",
    "    for index, element in enumerate(elements):\n",
    "        response = client.chat.completions.create(\n",
    "            # model=\"gpt-4\",\n",
    "            model = model,\n",
    "            messages=[\n",
    "                {\"role\": \"system\", \"content\": \"Summarize the following entities and relationships in a structured format. Use \\\"->\\\" to represent relationships, after the \\\"Relationships:\\\" word.\"},\n",
    "                {\"role\": \"user\", \"content\": element}\n",
    "            ]\n",
    "        )\n",
    "        summary = response.choices[0].message.content\n",
    "        summaries.append(summary)\n",
    "    return summaries"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "e2d53cd1-85ba-4b4d-bcb5-d1d3b0a3f93e",
   "metadata": {},
   "source": [
    "## 4. 实体关系转换为图\n",
    "Element Summaries → Graph Communities\n",
    "\n",
    "We build a graph from the element summaries and detect communities using the **Leiden** algorithm."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 110,
   "id": "44189d77-4047-42fa-9b98-bc6bcb71cbf2",
   "metadata": {},
   "outputs": [],
   "source": [
    "def build_graph_from_summaries(summaries):\n",
    "    G = nx.Graph()\n",
    "    for summary in summaries:\n",
    "        lines = summary.split(\"\\n\")\n",
    "        entities_section = False\n",
    "        relationships_section = False\n",
    "        entities = []\n",
    "        for line in lines:\n",
    "            if line.startswith(\"### Entities:\") or line.startswith(\"**Entities:**\"):\n",
    "                entities_section = True\n",
    "                relationships_section = False\n",
    "                continue\n",
    "            elif line.startswith(\"### Relationships:\") or line.startswith(\"**Relationships:**\"):\n",
    "                entities_section = False\n",
    "                relationships_section = True\n",
    "                continue\n",
    "            if entities_section and line.strip():\n",
    "                entity = line.split(\".\", 1)[1].strip() if line[0].isdigit() and line[1] == \".\" else line.strip()\n",
    "                entity = entity.replace(\"**\", \"\")\n",
    "                entities.append(entity)\n",
    "                G.add_node(entity)\n",
    "            elif relationships_section and line.strip():\n",
    "                parts = line.split(\"->\")\n",
    "                if len(parts) >= 2:\n",
    "                    source = parts[0].strip()\n",
    "                    target = parts[-1].strip()\n",
    "                    relation = \" -> \".join(parts[1:-1]).strip()\n",
    "                    G.add_edge(source, target, label=relation)\n",
    "    return G\n",
    "\n",
    "def detect_communities(graph):\n",
    "    communities = []\n",
    "    for component in nx.connected_components(graph):\n",
    "        subgraph = graph.subgraph(component)\n",
    "        if len(subgraph.nodes) > 1:\n",
    "            try:\n",
    "                sub_communities = algorithms.leiden(subgraph)\n",
    "                for community in sub_communities.communities:\n",
    "                    communities.append(list(community))\n",
    "            except Exception as e:\n",
    "                print(f\"Error processing community: {e}\")\n",
    "        else:\n",
    "            communities.append(list(subgraph.nodes))\n",
    "    return communities"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "1c44b371-2c95-4ea2-be67-08b3aa724b0e",
   "metadata": {},
   "source": [
    "## 5. 图社区Summaries\n",
    "Graph Communities → Community Summaries\n",
    "We summarize each detected community."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 111,
   "id": "ed879fad-209f-4273-a4ed-ddbded9827a9",
   "metadata": {},
   "outputs": [],
   "source": [
    "def summarize_communities(communities, graph,model=\"openai/deepseek-chat\"):\n",
    "    community_summaries = []\n",
    "    for index, community in enumerate(communities):\n",
    "        subgraph = graph.subgraph(community)\n",
    "        nodes = list(subgraph.nodes)\n",
    "        edges = list(subgraph.edges(data=True))\n",
    "        description = \"Entities: \" + \", \".join(nodes) + \"\\nRelationships: \"\n",
    "        relationships = []\n",
    "        for edge in edges:\n",
    "            relationships.append(\n",
    "                f\"{edge[0]} -> {edge[2]['label']} -> {edge[1]}\")\n",
    "        description += \", \".join(relationships)\n",
    "\n",
    "        response = client.chat.completions.create(\n",
    "            # model=\"gpt-4\",\n",
    "            model = model,\n",
    "            messages=[\n",
    "                {\"role\": \"system\", \"content\": \"Summarize the following community of entities and relationships.\"},\n",
    "                {\"role\": \"user\", \"content\": description}\n",
    "            ]\n",
    "        )\n",
    "        summary = response.choices[0].message.content.strip()\n",
    "        community_summaries.append(summary)\n",
    "    return community_summaries"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "35724c78-f483-422a-b3bc-e5be48905321",
   "metadata": {},
   "source": [
    "## 6. 生成回答\n",
    "Community Summaries → Community Answers → Global Answer\n",
    "从社区摘要中生成答案，并将它们组合成最终的全局答案。"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 112,
   "id": "9eb5497c-2d3c-4305-863e-9d6c6d853848",
   "metadata": {},
   "outputs": [],
   "source": [
    "def generate_answers_from_communities(community_summaries, query,model=\"openai/deepseek-chat\"):\n",
    "    intermediate_answers = []\n",
    "    for summary in community_summaries:\n",
    "        response = client.chat.completions.create(\n",
    "            # model=\"gpt-4\",\n",
    "            model = model,\n",
    "            messages=[\n",
    "                {\"role\": \"system\", \"content\": \"Answer the following query based on the provided summary.\"},\n",
    "                {\"role\": \"user\", \"content\": f\"Query: {query} Summary: {summary}\"}\n",
    "            ]\n",
    "        )\n",
    "        intermediate_answers.append(response.choices[0].message.content)\n",
    "\n",
    "    final_response = client.chat.completions.create(\n",
    "        # model=\"gpt-4\",\n",
    "        model = model,\n",
    "        messages=[\n",
    "            {\"role\": \"system\", \"content\": \"Combine these answers into a final, concise response.\"},\n",
    "            {\"role\": \"user\", \"content\": f\"Intermediate answers: {intermediate_answers}\"}\n",
    "        ]\n",
    "    )\n",
    "    final_answer = final_response.choices[0].message.content\n",
    "    return final_answer"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "63e9d2cf-c8db-421b-aa3c-7cfeca9881cf",
   "metadata": {},
   "source": [
    "## 开始使用"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 113,
   "id": "72f4e2ee-2b50-4f07-ba3a-a677096e39db",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Putting It All Together\n",
    "def graph_rag_pipeline(documents, query, chunk_size=600, overlap_size=100):\n",
    "    # Step 1: Split documents into chunks\n",
    "    chunks = split_documents_into_chunks(\n",
    "        documents, chunk_size, overlap_size)\n",
    "\n",
    "    # Step 2: Extract elements from chunks\n",
    "    elements = extract_elements_from_chunks(chunks)\n",
    "\n",
    "    # Step 3: Summarize elements\n",
    "    summaries = summarize_elements(elements)\n",
    "\n",
    "    # Step 4: Build graph and detect communities\n",
    "    graph = build_graph_from_summaries(summaries)\n",
    "    print(\"graph:\", graph)\n",
    "    communities = detect_communities(graph)\n",
    "\n",
    "    print(\"communities:\", communities[0])\n",
    "    # Step 5: Summarize communities\n",
    "    community_summaries = summarize_communities(communities, graph)\n",
    "\n",
    "    # Step 6: Generate answers from community summaries\n",
    "    final_answer = generate_answers_from_communities(\n",
    "        community_summaries, query)\n",
    "\n",
    "    return final_answer"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 114,
   "id": "a4e9e728-58c1-45e9-acac-28957c9684ba",
   "metadata": {},
   "outputs": [],
   "source": [
    "from openai import OpenAI\n",
    "\n",
    "client = OpenAI(api_key=\"sk-1234\", base_url=\"http://1.15.125.13:3033/v1\")\n",
    "model_name = \"openai/deepseek-chat\"\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 115,
   "id": "fe83945b-8548-4cfd-b702-2dc1ab7da8d1",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Query: What are the main themes in these documents?\n"
     ]
    }
   ],
   "source": [
    "# Example usage\n",
    "query = \"What are the main themes in these documents?\"\n",
    "print('Query:', query)\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 116,
   "id": "a986a925-3972-4112-8827-96a6f00e0189",
   "metadata": {},
   "outputs": [],
   "source": [
    "chunk_size=600\n",
    "overlap_size=100\n",
    "\n",
    "\n",
    "chunks = split_documents_into_chunks(DOCUMENTS, chunk_size, overlap_size)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 117,
   "id": "e6273934-c12e-41ea-b317-bf499148815a",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "'### Document 1: Climate Change Impacts on Global Agriculture\\n\\nClimate change is a significant environmental challenge that has profound implications for agriculture worldwide. As the global climate continues to warm due to human activities, including the burning of fossil fuels and deforestation, agriculture faces increasing risks and uncertainties. The impacts of climate change on agriculture are multifaceted, affecting crop yields, livestock productivity, and the stability of food systems.\\n\\nOne of the primary ways climate change affects agriculture is through changes in temperature and preci'"
      ]
     },
     "execution_count": 117,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "chunks[0]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 118,
   "id": "a63412ee-b74e-4bb3-9252-971f9b3b873f",
   "metadata": {},
   "outputs": [],
   "source": [
    "elements = extract_elements_from_chunks(chunks)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 119,
   "id": "e7608d7b-3cc9-43c9-854f-42ae7bff7dde",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "'### Extracted Entities and Relationships:\\n\\n1. **Entity: Climate Change**\\n   - **Type:** Environmental challenge\\n   - **Implications:** Impacts agriculture worldwide\\n\\n2. **Entity: Global Agriculture**\\n   - **Impacted By:** Climate change\\n   - **Effects:** Changes in crop yields, livestock productivity, and food system stability\\n\\n3. **Entity: Human Activities**\\n   - **Examples:** Burning of fossil fuels, deforestation\\n   - **Impact:** Contributes to global warming, which affects climate change\\n\\n4. **Entity: Global Warming**\\n   - **Cause:** Human activities (burning of fossil fuels, deforestation)\\n   - **Effect:** Increases risks and uncertainties for agriculture\\n\\n5. **Entity: Crop Yields**\\n   - **Affected By:** Climate change\\n   - **Impact:** Changes due to temperature variations\\n\\n6. **Entity: Livestock Productivity**\\n   - **Affected By:** Climate change\\n   - **Impact:** Changes due to environmental conditions\\n\\n7. **Entity: Food Systems**\\n   - **Stability Affected By:** Climate change\\n   - **Impact:** Uncertainties and risks due to climate variations\\n\\n### Relationships:\\n\\n- **Climate Change** impacts **Global Agriculture** by affecting **Crop Yields**, **Livestock Productivity**, and **Food Systems Stability**.\\n- **Human Activities** such as **Burning of Fossil Fuels** and **Deforestation** lead to **Global Warming**, which in turn exacerbates **Climate Change**.\\n- **Climate Change** is a result of **Global Warming** caused by **Human Activities**.\\n- **Crop Yields** and **Livestock Productivity** are directly influenced by the changes in environmental conditions brought about by **Climate Change**.\\n- The **Stability of Food Systems** is threatened by the increasing **Risks and Uncertainties** associated with **Climate Change**.'"
      ]
     },
     "execution_count": 119,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "elements[0]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 120,
   "id": "cecafc37-7d4c-4254-96da-cd9ef3054f2f",
   "metadata": {},
   "outputs": [],
   "source": [
    "summaries = summarize_elements(elements)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 121,
   "id": "afc0492f-8c79-4ac7-b8c8-77ff59782b12",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "'### Structured Summary of Entities and Relationships:\\n\\n1. **Entity: Climate Change**\\n   - **Type:** Environmental challenge\\n   - **Implications:** Impacts agriculture worldwide\\n\\n2. **Entity: Global Agriculture**\\n   - **Impacted By:** Climate change\\n   - **Effects:** Changes in crop yields, livestock productivity, and food system stability\\n\\n3. **Entity: Human Activities**\\n   - **Examples:** Burning of fossil fuels, deforestation\\n   - **Impact:** Contributes to global warming, which affects climate change\\n\\n4. **Entity: Global Warming**\\n   - **Cause:** Human activities (burning of fossil fuels, deforestation)\\n   - **Effect:** Increases risks and uncertainties for agriculture\\n\\n5. **Entity: Crop Yields**\\n   - **Affected By:** Climate change\\n   - **Impact:** Changes due to temperature variations\\n\\n6. **Entity: Livestock Productivity**\\n   - **Affected By:** Climate change\\n   - **Impact:** Changes due to environmental conditions\\n\\n7. **Entity: Food Systems**\\n   - **Stability Affected By:** Climate change\\n   - **Impact:** Uncertainties and risks due to climate variations\\n\\n### Relationships:\\n\\n- Climate Change -> Global Agriculture (Impacts)\\n- Climate Change -> Crop Yields (Affects)\\n- Climate Change -> Livestock Productivity (Affects)\\n- Climate Change -> Food Systems Stability (Affects)\\n- Human Activities -> Global Warming (Leads to)\\n- Global Warming -> Climate Change (Exacerbates)\\n- Climate Change <- Global Warming (Result of)\\n- Climate Change <- Human Activities (Result of)'"
      ]
     },
     "execution_count": 121,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "summaries[0]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 122,
   "id": "62f10001-9999-4ea2-8b7c-b5cf1e3c2bbb",
   "metadata": {},
   "outputs": [],
   "source": [
    "graph = build_graph_from_summaries(summaries)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 123,
   "id": "406390cf-3626-4e41-b010-f3b64599b1cd",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "<bound method Graph.subgraph of <networkx.classes.graph.Graph object at 0x2a173aec0>>"
      ]
     },
     "execution_count": 123,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "graph.subgraph"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 124,
   "id": "6ad3f866-f281-4172-b263-dcc1a931cf25",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Error processing community: invalid literal for int() with base 10: 'Global Agriculture (Impacts)'\n",
      "Error processing community: invalid literal for int() with base 10: 'Global Warming (Leads to)'\n",
      "Error processing community: invalid literal for int() with base 10: '- Global Warming'\n",
      "Error processing community: invalid literal for int() with base 10: '11. Precipitation patterns'\n",
      "Error processing community: invalid literal for int() with base 10: '2. Climate change'\n",
      "Error processing community: invalid literal for int() with base 10: 'Animals'\n",
      "Error processing community: invalid literal for int() with base 10: 'Growing season'\n",
      "Error processing community: invalid literal for int() with base 10: '5. Water availability'\n",
      "Error processing community: invalid literal for int() with base 10: '6. Higher temperatures'\n",
      "Error processing community: invalid literal for int() with base 10: 'Reduced yields'\n",
      "Error processing community: invalid literal for int() with base 10: '7. Accelerated development'\n",
      "Error processing community: invalid literal for int() with base 10: 'Smaller harvests'\n",
      "Error processing community: invalid literal for int() with base 10: 'Lower-quality produce'\n",
      "Error processing community: invalid literal for int() with base 10: 'Crops during critical growth stages'\n",
      "Error processing community: invalid literal for int() with base 10: 'Regions'\n",
      "Error processing community: invalid literal for int() with base 10: 'Irrigation'\n",
      "Error processing community: invalid literal for int() with base 10: 'more intense and frequent droughts'\n",
      "Error processing community: invalid literal for int() with base 10: 'increased rainfall and flooding'\n",
      "Error processing community: invalid literal for int() with base 10: '8. Flooding'\n",
      "Error processing community: invalid literal for int() with base 10: 'Crops effectively'\n",
      "Error processing community: invalid literal for int() with base 10: '10. Climate change'\n",
      "Error processing community: invalid literal for int() with base 10: '4. Heat stress'\n",
      "Error processing community: invalid literal for int() with base 10: '5. Heat stress'\n",
      "Error processing community: invalid literal for int() with base 10: 'Grazing lands, Water resources'\n",
      "Error processing community: invalid literal for int() with base 10: 'Herds'\n",
      "Error processing community: invalid literal for int() with base 10: 'increased food prices'\n",
      "Error processing community: invalid literal for int() with base 10: '4. Increased food prices'\n",
      "Error processing community: invalid literal for int() with base 10: '5. Difficulty in accessing adequate nutrition'\n",
      "Error processing community: invalid literal for int() with base 10: '6. Exacerbated inequalities'\n",
      "Error processing community: invalid literal for int() with base 10: 'necessitate shifts in agricultural practices and crop varieties'\n",
      "Error processing community: invalid literal for int() with base 10: 'require farmers to adapt to new conditions'\n",
      "Error processing community: invalid literal for int() with base 10: '9. Adaptation to new conditions'\n",
      "Error processing community: invalid literal for int() with base 10: '2. Farmers'\n",
      "Error processing community: invalid literal for int() with base 10: '4. Adaptation strategies'\n",
      "Error processing community: invalid literal for int() with base 10: '3. Adaptation'\n",
      "Error processing community: invalid literal for int() with base 10: '5. Adaptation strategies'\n",
      "Error processing community: invalid literal for int() with base 10: 'Sustainable soil management techniques'\n",
      "Error processing community: invalid literal for int() with base 10: '7. Climate risk management'\n",
      "Error processing community: invalid literal for int() with base 10: 'Farming communities'\n",
      "Error processing community: invalid literal for int() with base 10: '9. Farming communities'\n",
      "Error processing community: invalid literal for int() with base 10: 'Threat to global agriculture'\n",
      "Error processing community: invalid literal for int() with base 10: 'Impact on crop yields and livestock productivity'\n",
      "Error processing community: invalid literal for int() with base 10: '3. Impact on crop yields and livestock productivity'\n",
      "Error processing community: invalid literal for int() with base 10: '4. Climate-related shocks'\n",
      "Error processing community: invalid literal for int() with base 10: '5. Warming world'\n",
      "Error processing community: invalid literal for int() with base 10: 'Warming world: Agriculture faces challenges due to a warming global climate.'\n",
      "communities: ['Climate change']\n"
     ]
    }
   ],
   "source": [
    "communities = detect_communities(graph)\n",
    "print(\"communities:\", communities[0])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 125,
   "id": "c4d231ee-9494-40db-84f0-06fd103728ad",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Step 5: Summarize communities\n",
    "community_summaries = summarize_communities(communities, graph)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 126,
   "id": "3fe89c0d-a4be-4985-a3a1-64dae4a57ae2",
   "metadata": {},
   "outputs": [
    {
     "ename": "BadRequestError",
     "evalue": "Error code: 400 - {'error': {'message': \"OpenAIException - Error code: 400 - {'object': 'error', 'message': 'Conversation roles must alternate user/assistant/user/assistant/...', 'type': 'BadRequestError', 'param': None, 'code': 400}\", 'type': None, 'param': None, 'code': 400}}",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mBadRequestError\u001b[0m                           Traceback (most recent call last)",
      "Cell \u001b[0;32mIn[126], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m final_answer \u001b[38;5;241m=\u001b[39m \u001b[43mgenerate_answers_from_communities\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m      2\u001b[0m \u001b[43m        \u001b[49m\u001b[43mcommunity_summaries\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mquery\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mmodel\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mopenai/Mixtral-8x7B-Instruct\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m)\u001b[49m\n",
      "Cell \u001b[0;32mIn[112], line 4\u001b[0m, in \u001b[0;36mgenerate_answers_from_communities\u001b[0;34m(community_summaries, query, model)\u001b[0m\n\u001b[1;32m      2\u001b[0m intermediate_answers \u001b[38;5;241m=\u001b[39m []\n\u001b[1;32m      3\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m summary \u001b[38;5;129;01min\u001b[39;00m community_summaries:\n\u001b[0;32m----> 4\u001b[0m     response \u001b[38;5;241m=\u001b[39m \u001b[43mclient\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mchat\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcompletions\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcreate\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m      5\u001b[0m \u001b[43m        \u001b[49m\u001b[38;5;66;43;03m# model=\"gpt-4\",\u001b[39;49;00m\n\u001b[1;32m      6\u001b[0m \u001b[43m        \u001b[49m\u001b[43mmodel\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m \u001b[49m\u001b[43mmodel\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m      7\u001b[0m \u001b[43m        \u001b[49m\u001b[43mmessages\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m[\u001b[49m\n\u001b[1;32m      8\u001b[0m \u001b[43m            \u001b[49m\u001b[43m{\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mrole\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43msystem\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mcontent\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mAnswer the following query based on the provided summary.\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m}\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m      9\u001b[0m \u001b[43m            
\u001b[49m\u001b[43m{\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mrole\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43muser\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mcontent\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43mf\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mQuery: \u001b[39;49m\u001b[38;5;132;43;01m{\u001b[39;49;00m\u001b[43mquery\u001b[49m\u001b[38;5;132;43;01m}\u001b[39;49;00m\u001b[38;5;124;43m Summary: \u001b[39;49m\u001b[38;5;132;43;01m{\u001b[39;49;00m\u001b[43msummary\u001b[49m\u001b[38;5;132;43;01m}\u001b[39;49;00m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m}\u001b[49m\n\u001b[1;32m     10\u001b[0m \u001b[43m        \u001b[49m\u001b[43m]\u001b[49m\n\u001b[1;32m     11\u001b[0m \u001b[43m    \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m     12\u001b[0m     intermediate_answers\u001b[38;5;241m.\u001b[39mappend(response\u001b[38;5;241m.\u001b[39mchoices[\u001b[38;5;241m0\u001b[39m]\u001b[38;5;241m.\u001b[39mmessage\u001b[38;5;241m.\u001b[39mcontent)\n\u001b[1;32m     14\u001b[0m final_response \u001b[38;5;241m=\u001b[39m client\u001b[38;5;241m.\u001b[39mchat\u001b[38;5;241m.\u001b[39mcompletions\u001b[38;5;241m.\u001b[39mcreate(\n\u001b[1;32m     15\u001b[0m     \u001b[38;5;66;03m# model=\"gpt-4\",\u001b[39;00m\n\u001b[1;32m     16\u001b[0m     model \u001b[38;5;241m=\u001b[39m model,\n\u001b[0;32m   (...)\u001b[0m\n\u001b[1;32m     20\u001b[0m     ]\n\u001b[1;32m     21\u001b[0m )\n",
      "File \u001b[0;32m~/opt/anaconda3/envs/llama-index/lib/python3.10/site-packages/openai/_utils/_utils.py:275\u001b[0m, in \u001b[0;36mrequired_args.<locals>.inner.<locals>.wrapper\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m    273\u001b[0m             msg \u001b[38;5;241m=\u001b[39m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mMissing required argument: \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mquote(missing[\u001b[38;5;241m0\u001b[39m])\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m    274\u001b[0m     \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mTypeError\u001b[39;00m(msg)\n\u001b[0;32m--> 275\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n",
      "File \u001b[0;32m~/opt/anaconda3/envs/llama-index/lib/python3.10/site-packages/openai/resources/chat/completions.py:663\u001b[0m, in \u001b[0;36mCompletions.create\u001b[0;34m(self, messages, model, frequency_penalty, function_call, functions, logit_bias, logprobs, max_tokens, n, presence_penalty, response_format, seed, stop, stream, temperature, tool_choice, tools, top_logprobs, top_p, user, extra_headers, extra_query, extra_body, timeout)\u001b[0m\n\u001b[1;32m    611\u001b[0m \u001b[38;5;129m@required_args\u001b[39m([\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mmessages\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mmodel\u001b[39m\u001b[38;5;124m\"\u001b[39m], [\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mmessages\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mmodel\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mstream\u001b[39m\u001b[38;5;124m\"\u001b[39m])\n\u001b[1;32m    612\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mcreate\u001b[39m(\n\u001b[1;32m    613\u001b[0m     \u001b[38;5;28mself\u001b[39m,\n\u001b[0;32m   (...)\u001b[0m\n\u001b[1;32m    661\u001b[0m     timeout: \u001b[38;5;28mfloat\u001b[39m \u001b[38;5;241m|\u001b[39m httpx\u001b[38;5;241m.\u001b[39mTimeout \u001b[38;5;241m|\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m \u001b[38;5;241m|\u001b[39m NotGiven \u001b[38;5;241m=\u001b[39m NOT_GIVEN,\n\u001b[1;32m    662\u001b[0m ) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m ChatCompletion \u001b[38;5;241m|\u001b[39m Stream[ChatCompletionChunk]:\n\u001b[0;32m--> 663\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_post\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m    664\u001b[0m \u001b[43m        
\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43m/chat/completions\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[1;32m    665\u001b[0m \u001b[43m        \u001b[49m\u001b[43mbody\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmaybe_transform\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m    666\u001b[0m \u001b[43m            \u001b[49m\u001b[43m{\u001b[49m\n\u001b[1;32m    667\u001b[0m \u001b[43m                \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mmessages\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mmessages\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    668\u001b[0m \u001b[43m                \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mmodel\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mmodel\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    669\u001b[0m \u001b[43m                \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mfrequency_penalty\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mfrequency_penalty\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    670\u001b[0m \u001b[43m                \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mfunction_call\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mfunction_call\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    671\u001b[0m \u001b[43m                \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mfunctions\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mfunctions\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    672\u001b[0m \u001b[43m                \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mlogit_bias\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m 
\u001b[49m\u001b[43mlogit_bias\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    673\u001b[0m \u001b[43m                \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mlogprobs\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mlogprobs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    674\u001b[0m \u001b[43m                \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mmax_tokens\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mmax_tokens\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    675\u001b[0m \u001b[43m                \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mn\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mn\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    676\u001b[0m \u001b[43m                \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mpresence_penalty\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mpresence_penalty\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    677\u001b[0m \u001b[43m                \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mresponse_format\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mresponse_format\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    678\u001b[0m \u001b[43m                \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mseed\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mseed\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    679\u001b[0m \u001b[43m                \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mstop\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mstop\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    680\u001b[0m \u001b[43m                
\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mstream\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mstream\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    681\u001b[0m \u001b[43m                \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mtemperature\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mtemperature\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    682\u001b[0m \u001b[43m                \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mtool_choice\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mtool_choice\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    683\u001b[0m \u001b[43m                \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mtools\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mtools\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    684\u001b[0m \u001b[43m                \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mtop_logprobs\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mtop_logprobs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    685\u001b[0m \u001b[43m                \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mtop_p\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mtop_p\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    686\u001b[0m \u001b[43m                \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43muser\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43muser\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    687\u001b[0m \u001b[43m            \u001b[49m\u001b[43m}\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    688\u001b[0m \u001b[43m            
\u001b[49m\u001b[43mcompletion_create_params\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mCompletionCreateParams\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    689\u001b[0m \u001b[43m        \u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    690\u001b[0m \u001b[43m        \u001b[49m\u001b[43moptions\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmake_request_options\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m    691\u001b[0m \u001b[43m            \u001b[49m\u001b[43mextra_headers\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mextra_headers\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mextra_query\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mextra_query\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mextra_body\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mextra_body\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mtimeout\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mtimeout\u001b[49m\n\u001b[1;32m    692\u001b[0m \u001b[43m        \u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    693\u001b[0m \u001b[43m        \u001b[49m\u001b[43mcast_to\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mChatCompletion\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    694\u001b[0m \u001b[43m        \u001b[49m\u001b[43mstream\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mstream\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;129;43;01mor\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mFalse\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[1;32m    695\u001b[0m \u001b[43m        \u001b[49m\u001b[43mstream_cls\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mStream\u001b[49m\u001b[43m[\u001b[49m\u001b[43mChatCompletionChunk\u001b[49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    696\u001b[0m \u001b[43m    \u001b[49m\u001b[43m)\u001b[49m\n",
      "File \u001b[0;32m~/opt/anaconda3/envs/llama-index/lib/python3.10/site-packages/openai/_base_client.py:1200\u001b[0m, in \u001b[0;36mSyncAPIClient.post\u001b[0;34m(self, path, cast_to, body, options, files, stream, stream_cls)\u001b[0m\n\u001b[1;32m   1186\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mpost\u001b[39m(\n\u001b[1;32m   1187\u001b[0m     \u001b[38;5;28mself\u001b[39m,\n\u001b[1;32m   1188\u001b[0m     path: \u001b[38;5;28mstr\u001b[39m,\n\u001b[0;32m   (...)\u001b[0m\n\u001b[1;32m   1195\u001b[0m     stream_cls: \u001b[38;5;28mtype\u001b[39m[_StreamT] \u001b[38;5;241m|\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m,\n\u001b[1;32m   1196\u001b[0m ) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m ResponseT \u001b[38;5;241m|\u001b[39m _StreamT:\n\u001b[1;32m   1197\u001b[0m     opts \u001b[38;5;241m=\u001b[39m FinalRequestOptions\u001b[38;5;241m.\u001b[39mconstruct(\n\u001b[1;32m   1198\u001b[0m         method\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mpost\u001b[39m\u001b[38;5;124m\"\u001b[39m, url\u001b[38;5;241m=\u001b[39mpath, json_data\u001b[38;5;241m=\u001b[39mbody, files\u001b[38;5;241m=\u001b[39mto_httpx_files(files), \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39moptions\n\u001b[1;32m   1199\u001b[0m     )\n\u001b[0;32m-> 1200\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m cast(ResponseT, \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mrequest\u001b[49m\u001b[43m(\u001b[49m\u001b[43mcast_to\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mopts\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mstream\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mstream\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mstream_cls\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mstream_cls\u001b[49m\u001b[43m)\u001b[49m)\n",
      "File \u001b[0;32m~/opt/anaconda3/envs/llama-index/lib/python3.10/site-packages/openai/_base_client.py:889\u001b[0m, in \u001b[0;36mSyncAPIClient.request\u001b[0;34m(self, cast_to, options, remaining_retries, stream, stream_cls)\u001b[0m\n\u001b[1;32m    880\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mrequest\u001b[39m(\n\u001b[1;32m    881\u001b[0m     \u001b[38;5;28mself\u001b[39m,\n\u001b[1;32m    882\u001b[0m     cast_to: Type[ResponseT],\n\u001b[0;32m   (...)\u001b[0m\n\u001b[1;32m    887\u001b[0m     stream_cls: \u001b[38;5;28mtype\u001b[39m[_StreamT] \u001b[38;5;241m|\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m,\n\u001b[1;32m    888\u001b[0m ) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m ResponseT \u001b[38;5;241m|\u001b[39m _StreamT:\n\u001b[0;32m--> 889\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_request\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m    890\u001b[0m \u001b[43m        \u001b[49m\u001b[43mcast_to\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcast_to\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    891\u001b[0m \u001b[43m        \u001b[49m\u001b[43moptions\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43moptions\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    892\u001b[0m \u001b[43m        \u001b[49m\u001b[43mstream\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mstream\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    893\u001b[0m \u001b[43m        \u001b[49m\u001b[43mstream_cls\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mstream_cls\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    894\u001b[0m \u001b[43m        \u001b[49m\u001b[43mremaining_retries\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mremaining_retries\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    895\u001b[0m \u001b[43m    \u001b[49m\u001b[43m)\u001b[49m\n",
      "File \u001b[0;32m~/opt/anaconda3/envs/llama-index/lib/python3.10/site-packages/openai/_base_client.py:980\u001b[0m, in \u001b[0;36mSyncAPIClient._request\u001b[0;34m(self, cast_to, options, remaining_retries, stream, stream_cls)\u001b[0m\n\u001b[1;32m    977\u001b[0m         err\u001b[38;5;241m.\u001b[39mresponse\u001b[38;5;241m.\u001b[39mread()\n\u001b[1;32m    979\u001b[0m     log\u001b[38;5;241m.\u001b[39mdebug(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mRe-raising status error\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m--> 980\u001b[0m     \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_make_status_error_from_response(err\u001b[38;5;241m.\u001b[39mresponse) \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[1;32m    982\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_process_response(\n\u001b[1;32m    983\u001b[0m     cast_to\u001b[38;5;241m=\u001b[39mcast_to,\n\u001b[1;32m    984\u001b[0m     options\u001b[38;5;241m=\u001b[39moptions,\n\u001b[0;32m   (...)\u001b[0m\n\u001b[1;32m    987\u001b[0m     stream_cls\u001b[38;5;241m=\u001b[39mstream_cls,\n\u001b[1;32m    988\u001b[0m )\n",
      "\u001b[0;31mBadRequestError\u001b[0m: Error code: 400 - {'error': {'message': \"OpenAIException - Error code: 400 - {'object': 'error', 'message': 'Conversation roles must alternate user/assistant/user/assistant/...', 'type': 'BadRequestError', 'param': None, 'code': 400}\", 'type': None, 'param': None, 'code': 400}}"
     ]
    }
   ],
   "source": [
    "final_answer = generate_answers_from_communities(\n",
    "    community_summaries,\n",
    "    query,\n",
    "    model=\"openai/Mixtral-8x7B-Instruct\",\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "893757e8-9272-4d2a-a3f2-b23a122521a1",
   "metadata": {},
   "outputs": [],
   "source": [
    "# answer = graph_rag_pipeline(DOCUMENTS, query)\n",
    "# print('Answer:', answer)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "8847a11b-9548-4448-833d-c3eeb3ca4965",
   "metadata": {},
   "outputs": [],
   "source": [
    "final_answer"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.13"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
