{
  "nbformat": 4,
  "nbformat_minor": 0,
  "metadata": {
    "colab": {
      "provenance": [],
      "toc_visible": true
    },
    "kernelspec": {
      "name": "python3",
      "display_name": "Python 3"
    },
    "language_info": {
      "name": "python"
    }
  },
  "cells": [
    {
      "cell_type": "markdown",
      "source": [
        "# Import libraries and main methods"
      ],
      "metadata": {
        "id": "L0YVDwKYDzGX"
      }
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "id": "B-Kz6-kjDjsI"
      },
      "outputs": [],
      "source": [
        "%%capture\n",
        "# Install the AWS CLI so the `!aws configure` calls below work on a fresh Colab VM\n",
        "!apt-get install awscli -y"
      ]
    },
    {
      "cell_type": "code",
      "source": [
        "%%capture\n",
        "# Pinned versions keep Colab re-runs reproducible (duplicate boto3 spec removed)\n",
        "!pip install ai71 rouge-score bert-score boto3==1.35.88 pinecone==5.4.2 opensearch-py==2.8.0"
      ],
      "metadata": {
        "id": "tGfEODLMHVrU"
      },
      "execution_count": null,
      "outputs": []
    },
    {
      "cell_type": "code",
      "source": [
        "#@title setup ai71 api client\n",
        "# NOTE(review): avoid pasting real API keys into the cell -- prefer\n",
        "# getpass/env vars or Colab secrets so keys never land in the saved notebook.\n",
        "from ai71 import AI71\n",
        "\n",
        "# Pool of clients so requests can be spread round-robin across API keys\n",
        "ai71_clients = [\n",
        "    AI71(\"<API_KEY_1>\"),\n",
        "    AI71(\"<API_KEY_2>\"),\n",
        "    AI71(\"<API_KEY_3>\"),\n",
        "    AI71(\"<API_KEY_4>\"),\n",
        "    AI71(\"<API_KEY_5>\"),\n",
        "    AI71(\"<API_KEY_6>\"),\n",
        "    AI71(\"<API_KEY_7>\"),\n",
        "    AI71(\"<API_KEY_8>\"),\n",
        "    AI71(\"<API_KEY_9>\"),\n",
        "    AI71(\"<API_KEY_10>\"),\n",
        "    AI71(\"<API_KEY_11>\")\n",
        "    ]\n",
        "# Last expression: display the pool size\n",
        "len(ai71_clients)"
      ],
      "metadata": {
        "id": "82DBNNGDHlxy",
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "outputId": "1e7a1f41-4bc7-4ed9-8013-9a0fd7f75dfe"
      },
      "execution_count": null,
      "outputs": [
        {
          "output_type": "execute_result",
          "data": {
            "text/plain": [
              "11"
            ]
          },
          "metadata": {},
          "execution_count": 4
        }
      ]
    },
    {
      "cell_type": "code",
      "source": [
        "#@title setup sigir-participant\n",
        "from google.colab import files\n",
        "import pandas as pd\n",
        "import os\n",
        "import boto3\n",
        "\n",
        "# Upload the AWS credentials CSV file exported from the IAM console\n",
        "aws_credentials = files.upload()\n",
        "\n",
        "# Get the uploaded filename dynamically (first and only uploaded file)\n",
        "filename = list(aws_credentials.keys())[0]\n",
        "\n",
        "# Read the CSV file\n",
        "aws_df = pd.read_csv(filename)\n",
        "\n",
        "# Extract AWS credentials\n",
        "aws_access_key = aws_df.iloc[0]['Access key ID']\n",
        "aws_secret_key = aws_df.iloc[0]['Secret access key']\n",
        "\n",
        "# Profile/region constants, defined once (the original re-assigned the\n",
        "# profile name a second time further down with the same value)\n",
        "AWS_PROFILE_NAME = \"sigir-participant\"\n",
        "AWS_REGION_NAME = \"us-east-1\"\n",
        "\n",
        "# NOTE(review): interpolating the secret into a shell command exposes it to\n",
        "# the process list / shell history; acceptable only in a throwaway Colab VM.\n",
        "!aws configure set aws_access_key_id {aws_access_key} --profile {AWS_PROFILE_NAME}\n",
        "!aws configure set aws_secret_access_key {aws_secret_key} --profile {AWS_PROFILE_NAME}\n",
        "!aws configure set region {AWS_REGION_NAME} --profile {AWS_PROFILE_NAME}\n",
        "\n",
        "# Set profile for boto3\n",
        "os.environ[\"AWS_PROFILE\"] = AWS_PROFILE_NAME\n",
        "\n",
        "print(\"AWS credentials loaded and profile configured successfully!\")\n",
        "\n",
        "# AWS SSM utilities\n",
        "\n",
        "def get_ssm_value(key: str, profile: str = AWS_PROFILE_NAME, region: str = AWS_REGION_NAME) -> str:\n",
        "    \"\"\"Get a cleartext value from AWS SSM.\"\"\"\n",
        "    session = boto3.Session(profile_name=profile, region_name=region)\n",
        "    ssm = session.client(\"ssm\")\n",
        "    return ssm.get_parameter(Name=key)[\"Parameter\"][\"Value\"]\n",
        "\n",
        "def get_ssm_secret(key: str, profile: str = AWS_PROFILE_NAME, region: str = AWS_REGION_NAME) -> str:\n",
        "    \"\"\"Get a SecureString value from AWS SSM, decrypting it on read.\"\"\"\n",
        "    session = boto3.Session(profile_name=profile, region_name=region)\n",
        "    ssm = session.client(\"ssm\")\n",
        "    return ssm.get_parameter(Name=key, WithDecryption=True)[\"Parameter\"][\"Value\"]"
      ],
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 90
        },
        "id": "lA7uo-NwD9Tf",
        "outputId": "d4c3cda3-ca03-4437-9e95-4de67936bc90"
      },
      "execution_count": null,
      "outputs": [
        {
          "output_type": "display_data",
          "data": {
            "text/plain": [
              "<IPython.core.display.HTML object>"
            ],
            "text/html": [
              "\n",
              "     <input type=\"file\" id=\"files-0fd14b3e-05a0-4101-850f-327f1de1f5c3\" name=\"files[]\" multiple disabled\n",
              "        style=\"border:none\" />\n",
              "     <output id=\"result-0fd14b3e-05a0-4101-850f-327f1de1f5c3\">\n",
              "      Upload widget is only available when the cell has been executed in the\n",
              "      current browser session. Please rerun this cell to enable.\n",
              "      </output>\n",
              "      <script>// Copyright 2017 Google LLC\n",
              "//\n",
              "// Licensed under the Apache License, Version 2.0 (the \"License\");\n",
              "// you may not use this file except in compliance with the License.\n",
              "// You may obtain a copy of the License at\n",
              "//\n",
              "//      http://www.apache.org/licenses/LICENSE-2.0\n",
              "//\n",
              "// Unless required by applicable law or agreed to in writing, software\n",
              "// distributed under the License is distributed on an \"AS IS\" BASIS,\n",
              "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n",
              "// See the License for the specific language governing permissions and\n",
              "// limitations under the License.\n",
              "\n",
              "/**\n",
              " * @fileoverview Helpers for google.colab Python module.\n",
              " */\n",
              "(function(scope) {\n",
              "function span(text, styleAttributes = {}) {\n",
              "  const element = document.createElement('span');\n",
              "  element.textContent = text;\n",
              "  for (const key of Object.keys(styleAttributes)) {\n",
              "    element.style[key] = styleAttributes[key];\n",
              "  }\n",
              "  return element;\n",
              "}\n",
              "\n",
              "// Max number of bytes which will be uploaded at a time.\n",
              "const MAX_PAYLOAD_SIZE = 100 * 1024;\n",
              "\n",
              "function _uploadFiles(inputId, outputId) {\n",
              "  const steps = uploadFilesStep(inputId, outputId);\n",
              "  const outputElement = document.getElementById(outputId);\n",
              "  // Cache steps on the outputElement to make it available for the next call\n",
              "  // to uploadFilesContinue from Python.\n",
              "  outputElement.steps = steps;\n",
              "\n",
              "  return _uploadFilesContinue(outputId);\n",
              "}\n",
              "\n",
              "// This is roughly an async generator (not supported in the browser yet),\n",
              "// where there are multiple asynchronous steps and the Python side is going\n",
              "// to poll for completion of each step.\n",
              "// This uses a Promise to block the python side on completion of each step,\n",
              "// then passes the result of the previous step as the input to the next step.\n",
              "function _uploadFilesContinue(outputId) {\n",
              "  const outputElement = document.getElementById(outputId);\n",
              "  const steps = outputElement.steps;\n",
              "\n",
              "  const next = steps.next(outputElement.lastPromiseValue);\n",
              "  return Promise.resolve(next.value.promise).then((value) => {\n",
              "    // Cache the last promise value to make it available to the next\n",
              "    // step of the generator.\n",
              "    outputElement.lastPromiseValue = value;\n",
              "    return next.value.response;\n",
              "  });\n",
              "}\n",
              "\n",
              "/**\n",
              " * Generator function which is called between each async step of the upload\n",
              " * process.\n",
              " * @param {string} inputId Element ID of the input file picker element.\n",
              " * @param {string} outputId Element ID of the output display.\n",
              " * @return {!Iterable<!Object>} Iterable of next steps.\n",
              " */\n",
              "function* uploadFilesStep(inputId, outputId) {\n",
              "  const inputElement = document.getElementById(inputId);\n",
              "  inputElement.disabled = false;\n",
              "\n",
              "  const outputElement = document.getElementById(outputId);\n",
              "  outputElement.innerHTML = '';\n",
              "\n",
              "  const pickedPromise = new Promise((resolve) => {\n",
              "    inputElement.addEventListener('change', (e) => {\n",
              "      resolve(e.target.files);\n",
              "    });\n",
              "  });\n",
              "\n",
              "  const cancel = document.createElement('button');\n",
              "  inputElement.parentElement.appendChild(cancel);\n",
              "  cancel.textContent = 'Cancel upload';\n",
              "  const cancelPromise = new Promise((resolve) => {\n",
              "    cancel.onclick = () => {\n",
              "      resolve(null);\n",
              "    };\n",
              "  });\n",
              "\n",
              "  // Wait for the user to pick the files.\n",
              "  const files = yield {\n",
              "    promise: Promise.race([pickedPromise, cancelPromise]),\n",
              "    response: {\n",
              "      action: 'starting',\n",
              "    }\n",
              "  };\n",
              "\n",
              "  cancel.remove();\n",
              "\n",
              "  // Disable the input element since further picks are not allowed.\n",
              "  inputElement.disabled = true;\n",
              "\n",
              "  if (!files) {\n",
              "    return {\n",
              "      response: {\n",
              "        action: 'complete',\n",
              "      }\n",
              "    };\n",
              "  }\n",
              "\n",
              "  for (const file of files) {\n",
              "    const li = document.createElement('li');\n",
              "    li.append(span(file.name, {fontWeight: 'bold'}));\n",
              "    li.append(span(\n",
              "        `(${file.type || 'n/a'}) - ${file.size} bytes, ` +\n",
              "        `last modified: ${\n",
              "            file.lastModifiedDate ? file.lastModifiedDate.toLocaleDateString() :\n",
              "                                    'n/a'} - `));\n",
              "    const percent = span('0% done');\n",
              "    li.appendChild(percent);\n",
              "\n",
              "    outputElement.appendChild(li);\n",
              "\n",
              "    const fileDataPromise = new Promise((resolve) => {\n",
              "      const reader = new FileReader();\n",
              "      reader.onload = (e) => {\n",
              "        resolve(e.target.result);\n",
              "      };\n",
              "      reader.readAsArrayBuffer(file);\n",
              "    });\n",
              "    // Wait for the data to be ready.\n",
              "    let fileData = yield {\n",
              "      promise: fileDataPromise,\n",
              "      response: {\n",
              "        action: 'continue',\n",
              "      }\n",
              "    };\n",
              "\n",
              "    // Use a chunked sending to avoid message size limits. See b/62115660.\n",
              "    let position = 0;\n",
              "    do {\n",
              "      const length = Math.min(fileData.byteLength - position, MAX_PAYLOAD_SIZE);\n",
              "      const chunk = new Uint8Array(fileData, position, length);\n",
              "      position += length;\n",
              "\n",
              "      const base64 = btoa(String.fromCharCode.apply(null, chunk));\n",
              "      yield {\n",
              "        response: {\n",
              "          action: 'append',\n",
              "          file: file.name,\n",
              "          data: base64,\n",
              "        },\n",
              "      };\n",
              "\n",
              "      let percentDone = fileData.byteLength === 0 ?\n",
              "          100 :\n",
              "          Math.round((position / fileData.byteLength) * 100);\n",
              "      percent.textContent = `${percentDone}% done`;\n",
              "\n",
              "    } while (position < fileData.byteLength);\n",
              "  }\n",
              "\n",
              "  // All done.\n",
              "  yield {\n",
              "    response: {\n",
              "      action: 'complete',\n",
              "    }\n",
              "  };\n",
              "}\n",
              "\n",
              "scope.google = scope.google || {};\n",
              "scope.google.colab = scope.google.colab || {};\n",
              "scope.google.colab._files = {\n",
              "  _uploadFiles,\n",
              "  _uploadFilesContinue,\n",
              "};\n",
              "})(self);\n",
              "</script> "
            ]
          },
          "metadata": {}
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "Saving juli_bakagianni_accessKeys.csv to juli_bakagianni_accessKeys.csv\n",
            "AWS credentials loaded and profile configured successfully!\n"
          ]
        }
      ]
    },
    {
      "cell_type": "markdown",
      "source": [
        "# Retrieve Methods"
      ],
      "metadata": {
        "id": "WA5YqoHaKcsh"
      }
    },
    {
      "cell_type": "code",
      "source": [
        "#@title Pinecone indexer\n",
        "\n",
        "from typing import List, Literal, Tuple\n",
        "from multiprocessing.pool import ThreadPool\n",
        "import boto3\n",
        "from pinecone import Pinecone\n",
        "import torch\n",
        "from functools import cache\n",
        "from transformers import AutoModel, AutoTokenizer\n",
        "\n",
        "PINECONE_INDEX_NAME = \"fineweb10bt-512-0w-e5-base-v2\"\n",
        "PINECONE_NAMESPACE = \"default\"\n",
        "\n",
        "@cache\n",
        "def has_mps():\n",
        "    \"\"\"True if an Apple MPS device is available.\"\"\"\n",
        "    return torch.backends.mps.is_available()\n",
        "\n",
        "@cache\n",
        "def has_cuda():\n",
        "    \"\"\"True if a CUDA device is available.\"\"\"\n",
        "    return torch.cuda.is_available()\n",
        "\n",
        "@cache\n",
        "def get_tokenizer(model_name: str = \"intfloat/e5-base-v2\"):\n",
        "    \"\"\"Load (and memoize) the tokenizer for the embedding model.\"\"\"\n",
        "    return AutoTokenizer.from_pretrained(model_name)\n",
        "\n",
        "@cache\n",
        "def get_model(model_name: str = \"intfloat/e5-base-v2\"):\n",
        "    \"\"\"Load (and memoize) the embedding model on the best available device.\"\"\"\n",
        "    model = AutoModel.from_pretrained(model_name, trust_remote_code=True)\n",
        "    if has_mps():\n",
        "        model = model.to(\"mps\")\n",
        "    elif has_cuda():\n",
        "        model = model.to(\"cuda\")\n",
        "    else:\n",
        "        model = model.to(\"cpu\")\n",
        "    return model\n",
        "\n",
        "def average_pool(last_hidden_states: torch.Tensor, attention_mask: torch.Tensor) -> torch.Tensor:\n",
        "    \"\"\"Mean-pool token states, masking out padding positions.\"\"\"\n",
        "    last_hidden = last_hidden_states.masked_fill(~attention_mask[..., None].bool(), 0.0)\n",
        "    return last_hidden.sum(dim=1) / attention_mask.sum(dim=1)[..., None]\n",
        "\n",
        "def embed_query(query: str,\n",
        "                query_prefix: str = \"query: \",\n",
        "                model_name: str = \"intfloat/e5-base-v2\",\n",
        "                pooling: Literal[\"cls\", \"avg\"] = \"avg\",\n",
        "                normalize: bool = True) -> list[float]:\n",
        "    \"\"\"Embed a single query string; thin wrapper over batch_embed_queries.\"\"\"\n",
        "    return batch_embed_queries([query], query_prefix, model_name, pooling, normalize)[0]\n",
        "\n",
        "def batch_embed_queries(queries: List[str], query_prefix: str = \"query: \", model_name: str = \"intfloat/e5-base-v2\", pooling: Literal[\"cls\", \"avg\"] = \"avg\", normalize: bool = True) -> List[List[float]]:\n",
        "    \"\"\"Embed queries with the e5 query prefix convention and the chosen pooling.\"\"\"\n",
        "    with_prefixes = [\" \".join([query_prefix, query]) for query in queries]\n",
        "    tokenizer = get_tokenizer(model_name)\n",
        "    model = get_model(model_name)\n",
        "    with torch.no_grad():\n",
        "        encoded = tokenizer(with_prefixes, padding=True, return_tensors=\"pt\", truncation=\"longest_first\")\n",
        "        encoded = encoded.to(model.device)\n",
        "        model_out = model(**encoded)\n",
        "        match pooling:\n",
        "            case \"cls\":\n",
        "                embeddings = model_out.last_hidden_state[:, 0]\n",
        "            case \"avg\":\n",
        "                embeddings = average_pool(model_out.last_hidden_state, encoded[\"attention_mask\"])\n",
        "        if normalize:\n",
        "            embeddings = torch.nn.functional.normalize(embeddings, p=2, dim=1)\n",
        "    return embeddings.tolist()\n",
        "\n",
        "@cache\n",
        "def get_pinecone_index(index_name: str = PINECONE_INDEX_NAME):\n",
        "    \"\"\"Connect to the Pinecone index using the read-only token from SSM.\"\"\"\n",
        "    pc = Pinecone(api_key=get_ssm_secret(\"/pinecone/ro_token\"))\n",
        "    index = pc.Index(name=index_name)\n",
        "    return index\n",
        "\n",
        "def query_pinecone(query: str, top_k: int = 10, namespace: str = PINECONE_NAMESPACE) -> dict:\n",
        "    \"\"\"Embed one query and fetch its top_k matches (with metadata) from Pinecone.\"\"\"\n",
        "    index = get_pinecone_index()\n",
        "    results = index.query(\n",
        "        vector=embed_query(query),\n",
        "        top_k=top_k,\n",
        "        include_values=False,\n",
        "        namespace=namespace,\n",
        "        include_metadata=True\n",
        "    )\n",
        "    return results\n",
        "\n",
        "def batch_query_pinecone(queries: list[str], top_k: int = 10, namespace: str = PINECONE_NAMESPACE, n_parallel: int = 10) -> list[dict]:\n",
        "    \"\"\"Batch query a Pinecone index and return the results.\n",
        "\n",
        "    Internally uses a ThreadPool to parallelize the queries.\n",
        "    \"\"\"\n",
        "    index = get_pinecone_index()\n",
        "    embeds = batch_embed_queries(queries)\n",
        "    # Context manager closes the pool; the original leaked its worker threads.\n",
        "    with ThreadPool(n_parallel) as pool:\n",
        "        results = pool.map(lambda x: index.query(vector=x, top_k=top_k, include_values=False, namespace=namespace, include_metadata=True), embeds)\n",
        "    return results\n",
        "\n",
        "def show_pinecone_results(results):\n",
        "    \"\"\"Pretty-print matches: chunk id, score, then the chunk text.\"\"\"\n",
        "    for match in results[\"matches\"]:\n",
        "        print(\"chunk:\", match[\"id\"], \"score:\", match[\"score\"])\n",
        "        print(match[\"metadata\"][\"text\"])\n",
        "        print()\n",
        "\n",
        "def get_pinecone_results(results):\n",
        "    \"\"\"Flatten Pinecone matches into a list of plain dicts.\"\"\"\n",
        "    results_list = []\n",
        "    for match in results[\"matches\"]:\n",
        "        meta = match[\"metadata\"]\n",
        "        results_list.append({\n",
        "            \"chunk_id\": match[\"id\"],\n",
        "            \"doc_id\": meta[\"doc_id\"],\n",
        "            \"score\": match[\"score\"],\n",
        "            \"text\": meta[\"text\"],\n",
        "            \"is_first_chunk\": meta[\"is_first_chunk\"],\n",
        "            \"is_last_chunk\": meta[\"is_last_chunk\"],\n",
        "        })\n",
        "    return results_list"
      ],
      "metadata": {
        "id": "dH6fW-xBEYDk"
      },
      "execution_count": null,
      "outputs": []
    },
    {
      "cell_type": "code",
      "source": [
        "#@title OpenSearch indexer\n",
        "from functools import cache\n",
        "from opensearchpy import OpenSearch, AWSV4SignerAuth, RequestsHttpConnection\n",
        "\n",
        "OPENSEARCH_INDEX_NAME = \"fineweb10bt-512-0w-e5-base-v2\"\n",
        "\n",
        "@cache\n",
        "def get_client(profile: str = AWS_PROFILE_NAME, region: str = AWS_REGION_NAME):\n",
        "    \"\"\"Build (and memoize) a SigV4-authenticated OpenSearch client.\n",
        "\n",
        "    The endpoint host name is read from SSM at /opensearch/endpoint.\n",
        "    \"\"\"\n",
        "    credentials = boto3.Session(profile_name=profile).get_credentials()\n",
        "    auth = AWSV4SignerAuth(credentials, region=region)\n",
        "    host_name = get_ssm_value(\"/opensearch/endpoint\", profile=profile, region=region)\n",
        "    aos_client = OpenSearch(\n",
        "        hosts=[{\"host\": host_name, \"port\": 443}],\n",
        "        http_auth=auth,\n",
        "        use_ssl=True,\n",
        "        verify_certs=True,\n",
        "        connection_class=RequestsHttpConnection,\n",
        "    )\n",
        "    return aos_client\n",
        "\n",
        "def query_opensearch(query: str, top_k: int = 10) -> dict:\n",
        "    \"\"\"Query an OpenSearch index and return the results.\"\"\"\n",
        "    client = get_client()\n",
        "    results = client.search(index=OPENSEARCH_INDEX_NAME, body={\"query\": {\"match\": {\"text\": query}}, \"size\": top_k})\n",
        "    return results\n",
        "\n",
        "def batch_query_opensearch(queries: list[str], top_k: int = 10, n_parallel: int = 10, timeout: str | None = None) -> list[dict]:\n",
        "    \"\"\"Send a batch of queries via msearch and return the raw response.\n",
        "\n",
        "    A connection timeout may be needed when serving large batches of queries.\n",
        "    n_parallel is kept for interface symmetry with batch_query_pinecone;\n",
        "    msearch already batches server-side and does not use it.\n",
        "    \"\"\"\n",
        "    client = get_client()\n",
        "    request = []\n",
        "    for query in queries:\n",
        "        req_head = {\"index\": OPENSEARCH_INDEX_NAME}\n",
        "        req_body = {\n",
        "            \"query\": {\n",
        "                \"multi_match\": {\n",
        "                    \"query\": query,\n",
        "                    \"fields\": [\"text\"],\n",
        "                }\n",
        "            },\n",
        "            \"size\": top_k,\n",
        "        }\n",
        "        request.extend([req_head, req_body])\n",
        "\n",
        "    # Only set the timeout if it's provided by the user\n",
        "    if timeout:\n",
        "        return client.msearch(body=request, timeout=timeout)\n",
        "    return client.msearch(body=request)  # No timeout set, will use the default\n",
        "\n",
        "def show_opensearch_results(results: dict):\n",
        "    \"\"\"Pretty-print hits: chunk id, score, then the chunk text.\"\"\"\n",
        "    for match in results[\"hits\"][\"hits\"]:\n",
        "        print(\"chunk:\", match[\"_id\"], \"score:\", match[\"_score\"])\n",
        "        print(match[\"_source\"][\"text\"])\n",
        "        print()\n",
        "\n",
        "def get_opensearch_results(results: dict):\n",
        "    \"\"\"Flatten OpenSearch hits into a list of plain dicts.\"\"\"\n",
        "    results_list = []\n",
        "    for match in results[\"hits\"][\"hits\"]:\n",
        "        source = match[\"_source\"]\n",
        "        results_list.append({\n",
        "            \"chunk_id\": match[\"_id\"],\n",
        "            \"doc_id\": source[\"doc_id\"],\n",
        "            \"score\": match[\"_score\"],\n",
        "            \"text\": source[\"text\"],\n",
        "            \"is_first_chunk\": source[\"is_first_chunk\"],\n",
        "            \"is_last_chunk\": source[\"is_last_chunk\"],\n",
        "            \"chunk_order\": source[\"chunk_order\"],\n",
        "            \"total_doc_chunks\": source[\"total_doc_chunks\"],\n",
        "        })\n",
        "    return results_list"
      ],
      "metadata": {
        "id": "Uyy2Z_kxEeN1"
      },
      "execution_count": null,
      "outputs": []
    },
    {
      "cell_type": "code",
      "source": [
        "#@title Retrieve class\n",
        "import pandas as pd\n",
        "from collections import defaultdict\n",
        "from tqdm.auto import tqdm  # was missing: reciprocal_rank_fusion raised NameError\n",
        "\n",
        "class Retriever:\n",
        "    \"\"\"Retrieves chunks for a QA dataframe from dense/sparse indexes and fuses them.\"\"\"\n",
        "\n",
        "    def __init__(self, qa_df, top_k=1000, rrf_k=60, dense_ind=None, sparse_ind=None):\n",
        "        self.qa_df = qa_df\n",
        "        self.top_k = top_k\n",
        "        self.rrf_k = rrf_k  # RRF smoothing constant (60 is the standard choice)\n",
        "        self.questions = qa_df[\"question\"].tolist()\n",
        "        self.dense_ind = dense_ind    # pre-computed dense results, used by indexer=\"merged\"\n",
        "        self.sparse_ind = sparse_ind  # pre-computed sparse results, used by indexer=\"merged\"\n",
        "\n",
        "    def retrieve(self, indexer=\"pinecone\"):\n",
        "        \"\"\"Run retrieval for all questions; returns a flat DataFrame of hits.\"\"\"\n",
        "        if indexer == \"pinecone\":\n",
        "            batch_results = batch_query_pinecone(self.questions, top_k=self.top_k)\n",
        "            retrieved_docs = [get_pinecone_results(results) for results in batch_results]\n",
        "\n",
        "        elif indexer == \"opensearch\":\n",
        "            # NOTE(review): batch_query_opensearch annotates timeout as str;\n",
        "            # confirm whether 480 should be the string \"480s\" for the OpenSearch API.\n",
        "            batch_results = batch_query_opensearch(self.questions, top_k=self.top_k, timeout=480)\n",
        "            retrieved_docs = [get_opensearch_results(results) for results in batch_results['responses']]\n",
        "\n",
        "        elif indexer == \"merged\":\n",
        "            return self.reciprocal_rank_fusion(self.dense_ind, self.sparse_ind)\n",
        "\n",
        "        else:\n",
        "            raise ValueError(f\"Unsupported indexer: {indexer}\")\n",
        "\n",
        "        return self._flatten_results(retrieved_docs)\n",
        "\n",
        "    def reciprocal_rank_fusion(self, dense_df: pd.DataFrame, sparse_df: pd.DataFrame, topn=100):\n",
        "        \"\"\"Fuse dense and sparse rankings per query with Reciprocal Rank Fusion.\"\"\"\n",
        "        all_merged_results = []\n",
        "\n",
        "        for query_id in tqdm(dense_df['query_id'].unique()):\n",
        "            dense_q = dense_df[dense_df['query_id'] == query_id].reset_index(drop=True)\n",
        "            sparse_q = sparse_df[sparse_df['query_id'] == query_id].reset_index(drop=True)\n",
        "\n",
        "            # RRF: each ranking contributes 1 / (k + rank) for every chunk it lists\n",
        "            rrf_scores = defaultdict(float)\n",
        "            for rank, row in dense_q.iterrows():\n",
        "                rrf_scores[row['chunk_id']] += 1 / (self.rrf_k + rank + 1)\n",
        "            for rank, row in sparse_q.iterrows():\n",
        "                rrf_scores[row['chunk_id']] += 1 / (self.rrf_k + rank + 1)\n",
        "\n",
        "            ranked_chunks = sorted(rrf_scores.items(), key=lambda item: item[1], reverse=True)\n",
        "            topn_ranked_chunks = ranked_chunks[:topn]\n",
        "\n",
        "            # Index once for O(1) metadata lookup instead of re-scanning the\n",
        "            # combined frame per chunk; keep='first' matches the original .iloc[0].\n",
        "            combined = pd.concat([dense_q, sparse_q], ignore_index=True)\n",
        "            chunk_lookup = combined.drop_duplicates('chunk_id').set_index('chunk_id')\n",
        "\n",
        "            merged_results = []\n",
        "            # dict keys are unique, so no seen-set is needed when iterating\n",
        "            for chunk_id, score in topn_ranked_chunks:\n",
        "                chunk_info = chunk_lookup.loc[chunk_id]\n",
        "                merged_results.append({\n",
        "                    'id': chunk_info['query_id'],\n",
        "                    'doc_id': chunk_info['doc_id'],\n",
        "                    'text': chunk_info['text'],\n",
        "                    'chunk_id': chunk_id,\n",
        "                    'score': score\n",
        "                })\n",
        "            all_merged_results.append(pd.DataFrame(merged_results))\n",
        "\n",
        "        return pd.concat(all_merged_results, ignore_index=True)\n",
        "\n",
        "    def _flatten_results(self, retrieved_docs):\n",
        "        \"\"\"Tag each hit with its query id/question and flatten to a DataFrame.\"\"\"\n",
        "        flattened = []\n",
        "        for query_idx, docs in enumerate(retrieved_docs):\n",
        "            for doc in docs:\n",
        "                doc['query_id'] = query_idx\n",
        "                doc['question'] = self.questions[query_idx]\n",
        "                flattened.append(doc)\n",
        "        return pd.DataFrame(flattened)\n",
        "\n",
        "    def save_results(self, df, filename):\n",
        "        \"\"\"Persist a results DataFrame to CSV.\"\"\"\n",
        "        df.to_csv(filename, index=False)\n"
      ],
      "metadata": {
        "id": "mbMcnlAtFLsH"
      },
      "execution_count": null,
      "outputs": []
    },
    {
      "cell_type": "markdown",
      "source": [
        "# Augmentation Methods"
      ],
      "metadata": {
        "id": "s8DyYf83KglM"
      }
    },
    {
      "cell_type": "code",
      "source": [
        "#@title Augmenter Generator\n",
        "from time import sleep\n",
        "from concurrent.futures import ThreadPoolExecutor, as_completed\n",
        "import threading\n",
        "\n",
        "class ClientPool:\n",
        "    def __init__(self, clients):\n",
        "        self.clients = clients\n",
        "        self.index = 0\n",
        "        self.lock = threading.Lock()\n",
        "\n",
        "    def get_next(self):\n",
        "        with self.lock:\n",
        "            client = self.clients[self.index]\n",
        "            self.index = (self.index + 1) % len(self.clients)\n",
        "            return client\n",
        "\n",
        "class AugmentedGenerator:\n",
        "    def __init__(self, model: str, clients=ai71_clients, max_tokens=300, rpm_limit=500):\n",
        "        self.client_pool = ClientPool(clients)\n",
        "        self.model = model\n",
        "        self.max_tokens = max_tokens\n",
        "        self.n_parallel = 10\n",
        "\n",
        "    def _build_final_prompt(self, question, passages):\n",
        "        start = (\n",
        "            \"Answer the question using only the context below. Do not make up any new information.\\n\"\n",
        "            \"If no part of the answer is found in the context, respond only with: 'I don't know.'\\n\"\n",
        "            \"If only part of the answer is found, include that part in a complete sentence that uses the phrasing of the question, and state that the rest is not available in the context.\\n\"\n",
        "            \"If the full answer is found, respond with a complete sentence that includes the phrasing of the question.\\n\\n\"\n",
        "            \"Context:\\n\"\n",
        "        )\n",
        "        end = f\"\\n\\nQuestion: {question}\\nAnswer:\"\n",
        "        sep = \"\\n---\\n\"\n",
        "        combined = sep.join(passages)\n",
        "\n",
        "        return start + combined + end\n",
        "\n",
        "\n",
        "    def generate(self, question, passages, n_retries=5, return_prompt=False):\n",
        "        \"\"\"Generate a single answer for `question` grounded in `passages`.\n",
        "\n",
        "        The prompt is built by `_build_final_prompt`; the chat completion is retried\n",
        "        up to `n_retries` times, rotating through the client pool on each attempt.\n",
        "        Returns the answer string, or `(answer, prompt)` when `return_prompt` is True.\n",
        "        On repeated failure the sentinel string 'No response due to error' is returned.\n",
        "        \"\"\"\n",
        "        def complete(prompt):\n",
        "            # Retry loop: each attempt may use a different client from the pool.\n",
        "            for attempt in range(n_retries):\n",
        "                try:\n",
        "                    client = self.client_pool.get_next()\n",
        "                    res = client.chat.completions.create(\n",
        "                        model=self.model,\n",
        "                        messages=[\n",
        "                            {\"role\": \"system\", \"content\": \"You are a helpful assistant.\"},\n",
        "                            {\"role\": \"user\", \"content\": prompt},\n",
        "                        ],\n",
        "                        max_tokens=self.max_tokens,\n",
        "                        temperature=0.4,\n",
        "                        top_p=0.95\n",
        "                    )\n",
        "                    # Strip any echoed dialogue marker from the model output.\n",
        "                    return res.choices[0].message.content.strip().replace(\"\\nUser:\", \"\")\n",
        "                except Exception as e:\n",
        "                    print(f\"Error: {e} Retry {attempt}\")\n",
        "                    sleep(20)  # back off before retrying; presumably rate-limit related -- confirm\n",
        "            return 'No response due to error'\n",
        "\n",
        "        prompt = self._build_final_prompt(question, passages)\n",
        "        answer = complete(prompt)\n",
        "        return (answer, prompt) if return_prompt else answer\n",
        "\n",
        "    def _aggregate_passages(self, question, passages_list, aggregate_n):\n",
        "        all_passages = [p for sublist in passages_list for p in sublist]\n",
        "        grouped_passages = [all_passages[i:i + aggregate_n] for i in range(0, len(all_passages), aggregate_n)]\n",
        "        grouped_questions = [question] * len(grouped_passages)\n",
        "        return grouped_questions, grouped_passages\n",
        "\n",
        "    def _build_summaries_prompt(self, question, passages):\n",
        "        start = (\n",
        "            \"Answer the question using only the context below. Do not make up any new information.\\n\"\n",
        "            \"If no part of the answer is found in the context, respond only with: 'I don't know.'\\n\"\n",
        "            \"If only part of the answer is found, include that part in a complete sentence that uses the phrasing of the question, and state that the rest is not available in the context.\\n\"\n",
        "            \"If the full answer is found, respond with a complete sentence that includes the phrasing of the question.\\n\\n\"\n",
        "            \"Context:\\n\"\n",
        "        )\n",
        "        end = f\"\\n\\nQuestion: {question}\\nAnswer:\"\n",
        "        sep = \"\\n---\\n\"\n",
        "        combined = sep.join(passages)\n",
        "        return start + combined + end\n",
        "\n",
        "\n",
        "    def generate_batch(self, question, passages_list, n_retries=2, return_prompt=False):\n",
        "        \"\"\"Answer `question` once per passage group in `passages_list`, in parallel.\n",
        "\n",
        "        Each group is rendered with `_build_summaries_prompt`; requests are issued in\n",
        "        batches of `self.n_parallel`, one pool client per batch. Returns the answers\n",
        "        aligned index-for-index with `passages_list` (plus all prompts when\n",
        "        `return_prompt` is True). Failed requests yield 'No response due to error'.\n",
        "        \"\"\"\n",
        "        def build_messages(q, passage_groups):\n",
        "            # One (system, user) message pair and one prompt per passage group.\n",
        "            messages, prompts = [], []\n",
        "            for group in passage_groups:\n",
        "                prompt = self._build_summaries_prompt(q, group)\n",
        "                prompts.append(prompt)\n",
        "                messages.append([\n",
        "                    {\"role\": \"system\", \"content\": \"You are helping answer a specific question using only the context provided below.\"},\n",
        "                    {\"role\": \"user\", \"content\": prompt}\n",
        "                ])\n",
        "            return messages, prompts\n",
        "\n",
        "        def complete_request(client, messages, idx):\n",
        "            # Retry transient API errors; fall back to a sentinel answer after n_retries.\n",
        "            for attempt in range(n_retries):\n",
        "                try:\n",
        "                    res = client.chat.completions.create(\n",
        "                        model=self.model,\n",
        "                        messages=messages,\n",
        "                        max_tokens=self.max_tokens,\n",
        "                        temperature=0.4,\n",
        "                        top_p=0.95\n",
        "                    )\n",
        "                    return idx, res.choices[0].message.content.strip().replace(\"\\nUser:\", \"\")\n",
        "                except Exception as e:\n",
        "                    print(f\"Error: {e} | Request {idx} | Retry {attempt}\")\n",
        "                    sleep(5)\n",
        "            return idx, \"No response due to error\"\n",
        "\n",
        "        prompts = []\n",
        "        results = [None] * len(passages_list)\n",
        "\n",
        "        # Split the passage groups into batches of n_parallel; each batch shares one client.\n",
        "        all_batches = [passages_list[i:i + self.n_parallel] for i in range(0, len(passages_list), self.n_parallel)]\n",
        "        with ThreadPoolExecutor(max_workers=self.n_parallel) as executor:\n",
        "            futures = []\n",
        "\n",
        "            for batch_idx, batch in enumerate(all_batches):\n",
        "                batch_messages, batch_prompts = build_messages(question, batch)\n",
        "                prompts.extend(batch_prompts)  # fix: prompts were previously never collected\n",
        "                client = self.client_pool.get_next()\n",
        "\n",
        "                # fix: tag each request with the GLOBAL position of its passage group.\n",
        "                # The old code used the batch number for every request in a batch, so\n",
        "                # responses overwrote each other and most of `results` stayed None,\n",
        "                # misaligning answers with passages_list for callers.\n",
        "                offset = batch_idx * self.n_parallel\n",
        "                for j, m in enumerate(batch_messages):\n",
        "                    futures.append(executor.submit(complete_request, client, m, offset + j))\n",
        "\n",
        "            # Collect results as they finish; order is restored via the global index.\n",
        "            for future in as_completed(futures):\n",
        "                idx, result = future.result()\n",
        "                results[idx] = result\n",
        "\n",
        "        return (results, prompts) if return_prompt else results\n"
      ],
      "metadata": {
        "id": "3tj7J8xUTQfS"
      },
      "execution_count": null,
      "outputs": []
    },
    {
      "cell_type": "markdown",
      "source": [
        "# Clustering Methods"
      ],
      "metadata": {
        "id": "Axu8j2kUKoUT"
      }
    },
    {
      "cell_type": "code",
      "source": [
        "#@title SampleSilhouette\n",
        "from sklearn.cluster import KMeans, MiniBatchKMeans\n",
        "from sklearn.metrics import silhouette_samples\n",
        "from sklearn.feature_extraction.text import TfidfVectorizer\n",
        "import numpy as np\n",
        "import pandas as pd\n",
        "from tqdm import tqdm\n",
        "from sklearn.decomposition import TruncatedSVD\n",
        "\n",
        "# Seed NumPy's global RNG so the random sampling below is reproducible.\n",
        "# (Previously this ASSIGNED 42 to np.random.seed, shadowing the function and\n",
        "# leaving the RNG unseeded.)\n",
        "np.random.seed(42)\n",
        "\n",
        "def control_sample(X, clustering, balanced=False, sample_size=-1):\n",
        "    \"\"\"\n",
        "    Sample data points from the dataset based on the specified strategy.\n",
        "\n",
        "    This function samples data points based on the sample strategy:\n",
        "    - 'uniform': Randomly samples 'sample_size' number of rows from the entire dataset.\n",
        "    - 'balanced': Samples a specified number of rows per cluster.\n",
        "\n",
        "    Parameters:\n",
        "    X (array-like): 2D array-like structure with the coordinates of the data points.\n",
        "    clustering (array-like): Cluster labels for the data points.\n",
        "    balanced (bool): If True, samples a specified number of rows per cluster.\n",
        "                     If False, samples 'sample_size' number of rows uniformly from the entire dataset.\n",
        "    sample_size (int): Total number of samples to draw. If sample_size is less than 1, the entire dataset is returned.\n",
        "\n",
        "    Returns:\n",
        "    tuple: Two arrays, one with the sampled data points and another with the corresponding cluster labels.\n",
        "    \"\"\"\n",
        "    if sample_size < 1:\n",
        "        return X, clustering\n",
        "\n",
        "    X = np.array(X)\n",
        "    clustering = np.array(clustering)\n",
        "\n",
        "    if not balanced:\n",
        "        indices = np.random.choice(len(X), size=sample_size, replace=False)\n",
        "        return X[indices], clustering[indices]\n",
        "\n",
        "    # Otherwise: sample n rows per cluster\n",
        "    unique_clusters, counts = np.unique(clustering, return_counts=True)\n",
        "    samples_per_cluster = max(1, sample_size // len(unique_clusters))\n",
        "\n",
        "    sampled_X = []\n",
        "    sampled_clustering = []\n",
        "\n",
        "    for cluster in unique_clusters:\n",
        "        cluster_indices = np.where(clustering == cluster)[0]\n",
        "        sample_indices = np.random.choice(cluster_indices, size=min(len(cluster_indices), samples_per_cluster), replace=False)\n",
        "        sampled_X.append(X[sample_indices])\n",
        "        sampled_clustering.append(clustering[sample_indices])\n",
        "\n",
        "    sampled_X = np.vstack(sampled_X)\n",
        "    sampled_clustering = np.hstack(sampled_clustering)\n",
        "\n",
        "    return sampled_X, sampled_clustering\n",
        "\n",
        "\n",
        "class SampleSilhouette:\n",
        "  \"\"\"Silhouette scoring over an optionally subsampled dataset.\n",
        "\n",
        "  sample_strategy='macro' draws a balanced per-cluster sample (see control_sample);\n",
        "  any other value samples uniformly. sample_size < 1 keeps the full dataset.\n",
        "  \"\"\"\n",
        "\n",
        "  def __init__(self, X, clustering, sample_size=-1, sample_strategy='micro'):\n",
        "    balanced = sample_strategy == 'macro'\n",
        "    self.X, self.clustering = control_sample(X, clustering, balanced=balanced, sample_size=sample_size)\n",
        "    # Per-point silhouette values paired with their cluster labels.\n",
        "    self.point_sil = pd.DataFrame({'sil': silhouette_samples(self.X, self.clustering), 'label': self.clustering})\n",
        "\n",
        "  def macro(self):\n",
        "    \"\"\"Macro-averaged silhouette: average within each cluster, then across clusters.\"\"\"\n",
        "    per_cluster_means = self.point_sil.groupby('label')['sil'].mean()\n",
        "    return per_cluster_means.mean()\n",
        "\n",
        "  def micro(self):\n",
        "    \"\"\"Micro-averaged silhouette: plain mean over all points, as in sklearn.\"\"\"\n",
        "    return self.point_sil['sil'].mean()"
      ],
      "metadata": {
        "id": "Ti_x374bHlk9"
      },
      "execution_count": null,
      "outputs": []
    },
    {
      "cell_type": "code",
      "source": [
        "#@title TopClustRAG\n",
        "import numpy as np\n",
        "import math\n",
        "import pandas as pd\n",
        "from sentence_transformers import CrossEncoder\n",
        "from time import time\n",
        "from sklearn.cluster import MiniBatchKMeans, KMeans\n",
        "from tqdm import tqdm\n",
        "from joblib import Parallel, delayed\n",
        "from typing import List, Tuple\n",
        "import json\n",
        "from sklearn.decomposition import TruncatedSVD\n",
        "from scipy.sparse import csr_matrix\n",
        "\n",
        "# Load once\n",
        "reranker = CrossEncoder(\"cross-encoder/ms-marco-TinyBERT-L-6\")\n",
        "\n",
        "def convert_to_jsonl(records, filename):\n",
        "    with open(filename, 'w') as f:\n",
        "        for record in records:\n",
        "            json.dump(record, f)\n",
        "            f.write('\\n')\n",
        "\n",
        "\n",
        "def cluster_passages_opt_k(topn_df: pd.DataFrame, embeddings: np.ndarray, k_range=range(10, 100, 5)):\n",
        "    \"\"\"\n",
        "    Cluster passages with KMeans, selecting k from `k_range` by macro-averaged\n",
        "    silhouette, then keep the top-5 passages (by retrieval score) per cluster.\n",
        "\n",
        "    Returns (passage_batches, doc_id_batches, best_k) where the two batch lists\n",
        "    are parallel: one entry per cluster.\n",
        "    \"\"\"\n",
        "    # Silhouette scores lie in [-1, 1], so -1 guarantees the first k is accepted.\n",
        "    best_score = -1\n",
        "    best_k, best_labels = 0, []\n",
        "\n",
        "    for k in k_range:\n",
        "        kmeans = KMeans(n_clusters=k, random_state=42, n_init=\"auto\")\n",
        "        labels = kmeans.fit_predict(embeddings)\n",
        "        # Macro silhouette on a balanced subsample (10 points per cluster) keeps this affordable.\n",
        "        macro = SampleSilhouette(embeddings, labels, sample_size=10 * k, sample_strategy='macro').macro()\n",
        "        if macro > best_score:\n",
        "            best_score = macro\n",
        "            best_k = k\n",
        "            best_labels = labels\n",
        "\n",
        "    # Group (passage, doc_id, score) triples by their winning cluster label.\n",
        "    clustered_batches = {}\n",
        "    passages, doc_ids, scores = topn_df['text'].tolist(), topn_df['doc_id'].tolist(), topn_df['score'].tolist()\n",
        "\n",
        "    for label, passage, doc_id, score in zip(best_labels, passages, doc_ids, scores):\n",
        "        clustered_batches.setdefault(label, []).append((passage, doc_id, score))\n",
        "\n",
        "    passage_batches, doc_id_batches = [], []\n",
        "    for batch in clustered_batches.values():\n",
        "        # The five highest-scoring passages represent the cluster.\n",
        "        sorted_batch = sorted(batch, key=lambda x: x[2], reverse=True)[:5]\n",
        "        passages_batch, doc_ids_batch, _ = zip(*sorted_batch)\n",
        "        passage_batches.append(list(passages_batch))\n",
        "        doc_id_batches.append(list(doc_ids_batch))\n",
        "    return passage_batches, doc_id_batches, best_k\n",
        "\n",
        "def rerank_passages(question: str, passages: List[str]) -> List[str]:\n",
        "    \"\"\"Order `passages` by CrossEncoder relevance to `question`, most relevant first.\"\"\"\n",
        "    if len(passages) == 0:\n",
        "        return []\n",
        "\n",
        "    # Score every (question, passage) pair with the shared reranker model.\n",
        "    pairs = [(question, passage) for passage in passages]\n",
        "    relevance = reranker.predict(pairs)\n",
        "\n",
        "    # Highest score first; score ties fall back to reverse lexicographic passage order.\n",
        "    ranked = sorted(zip(relevance, passages), reverse=True)\n",
        "    return [passage for _, passage in ranked]\n",
        "\n",
        "def process_single_query(query_id: str, group: pd.DataFrame,\n",
        "                         embedding_map: np.ndarray, k_range,\n",
        "                         generator, reranker, top_n=100, n_parallel=20):\n",
        "    \"\"\"\n",
        "    Process one query end-to-end: cluster its passages, generate per-cluster\n",
        "    answers, keep the informative ones, rerank them, and produce a final answer.\n",
        "\n",
        "    Returns a record dict with the query id, question, kept passages, final\n",
        "    prompt and final answer. NOTE(review): `top_n` and `n_parallel` are accepted\n",
        "    but never used in this body -- confirm whether they can be dropped.\n",
        "    \"\"\"\n",
        "    question = group.iloc[0]['question']\n",
        "    # gold_answer = group.iloc[0]['gold_answer']\n",
        "\n",
        "    # Get top-n passages and doc_ids\n",
        "    topn_df, embeddings = group, embedding_map\n",
        "    # topn_df, embeddings = get_top_n_passages(group, embedding_map, top_n=top_n)\n",
        "\n",
        "    start = time()\n",
        "    # Cluster passages using KMeans\n",
        "    passage_batches, doc_id_batches, best_k = cluster_passages_opt_k(topn_df, embeddings, k_range=k_range)\n",
        "    end = time()\n",
        "    print(f\"\\n  → Clusterer took {end - start:.2f} seconds | best k = {best_k}\")\n",
        "\n",
        "    # Initialize dictionaries to track doc_id to batch information\n",
        "    # NOTE(review): docid_to_batch_info is built but never read below -- confirm\n",
        "    # whether it can be removed.\n",
        "    docid_to_batch_info = {\n",
        "        doc_id: (batch_idx, pos)\n",
        "        for batch_idx, doc_ids in enumerate(doc_id_batches)\n",
        "        for pos, doc_id in enumerate(doc_ids)\n",
        "    }\n",
        "\n",
        "    start = time()\n",
        "    # Generate responses in batches\n",
        "    batch_responses = generator.generate_batch(question, passage_batches, return_prompt=False)\n",
        "    end = time()\n",
        "    print(f\"  → Generator took {end - start:.2f} seconds\")\n",
        "\n",
        "    # Map doc_ids to generated responses\n",
        "    # NOTE(review): docid_to_response is also unused afterwards -- verify before deleting.\n",
        "    docid_to_response = {}\n",
        "    for doc_ids, response in zip(doc_id_batches, batch_responses):\n",
        "        for doc_id in doc_ids:\n",
        "            docid_to_response[doc_id] = response\n",
        "\n",
        "    # Filter informative responses\n",
        "    not_relevant_phrase = \"I don't know.\"\n",
        "    batch_df = pd.DataFrame({\n",
        "            'passage': batch_responses,\n",
        "            'doc_IDs': doc_id_batches\n",
        "        })\n",
        "    # Drop \"I don't know.\" answers; below, keep at most two rows per distinct answer text.\n",
        "    dedup_df = batch_df[batch_df['passage'] != not_relevant_phrase][['passage', 'doc_IDs']]\n",
        "    # Set 'passage' as index to group by it without including it implicitly\n",
        "    inform_df = (\n",
        "        dedup_df.set_index('passage')\n",
        "                .groupby(level=0, group_keys=False)\n",
        "                .apply(lambda g: g.head(2))\n",
        "                .reset_index()\n",
        "    )\n",
        "\n",
        "    start = time()\n",
        "    # Rerank passages based on relevance to the question\n",
        "    sorted_passages = rerank_passages(question, inform_df['passage'].values)\n",
        "    end = time()\n",
        "    print(f\"  → Reranker took {end - start:.2f} seconds\")\n",
        "\n",
        "    start = time()\n",
        "    # Generate final response\n",
        "    final_resp, final_prompt = generator.generate(question, sorted_passages, return_prompt=True)\n",
        "    end = time()\n",
        "    print(f\"  → Final prompt took {end - start:.2f} seconds\")\n",
        "\n",
        "    # Record the results\n",
        "    record = {\n",
        "        'id': query_id,\n",
        "        'question': question,\n",
        "        'passages': inform_df.to_dict('records'),\n",
        "        'final_prompt': final_prompt,\n",
        "        'answer': final_resp\n",
        "    }\n",
        "\n",
        "    return record\n",
        "\n",
        "\n",
        "def run_TopClustRAG(df: pd.DataFrame, generator, reranker, n_parallel=10, k_range=range(30, 100, 10)):\n",
        "    \"\"\"\n",
        "    Run the TopClustRAG pipeline over every query group in `df` (grouped by 'id').\n",
        "\n",
        "    For each query: TF-IDF embed its passages, reduce to 100 dims with SVD, then\n",
        "    delegate to process_single_query. Results are checkpointed to JSONL every 50\n",
        "    queries and the full set is returned as a DataFrame. Failing queries are\n",
        "    logged and skipped.\n",
        "    \"\"\"\n",
        "    results = []\n",
        "    buffer = []\n",
        "    batch_num = 0\n",
        "\n",
        "    for group_num, (query_id, group) in enumerate(tqdm(df.groupby('id'))):\n",
        "      try:\n",
        "        start_time = time()\n",
        "        group_texts = group['text'].tolist()\n",
        "        # NOTE(review): TfidfVectorizer is imported in a different notebook cell --\n",
        "        # this function fails if that cell has not been executed first.\n",
        "        vectorizer = TfidfVectorizer()\n",
        "        group_embeddings = vectorizer.fit_transform(group_texts)\n",
        "        duration = time() - start_time\n",
        "        print(f\"Tf-Idf embeddings generated in {duration:.2f} seconds\")\n",
        "\n",
        "        start_time = time()\n",
        "        # Step 1: Reduce dimensions from TF-IDF (e.g. 10,000+ → 100–300)\n",
        "        svd = TruncatedSVD(n_components=100, random_state=42)  # adjust as needed\n",
        "        reduced_embeddings = svd.fit_transform(csr_matrix(group_embeddings))\n",
        "        duration = time() - start_time\n",
        "        print(f\"Reduced dimensions in {duration:.2f} seconds\")\n",
        "\n",
        "        res = process_single_query(query_id=query_id, group=group, embedding_map=reduced_embeddings,\n",
        "                                  generator=generator, reranker=reranker, k_range=k_range)\n",
        "        results.append(res)\n",
        "\n",
        "        buffer.append(res)\n",
        "\n",
        "        # Save batch every 50 groups\n",
        "        # NOTE(review): `buffer` is never cleared, so each batch file is a cumulative\n",
        "        # snapshot of ALL results so far, not just the latest 50 -- confirm intent.\n",
        "        if (group_num + 1) % 50 == 0:\n",
        "            batch_file = f\"batch_{batch_num}_live.jsonl\"\n",
        "            convert_to_jsonl(buffer, batch_file)\n",
        "            print(f\"✅ Saved batch {batch_num} to {batch_file}\")\n",
        "            batch_num += 1\n",
        "      except Exception as e:\n",
        "            print(f\"  ⚠️ Skipping query {query_id} due to error: {e}\")\n",
        "            continue\n",
        "\n",
        "    final_file = \"final_live.jsonl\"\n",
        "    convert_to_jsonl(results, final_file)\n",
        "    # Final evaluation dataframe\n",
        "    return pd.DataFrame(results)\n"
      ],
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 368,
          "referenced_widgets": [
            "442ee662e479498d94ee05275df58b71",
            "0e11110e50c74deebd01fdb4192be969",
            "4b6b89c8b8fb4bbd9fc199a469bd07cf",
            "f58d8ca973ee43a2b7435b0bda07542a",
            "e4120903cb484408899514895c8cdc42",
            "e9a996f785b54849b7a6b1658c00138a",
            "c043b9dc7dfe4553ad1aadcfd7c9a7c7",
            "f068bd73069b4b519f77447a49bdc809",
            "f144af4323484ae1b50a82620e1a8c54",
            "03b65a4052644ca9af47d56f9000686a",
            "dd61f82b2bc74970900ac01f2adb42d6",
            "45cdd4ba8aba467bbae05c5aec1f854b",
            "0ef29494580144b6938d1b0ea8c82a94",
            "3f2a6c93ea564285b5aae77777e13272",
            "38e1489bf5ea46cf9902677cbc989ba5",
            "f39bcac6666a4eb096a252e167011bfd",
            "1cfc2a4f52e846e8bc05269aaa262f1c",
            "c730cabff73a44c390f9f0529ba9c4df",
            "344b88ac24934c32b714a17e10e7c122",
            "25baeca7ef134ed494d13f3adbebb925",
            "599d104e05524363bddab09738f6e3af",
            "b325dfcbbebe4447a7036b41c772982e",
            "b38564872a1b47c0a86b545b897d7055",
            "2e2ba82fba38444c9212a3a76c4b1f4f",
            "66624544dc8b456e8362e3464c736e66",
            "620832ac579e43099b661be2669b9695",
            "554f88ef998a453f96d3293c8a427186",
            "dbc230801c024bf38984a82fe29b678d",
            "664425c0efcc4e3d8e5cc57306a6c17f",
            "060b4068546d4c77bd9b829da7f97863",
            "8b6bd5a6ffc64c4ca83573243b04ce33",
            "5c4f4bd492524b53abdab48f2d21a2c9",
            "f29fd23a71044892b91efd227532f550",
            "cecc2005ecc5433bb1bfaa819a021d73",
            "146e7d9d35d44f11aa6be02801c9429f",
            "0dff1aca50ea4794802dd1659eb8e21a",
            "aeca2870473345cc83858ffb39fb448e",
            "92b9fd396b664a59b81a3ee8ef60c5b6",
            "450d439712974b2da5763c58e045b35b",
            "203d8fafcef24d00baecdf9ffb27dd86",
            "90bea3878b204e308969e94ebfc75008",
            "7675698d48d045e3969f5e79ef994e56",
            "6b1fee5d6c1b4d2593018b0ef0098379",
            "074abba42eaa4e3d8e93ee136a7c360e",
            "3b65467ab02041778907c16b9353f359",
            "3d4551530fef435b8e9f4250091bb1ee",
            "d2513022118542cfbe82389e7ea58e21",
            "941b2d5a5cb54b198606d5214b0926e5",
            "1bdc83f6d97c4af78ed930ed7f3fde32",
            "bcf85b7a91504b65bd1975918e8294f8",
            "035d2da68f024ac2add62b1680c72987",
            "3799f980eb644c6382a7277b87913cc3",
            "638efc577fe14e778944698c916999d3",
            "8f6c20e3a6f743f9814a53357ac726e3",
            "c3534a11d7964913b1afa5e4971013fc",
            "5634fe97abc3422dada8637f5f60e421",
            "895e8faf495a4ad9b1fae2673d9f49c0",
            "9a08fd1b9cec4dcab717e5ecd22f8f14",
            "da12576f393048499ebf38ed9e5e2539",
            "0e6e7ab8fb6b468aade3f46469ca6f3c",
            "ea8d09b161a94f16a45e3fde7bb53515",
            "539e1ba4441a4ca19781bc8501b19393",
            "5f67e0a781b34912aad4c9075f0952cc",
            "8ca0ebf769f24ab3ad236d6f5664ba6b",
            "70e7df19620848f989e3329537d65c3e",
            "aacc083fcab94234a711c863496b9151"
          ]
        },
        "collapsed": true,
        "id": "2LqByQ7Ot9h7",
        "outputId": "a7a9a931-4f9f-4261-d5fd-3e9aa79f02ab"
      },
      "execution_count": null,
      "outputs": [
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "/usr/local/lib/python3.11/dist-packages/huggingface_hub/utils/_auth.py:94: UserWarning: \n",
            "The secret `HF_TOKEN` does not exist in your Colab secrets.\n",
            "To authenticate with the Hugging Face Hub, create a token in your settings tab (https://huggingface.co/settings/tokens), set it as secret in your Google Colab and restart your session.\n",
            "You will be able to reuse this secret in all of your notebooks.\n",
            "Please note that authentication is recommended but still optional to access public models or datasets.\n",
            "  warnings.warn(\n"
          ]
        },
        {
          "output_type": "display_data",
          "data": {
            "text/plain": [
              "config.json:   0%|          | 0.00/612 [00:00<?, ?B/s]"
            ],
            "application/vnd.jupyter.widget-view+json": {
              "version_major": 2,
              "version_minor": 0,
              "model_id": "442ee662e479498d94ee05275df58b71"
            }
          },
          "metadata": {}
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "Xet Storage is enabled for this repo, but the 'hf_xet' package is not installed. Falling back to regular HTTP download. For better performance, install the package with: `pip install huggingface_hub[hf_xet]` or `pip install hf_xet`\n",
            "WARNING:huggingface_hub.file_download:Xet Storage is enabled for this repo, but the 'hf_xet' package is not installed. Falling back to regular HTTP download. For better performance, install the package with: `pip install huggingface_hub[hf_xet]` or `pip install hf_xet`\n"
          ]
        },
        {
          "output_type": "display_data",
          "data": {
            "text/plain": [
              "model.safetensors:   0%|          | 0.00/268M [00:00<?, ?B/s]"
            ],
            "application/vnd.jupyter.widget-view+json": {
              "version_major": 2,
              "version_minor": 0,
              "model_id": "45cdd4ba8aba467bbae05c5aec1f854b"
            }
          },
          "metadata": {}
        },
        {
          "output_type": "display_data",
          "data": {
            "text/plain": [
              "tokenizer_config.json:   0%|          | 0.00/1.33k [00:00<?, ?B/s]"
            ],
            "application/vnd.jupyter.widget-view+json": {
              "version_major": 2,
              "version_minor": 0,
              "model_id": "b38564872a1b47c0a86b545b897d7055"
            }
          },
          "metadata": {}
        },
        {
          "output_type": "display_data",
          "data": {
            "text/plain": [
              "vocab.txt:   0%|          | 0.00/232k [00:00<?, ?B/s]"
            ],
            "application/vnd.jupyter.widget-view+json": {
              "version_major": 2,
              "version_minor": 0,
              "model_id": "cecc2005ecc5433bb1bfaa819a021d73"
            }
          },
          "metadata": {}
        },
        {
          "output_type": "display_data",
          "data": {
            "text/plain": [
              "tokenizer.json:   0%|          | 0.00/711k [00:00<?, ?B/s]"
            ],
            "application/vnd.jupyter.widget-view+json": {
              "version_major": 2,
              "version_minor": 0,
              "model_id": "3b65467ab02041778907c16b9353f359"
            }
          },
          "metadata": {}
        },
        {
          "output_type": "display_data",
          "data": {
            "text/plain": [
              "special_tokens_map.json:   0%|          | 0.00/132 [00:00<?, ?B/s]"
            ],
            "application/vnd.jupyter.widget-view+json": {
              "version_major": 2,
              "version_minor": 0,
              "model_id": "5634fe97abc3422dada8637f5f60e421"
            }
          },
          "metadata": {}
        }
      ]
    },
    {
      "cell_type": "markdown",
      "source": [
        "# LiveRAG run TopClustRAG"
      ],
      "metadata": {
        "id": "ByG2mkIhyZOz"
      }
    },
    {
      "cell_type": "code",
      "source": [
        "# @title upload test qa data\n",
        "\n",
        "from google.colab import files\n",
        "import pandas as pd\n",
        "\n",
        "# Prompt for a local file in the Colab UI, then load it as JSON Lines.\n",
        "uploaded = files.upload()\n",
        "filename = list(uploaded.keys())[0]  # first (and expected only) uploaded file\n",
        "qa_df = pd.read_json(filename, lines=True)\n",
        "# Peek at the first rows to confirm the schema.\n",
        "qa_df.head(3)"
      ],
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 199
        },
        "collapsed": true,
        "outputId": "584be27f-d4f0-47e9-e51a-a970bc610717",
        "id": "cBV2bNjWyZOz"
      },
      "execution_count": null,
      "outputs": [
        {
          "output_type": "display_data",
          "data": {
            "text/plain": [
              "<IPython.core.display.HTML object>"
            ],
            "text/html": [
              "\n",
              "     <input type=\"file\" id=\"files-f407d6bb-0df6-4c5e-abe9-7bc6d183c576\" name=\"files[]\" multiple disabled\n",
              "        style=\"border:none\" />\n",
              "     <output id=\"result-f407d6bb-0df6-4c5e-abe9-7bc6d183c576\">\n",
              "      Upload widget is only available when the cell has been executed in the\n",
              "      current browser session. Please rerun this cell to enable.\n",
              "      </output>\n",
              "      <script>// Copyright 2017 Google LLC\n",
              "//\n",
              "// Licensed under the Apache License, Version 2.0 (the \"License\");\n",
              "// you may not use this file except in compliance with the License.\n",
              "// You may obtain a copy of the License at\n",
              "//\n",
              "//      http://www.apache.org/licenses/LICENSE-2.0\n",
              "//\n",
              "// Unless required by applicable law or agreed to in writing, software\n",
              "// distributed under the License is distributed on an \"AS IS\" BASIS,\n",
              "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n",
              "// See the License for the specific language governing permissions and\n",
              "// limitations under the License.\n",
              "\n",
              "/**\n",
              " * @fileoverview Helpers for google.colab Python module.\n",
              " */\n",
              "(function(scope) {\n",
              "function span(text, styleAttributes = {}) {\n",
              "  const element = document.createElement('span');\n",
              "  element.textContent = text;\n",
              "  for (const key of Object.keys(styleAttributes)) {\n",
              "    element.style[key] = styleAttributes[key];\n",
              "  }\n",
              "  return element;\n",
              "}\n",
              "\n",
              "// Max number of bytes which will be uploaded at a time.\n",
              "const MAX_PAYLOAD_SIZE = 100 * 1024;\n",
              "\n",
              "function _uploadFiles(inputId, outputId) {\n",
              "  const steps = uploadFilesStep(inputId, outputId);\n",
              "  const outputElement = document.getElementById(outputId);\n",
              "  // Cache steps on the outputElement to make it available for the next call\n",
              "  // to uploadFilesContinue from Python.\n",
              "  outputElement.steps = steps;\n",
              "\n",
              "  return _uploadFilesContinue(outputId);\n",
              "}\n",
              "\n",
              "// This is roughly an async generator (not supported in the browser yet),\n",
              "// where there are multiple asynchronous steps and the Python side is going\n",
              "// to poll for completion of each step.\n",
              "// This uses a Promise to block the python side on completion of each step,\n",
              "// then passes the result of the previous step as the input to the next step.\n",
              "function _uploadFilesContinue(outputId) {\n",
              "  const outputElement = document.getElementById(outputId);\n",
              "  const steps = outputElement.steps;\n",
              "\n",
              "  const next = steps.next(outputElement.lastPromiseValue);\n",
              "  return Promise.resolve(next.value.promise).then((value) => {\n",
              "    // Cache the last promise value to make it available to the next\n",
              "    // step of the generator.\n",
              "    outputElement.lastPromiseValue = value;\n",
              "    return next.value.response;\n",
              "  });\n",
              "}\n",
              "\n",
              "/**\n",
              " * Generator function which is called between each async step of the upload\n",
              " * process.\n",
              " * @param {string} inputId Element ID of the input file picker element.\n",
              " * @param {string} outputId Element ID of the output display.\n",
              " * @return {!Iterable<!Object>} Iterable of next steps.\n",
              " */\n",
              "function* uploadFilesStep(inputId, outputId) {\n",
              "  const inputElement = document.getElementById(inputId);\n",
              "  inputElement.disabled = false;\n",
              "\n",
              "  const outputElement = document.getElementById(outputId);\n",
              "  outputElement.innerHTML = '';\n",
              "\n",
              "  const pickedPromise = new Promise((resolve) => {\n",
              "    inputElement.addEventListener('change', (e) => {\n",
              "      resolve(e.target.files);\n",
              "    });\n",
              "  });\n",
              "\n",
              "  const cancel = document.createElement('button');\n",
              "  inputElement.parentElement.appendChild(cancel);\n",
              "  cancel.textContent = 'Cancel upload';\n",
              "  const cancelPromise = new Promise((resolve) => {\n",
              "    cancel.onclick = () => {\n",
              "      resolve(null);\n",
              "    };\n",
              "  });\n",
              "\n",
              "  // Wait for the user to pick the files.\n",
              "  const files = yield {\n",
              "    promise: Promise.race([pickedPromise, cancelPromise]),\n",
              "    response: {\n",
              "      action: 'starting',\n",
              "    }\n",
              "  };\n",
              "\n",
              "  cancel.remove();\n",
              "\n",
              "  // Disable the input element since further picks are not allowed.\n",
              "  inputElement.disabled = true;\n",
              "\n",
              "  if (!files) {\n",
              "    return {\n",
              "      response: {\n",
              "        action: 'complete',\n",
              "      }\n",
              "    };\n",
              "  }\n",
              "\n",
              "  for (const file of files) {\n",
              "    const li = document.createElement('li');\n",
              "    li.append(span(file.name, {fontWeight: 'bold'}));\n",
              "    li.append(span(\n",
              "        `(${file.type || 'n/a'}) - ${file.size} bytes, ` +\n",
              "        `last modified: ${\n",
              "            file.lastModifiedDate ? file.lastModifiedDate.toLocaleDateString() :\n",
              "                                    'n/a'} - `));\n",
              "    const percent = span('0% done');\n",
              "    li.appendChild(percent);\n",
              "\n",
              "    outputElement.appendChild(li);\n",
              "\n",
              "    const fileDataPromise = new Promise((resolve) => {\n",
              "      const reader = new FileReader();\n",
              "      reader.onload = (e) => {\n",
              "        resolve(e.target.result);\n",
              "      };\n",
              "      reader.readAsArrayBuffer(file);\n",
              "    });\n",
              "    // Wait for the data to be ready.\n",
              "    let fileData = yield {\n",
              "      promise: fileDataPromise,\n",
              "      response: {\n",
              "        action: 'continue',\n",
              "      }\n",
              "    };\n",
              "\n",
              "    // Use a chunked sending to avoid message size limits. See b/62115660.\n",
              "    let position = 0;\n",
              "    do {\n",
              "      const length = Math.min(fileData.byteLength - position, MAX_PAYLOAD_SIZE);\n",
              "      const chunk = new Uint8Array(fileData, position, length);\n",
              "      position += length;\n",
              "\n",
              "      const base64 = btoa(String.fromCharCode.apply(null, chunk));\n",
              "      yield {\n",
              "        response: {\n",
              "          action: 'append',\n",
              "          file: file.name,\n",
              "          data: base64,\n",
              "        },\n",
              "      };\n",
              "\n",
              "      let percentDone = fileData.byteLength === 0 ?\n",
              "          100 :\n",
              "          Math.round((position / fileData.byteLength) * 100);\n",
              "      percent.textContent = `${percentDone}% done`;\n",
              "\n",
              "    } while (position < fileData.byteLength);\n",
              "  }\n",
              "\n",
              "  // All done.\n",
              "  yield {\n",
              "    response: {\n",
              "      action: 'complete',\n",
              "    }\n",
              "  };\n",
              "}\n",
              "\n",
              "scope.google = scope.google || {};\n",
              "scope.google.colab = scope.google.colab || {};\n",
              "scope.google.colab._files = {\n",
              "  _uploadFiles,\n",
              "  _uploadFilesContinue,\n",
              "};\n",
              "})(self);\n",
              "</script> "
            ]
          },
          "metadata": {}
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "Saving final_questions.jsonl to final_questions.jsonl\n"
          ]
        },
        {
          "output_type": "execute_result",
          "data": {
            "text/plain": [
              "   id                                           question\n",
              "0   0   How deep can fish survive in the ocean trenches?\n",
              "1   1  Based on temperature considerations alone, is ...\n",
              "2   2  What major acts performed at the Brighton Hipp..."
            ],
            "text/html": [
              "\n",
              "  <div id=\"df-b2fcb895-35b1-4b70-81b9-90b51ff7591e\" class=\"colab-df-container\">\n",
              "    <div>\n",
              "<style scoped>\n",
              "    .dataframe tbody tr th:only-of-type {\n",
              "        vertical-align: middle;\n",
              "    }\n",
              "\n",
              "    .dataframe tbody tr th {\n",
              "        vertical-align: top;\n",
              "    }\n",
              "\n",
              "    .dataframe thead th {\n",
              "        text-align: right;\n",
              "    }\n",
              "</style>\n",
              "<table border=\"1\" class=\"dataframe\">\n",
              "  <thead>\n",
              "    <tr style=\"text-align: right;\">\n",
              "      <th></th>\n",
              "      <th>id</th>\n",
              "      <th>question</th>\n",
              "    </tr>\n",
              "  </thead>\n",
              "  <tbody>\n",
              "    <tr>\n",
              "      <th>0</th>\n",
              "      <td>0</td>\n",
              "      <td>How deep can fish survive in the ocean trenches?</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>1</th>\n",
              "      <td>1</td>\n",
              "      <td>Based on temperature considerations alone, is ...</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>2</th>\n",
              "      <td>2</td>\n",
              "      <td>What major acts performed at the Brighton Hipp...</td>\n",
              "    </tr>\n",
              "  </tbody>\n",
              "</table>\n",
              "</div>\n",
              "    <div class=\"colab-df-buttons\">\n",
              "\n",
              "  <div class=\"colab-df-container\">\n",
              "    <button class=\"colab-df-convert\" onclick=\"convertToInteractive('df-b2fcb895-35b1-4b70-81b9-90b51ff7591e')\"\n",
              "            title=\"Convert this dataframe to an interactive table.\"\n",
              "            style=\"display:none;\">\n",
              "\n",
              "  <svg xmlns=\"http://www.w3.org/2000/svg\" height=\"24px\" viewBox=\"0 -960 960 960\">\n",
              "    <path d=\"M120-120v-720h720v720H120Zm60-500h600v-160H180v160Zm220 220h160v-160H400v160Zm0 220h160v-160H400v160ZM180-400h160v-160H180v160Zm440 0h160v-160H620v160ZM180-180h160v-160H180v160Zm440 0h160v-160H620v160Z\"/>\n",
              "  </svg>\n",
              "    </button>\n",
              "\n",
              "  <style>\n",
              "    .colab-df-container {\n",
              "      display:flex;\n",
              "      gap: 12px;\n",
              "    }\n",
              "\n",
              "    .colab-df-convert {\n",
              "      background-color: #E8F0FE;\n",
              "      border: none;\n",
              "      border-radius: 50%;\n",
              "      cursor: pointer;\n",
              "      display: none;\n",
              "      fill: #1967D2;\n",
              "      height: 32px;\n",
              "      padding: 0 0 0 0;\n",
              "      width: 32px;\n",
              "    }\n",
              "\n",
              "    .colab-df-convert:hover {\n",
              "      background-color: #E2EBFA;\n",
              "      box-shadow: 0px 1px 2px rgba(60, 64, 67, 0.3), 0px 1px 3px 1px rgba(60, 64, 67, 0.15);\n",
              "      fill: #174EA6;\n",
              "    }\n",
              "\n",
              "    .colab-df-buttons div {\n",
              "      margin-bottom: 4px;\n",
              "    }\n",
              "\n",
              "    [theme=dark] .colab-df-convert {\n",
              "      background-color: #3B4455;\n",
              "      fill: #D2E3FC;\n",
              "    }\n",
              "\n",
              "    [theme=dark] .colab-df-convert:hover {\n",
              "      background-color: #434B5C;\n",
              "      box-shadow: 0px 1px 3px 1px rgba(0, 0, 0, 0.15);\n",
              "      filter: drop-shadow(0px 1px 2px rgba(0, 0, 0, 0.3));\n",
              "      fill: #FFFFFF;\n",
              "    }\n",
              "  </style>\n",
              "\n",
              "    <script>\n",
              "      const buttonEl =\n",
              "        document.querySelector('#df-b2fcb895-35b1-4b70-81b9-90b51ff7591e button.colab-df-convert');\n",
              "      buttonEl.style.display =\n",
              "        google.colab.kernel.accessAllowed ? 'block' : 'none';\n",
              "\n",
              "      async function convertToInteractive(key) {\n",
              "        const element = document.querySelector('#df-b2fcb895-35b1-4b70-81b9-90b51ff7591e');\n",
              "        const dataTable =\n",
              "          await google.colab.kernel.invokeFunction('convertToInteractive',\n",
              "                                                    [key], {});\n",
              "        if (!dataTable) return;\n",
              "\n",
              "        const docLinkHtml = 'Like what you see? Visit the ' +\n",
              "          '<a target=\"_blank\" href=https://colab.research.google.com/notebooks/data_table.ipynb>data table notebook</a>'\n",
              "          + ' to learn more about interactive tables.';\n",
              "        element.innerHTML = '';\n",
              "        dataTable['output_type'] = 'display_data';\n",
              "        await google.colab.output.renderOutput(dataTable, element);\n",
              "        const docLink = document.createElement('div');\n",
              "        docLink.innerHTML = docLinkHtml;\n",
              "        element.appendChild(docLink);\n",
              "      }\n",
              "    </script>\n",
              "  </div>\n",
              "\n",
              "\n",
              "    <div id=\"df-cdfbe93f-04e5-43ad-9619-82e23785dfe2\">\n",
              "      <button class=\"colab-df-quickchart\" onclick=\"quickchart('df-cdfbe93f-04e5-43ad-9619-82e23785dfe2')\"\n",
              "                title=\"Suggest charts\"\n",
              "                style=\"display:none;\">\n",
              "\n",
              "<svg xmlns=\"http://www.w3.org/2000/svg\" height=\"24px\"viewBox=\"0 0 24 24\"\n",
              "     width=\"24px\">\n",
              "    <g>\n",
              "        <path d=\"M19 3H5c-1.1 0-2 .9-2 2v14c0 1.1.9 2 2 2h14c1.1 0 2-.9 2-2V5c0-1.1-.9-2-2-2zM9 17H7v-7h2v7zm4 0h-2V7h2v10zm4 0h-2v-4h2v4z\"/>\n",
              "    </g>\n",
              "</svg>\n",
              "      </button>\n",
              "\n",
              "<style>\n",
              "  .colab-df-quickchart {\n",
              "      --bg-color: #E8F0FE;\n",
              "      --fill-color: #1967D2;\n",
              "      --hover-bg-color: #E2EBFA;\n",
              "      --hover-fill-color: #174EA6;\n",
              "      --disabled-fill-color: #AAA;\n",
              "      --disabled-bg-color: #DDD;\n",
              "  }\n",
              "\n",
              "  [theme=dark] .colab-df-quickchart {\n",
              "      --bg-color: #3B4455;\n",
              "      --fill-color: #D2E3FC;\n",
              "      --hover-bg-color: #434B5C;\n",
              "      --hover-fill-color: #FFFFFF;\n",
              "      --disabled-bg-color: #3B4455;\n",
              "      --disabled-fill-color: #666;\n",
              "  }\n",
              "\n",
              "  .colab-df-quickchart {\n",
              "    background-color: var(--bg-color);\n",
              "    border: none;\n",
              "    border-radius: 50%;\n",
              "    cursor: pointer;\n",
              "    display: none;\n",
              "    fill: var(--fill-color);\n",
              "    height: 32px;\n",
              "    padding: 0;\n",
              "    width: 32px;\n",
              "  }\n",
              "\n",
              "  .colab-df-quickchart:hover {\n",
              "    background-color: var(--hover-bg-color);\n",
              "    box-shadow: 0 1px 2px rgba(60, 64, 67, 0.3), 0 1px 3px 1px rgba(60, 64, 67, 0.15);\n",
              "    fill: var(--button-hover-fill-color);\n",
              "  }\n",
              "\n",
              "  .colab-df-quickchart-complete:disabled,\n",
              "  .colab-df-quickchart-complete:disabled:hover {\n",
              "    background-color: var(--disabled-bg-color);\n",
              "    fill: var(--disabled-fill-color);\n",
              "    box-shadow: none;\n",
              "  }\n",
              "\n",
              "  .colab-df-spinner {\n",
              "    border: 2px solid var(--fill-color);\n",
              "    border-color: transparent;\n",
              "    border-bottom-color: var(--fill-color);\n",
              "    animation:\n",
              "      spin 1s steps(1) infinite;\n",
              "  }\n",
              "\n",
              "  @keyframes spin {\n",
              "    0% {\n",
              "      border-color: transparent;\n",
              "      border-bottom-color: var(--fill-color);\n",
              "      border-left-color: var(--fill-color);\n",
              "    }\n",
              "    20% {\n",
              "      border-color: transparent;\n",
              "      border-left-color: var(--fill-color);\n",
              "      border-top-color: var(--fill-color);\n",
              "    }\n",
              "    30% {\n",
              "      border-color: transparent;\n",
              "      border-left-color: var(--fill-color);\n",
              "      border-top-color: var(--fill-color);\n",
              "      border-right-color: var(--fill-color);\n",
              "    }\n",
              "    40% {\n",
              "      border-color: transparent;\n",
              "      border-right-color: var(--fill-color);\n",
              "      border-top-color: var(--fill-color);\n",
              "    }\n",
              "    60% {\n",
              "      border-color: transparent;\n",
              "      border-right-color: var(--fill-color);\n",
              "    }\n",
              "    80% {\n",
              "      border-color: transparent;\n",
              "      border-right-color: var(--fill-color);\n",
              "      border-bottom-color: var(--fill-color);\n",
              "    }\n",
              "    90% {\n",
              "      border-color: transparent;\n",
              "      border-bottom-color: var(--fill-color);\n",
              "    }\n",
              "  }\n",
              "</style>\n",
              "\n",
              "      <script>\n",
              "        async function quickchart(key) {\n",
              "          const quickchartButtonEl =\n",
              "            document.querySelector('#' + key + ' button');\n",
              "          quickchartButtonEl.disabled = true;  // To prevent multiple clicks.\n",
              "          quickchartButtonEl.classList.add('colab-df-spinner');\n",
              "          try {\n",
              "            const charts = await google.colab.kernel.invokeFunction(\n",
              "                'suggestCharts', [key], {});\n",
              "          } catch (error) {\n",
              "            console.error('Error during call to suggestCharts:', error);\n",
              "          }\n",
              "          quickchartButtonEl.classList.remove('colab-df-spinner');\n",
              "          quickchartButtonEl.classList.add('colab-df-quickchart-complete');\n",
              "        }\n",
              "        (() => {\n",
              "          let quickchartButtonEl =\n",
              "            document.querySelector('#df-cdfbe93f-04e5-43ad-9619-82e23785dfe2 button');\n",
              "          quickchartButtonEl.style.display =\n",
              "            google.colab.kernel.accessAllowed ? 'block' : 'none';\n",
              "        })();\n",
              "      </script>\n",
              "    </div>\n",
              "\n",
              "    </div>\n",
              "  </div>\n"
            ],
            "application/vnd.google.colaboratory.intrinsic+json": {
              "type": "dataframe",
              "variable_name": "qa_df",
              "summary": "{\n  \"name\": \"qa_df\",\n  \"rows\": 500,\n  \"fields\": [\n    {\n      \"column\": \"id\",\n      \"properties\": {\n        \"dtype\": \"number\",\n        \"std\": 144,\n        \"min\": 0,\n        \"max\": 499,\n        \"num_unique_values\": 500,\n        \"samples\": [\n          361,\n          73,\n          374\n        ],\n        \"semantic_type\": \"\",\n        \"description\": \"\"\n      }\n    },\n    {\n      \"column\": \"question\",\n      \"properties\": {\n        \"dtype\": \"string\",\n        \"num_unique_values\": 500,\n        \"samples\": [\n          \"What are the four main hypotheses that have been proposed to explain the formation of Artemis on Venus?\",\n          \"Could you please list all the key actions that Ready Fairfax asks residents and neighbors to take for emergency preparedness?\",\n          \"For how many years was Jubilee Showcase broadcast on TV?\"\n        ],\n        \"semantic_type\": \"\",\n        \"description\": \"\"\n      }\n    }\n  ]\n}"
            }
          },
          "metadata": {},
          "execution_count": 30
        }
      ]
    },
    {
      "cell_type": "code",
      "source": [
        "#@title since pinecone raised rate limit error, split the testing dataset\n",
        "# Split the evaluation set at a fixed row index so each half stays under\n",
        "# Pinecone's request rate limit. .copy() detaches each slice from qa_df so\n",
        "# later mutations of a split cannot trigger SettingWithCopyWarning or\n",
        "# silently alias the original frame.\n",
        "SPLIT_SIZE = 300  # number of questions in the first split\n",
        "qa_first_split_df = qa_df.head(SPLIT_SIZE).copy()\n",
        "qa_last_split_df = qa_df.iloc[SPLIT_SIZE:].copy()"
      ],
      "metadata": {
        "id": "N23FjhCNyZO0"
      },
      "execution_count": null,
      "outputs": []
    },
    {
      "cell_type": "code",
      "source": [
        "#@title retrieve first split passages\n",
        "print('Retrieving documents')\n",
        "retriever = Retriever(qa_first_split_df, top_k=200)\n",
        "start = time()\n",
        "dense_df = retriever.retrieve(indexer=\"pinecone\")\n",
        "retriever.save_results(dense_df, \"dense_retr_passages_first_split.csv\")\n",
        "end = time()\n",
        "print(f'Documents Retrieved from Pinecone: {end-start:.2f}')\n",
        "\n",
        "# Retrieve\n",
        "# OpenSearch retrieval\n",
        "start = time()\n",
        "sparse_df = retriever.retrieve(indexer=\"opensearch\")\n",
        "retriever.save_results(sparse_df, \"sparse_retr_passages_first_split.csv\")\n",
        "end = time()\n",
        "print(f'Documents Retrieved from Opensearch: {end-start:.2f}')\n",
        "\n",
        "#hybrid retrieval\n",
        "start = time()\n",
        "retriever = Retriever(qa_first_split_df, top_k=200, dense_ind=dense_df, sparse_ind=sparse_df)\n",
        "hybrid_df = retriever.retrieve(indexer=\"merged\")\n",
        "retriever.save_results(hybrid_df, \"hybrid_retr_passages_first_split.csv\")\n",
        "end = time()\n",
        "print(f'Documents Retrieved from Hybrid: {end-start:.2f}')"
      ],
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "outputId": "b5674171-447e-43a7-b4af-c52630df5ca0",
        "id": "ZL3rHewnyZO1"
      },
      "execution_count": null,
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "Retrieving documents\n",
            "Documents Retrieved from Pinecone: 67.16\n"
          ]
        }
      ]
    },
    {
      "cell_type": "code",
      "source": [
        "#@title run first split\n",
        "# Answer generator backed by the Falcon3-10B-Instruct model.\n",
        "generator = AugmentedGenerator(model=\"tiiuae/falcon3-10b-instruct\")\n",
        "# Use the hybrid (dense + sparse merged) retrieval results for this run.\n",
        "retrieval_df = hybrid_df\n",
        "# Attach each question to its retrieved passages; the inner join keeps only\n",
        "# ids present in both frames.\n",
        "data = pd.merge(retrieval_df, qa_first_split_df, on=\"id\", how=\"inner\")\n",
        "\n",
        "# Run the TopClustRAG pipeline over the first split (300 questions), with 10\n",
        "# parallel workers, searching cluster counts k = 40, 45, ..., 65.\n",
        "topclustrag_first_split = run_TopClustRAG(\n",
        "    data, generator, reranker, n_parallel=10, k_range=range(40, 70, 5))"
      ],
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "collapsed": true,
        "outputId": "35d6ea37-4832-43c4-a19c-d916e29084a1",
        "id": "bRg_k3VXyZO3"
      },
      "execution_count": null,
      "outputs": [
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  0%|          | 0/300 [00:00<?, ?it/s]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.16 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 40\n",
            "  → Generator took 4.67 seconds\n",
            "  → Reranker took 2.46 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  0%|          | 1/300 [00:09<45:12,  9.07s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.53 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.34 seconds\n",
            "\n",
            "  → Clusterer took 0.15 seconds | best k = 45\n",
            "  → Generator took 3.79 seconds\n",
            "  → Reranker took 0.52 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  1%|          | 2/300 [00:14<34:52,  7.02s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.70 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.18 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 40\n",
            "  → Generator took 3.10 seconds\n",
            "  → Reranker took 0.12 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  1%|          | 3/300 [00:19<28:48,  5.82s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.74 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.11 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 55\n",
            "  → Generator took 3.90 seconds\n",
            "  → Reranker took 0.63 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  1%|▏         | 4/300 [00:25<29:17,  5.94s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.24 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 5.90 seconds\n",
            "  → Reranker took 2.40 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  2%|▏         | 5/300 [00:34<36:00,  7.32s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.09 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.35 seconds\n",
            "\n",
            "  → Clusterer took 0.34 seconds | best k = 45\n",
            "  → Generator took 3.24 seconds\n",
            "  → Reranker took 0.23 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  2%|▏         | 6/300 [00:40<32:53,  6.71s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.28 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.24 seconds\n",
            "\n",
            "  → Clusterer took 0.37 seconds | best k = 40\n",
            "  → Generator took 3.44 seconds\n",
            "  → Reranker took 0.27 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  2%|▏         | 7/300 [00:46<31:26,  6.44s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.49 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.64 seconds\n",
            "\n",
            "  → Clusterer took 0.31 seconds | best k = 40\n",
            "  → Generator took 3.81 seconds\n",
            "  → Reranker took 0.62 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  3%|▎         | 8/300 [00:52<30:58,  6.37s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.74 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 3.13 seconds\n",
            "  → Reranker took 0.18 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  3%|▎         | 9/300 [00:57<28:15,  5.83s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.98 seconds\n",
            "Tf-Idf embeddings generated in 0.02 seconds\n",
            "Reduced dimensions in 0.11 seconds\n",
            "\n",
            "  → Clusterer took 0.24 seconds | best k = 40\n",
            "  → Generator took 2.68 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  3%|▎         | 10/300 [01:00<24:53,  5.15s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.58 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.40 seconds\n",
            "\n",
            "  → Clusterer took 0.27 seconds | best k = 40\n",
            "  → Generator took 3.05 seconds\n",
            "  → Reranker took 0.37 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  4%|▎         | 11/300 [01:06<25:30,  5.30s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.47 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.21 seconds | best k = 40\n",
            "  → Generator took 3.09 seconds\n",
            "  → Reranker took 0.20 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  4%|▍         | 12/300 [01:11<24:38,  5.13s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.09 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 3.04 seconds\n",
            "  → Reranker took 0.58 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  4%|▍         | 13/300 [01:16<24:38,  5.15s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.22 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 3.09 seconds\n",
            "  → Reranker took 0.37 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  5%|▍         | 14/300 [01:21<24:05,  5.05s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.99 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.12 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 45\n",
            "  → Generator took 3.49 seconds\n",
            "  → Reranker took 0.45 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  5%|▌         | 15/300 [01:26<24:15,  5.11s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.92 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.51 seconds\n",
            "\n",
            "  → Clusterer took 0.40 seconds | best k = 40\n",
            "  → Generator took 3.38 seconds\n",
            "  → Reranker took 0.52 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  5%|▌         | 16/300 [01:32<25:50,  5.46s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.42 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 50\n",
            "  → Generator took 3.46 seconds\n",
            "  → Reranker took 0.08 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  6%|▌         | 17/300 [01:37<25:02,  5.31s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.07 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.16 seconds | best k = 45\n",
            "  → Generator took 4.34 seconds\n",
            "  → Reranker took 0.88 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  6%|▌         | 18/300 [01:44<27:16,  5.80s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.38 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.12 seconds\n",
            "\n",
            "  → Clusterer took 0.17 seconds | best k = 40\n",
            "  → Generator took 3.64 seconds\n",
            "  → Reranker took 0.93 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  6%|▋         | 19/300 [01:51<28:45,  6.14s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 2.03 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.48 seconds\n",
            "\n",
            "  → Clusterer took 0.35 seconds | best k = 40\n",
            "  → Generator took 2.87 seconds\n",
            "  → Reranker took 0.10 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  7%|▋         | 20/300 [01:56<26:25,  5.66s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.69 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.09 seconds\n",
            "\n",
            "  → Clusterer took 0.17 seconds | best k = 40\n",
            "  → Generator took 3.31 seconds\n",
            "  → Reranker took 0.19 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  7%|▋         | 21/300 [02:01<25:36,  5.51s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.35 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.19 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 3.03 seconds\n",
            "  → Reranker took 0.31 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  7%|▋         | 22/300 [02:06<24:36,  5.31s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.09 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.35 seconds\n",
            "\n",
            "  → Clusterer took 0.35 seconds | best k = 60\n",
            "  → Generator took 3.91 seconds\n",
            "  → Reranker took 0.14 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  8%|▊         | 23/300 [02:11<25:02,  5.42s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.85 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.19 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 55\n",
            "  → Generator took 3.91 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  8%|▊         | 24/300 [02:16<24:15,  5.27s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.59 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.11 seconds\n",
            "\n",
            "  → Clusterer took 0.21 seconds | best k = 40\n",
            "  → Generator took 3.68 seconds\n",
            "  → Reranker took 0.01 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  8%|▊         | 25/300 [02:21<23:20,  5.09s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.61 seconds\n",
            "Tf-Idf embeddings generated in 0.10 seconds\n",
            "Reduced dimensions in 0.38 seconds\n",
            "\n",
            "  → Clusterer took 0.30 seconds | best k = 40\n",
            "  → Generator took 2.66 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  9%|▊         | 26/300 [02:25<21:46,  4.77s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.56 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.10 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 40\n",
            "  → Generator took 3.35 seconds\n",
            "  → Reranker took 0.69 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  9%|▉         | 27/300 [02:30<22:33,  4.96s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.01 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.17 seconds | best k = 55\n",
            "  → Generator took 3.72 seconds\n",
            "  → Reranker took 0.12 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  9%|▉         | 28/300 [02:35<22:33,  4.97s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.82 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.10 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 50\n",
            "  → Generator took 4.46 seconds\n",
            "  → Reranker took 0.71 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 10%|▉         | 29/300 [02:42<24:18,  5.38s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.81 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.16 seconds\n",
            "\n",
            "  → Clusterer took 0.16 seconds | best k = 40\n",
            "  → Generator took 2.94 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 10%|█         | 30/300 [02:46<22:11,  4.93s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.57 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.72 seconds\n",
            "\n",
            "  → Clusterer took 0.31 seconds | best k = 40\n",
            "  → Generator took 2.60 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 10%|█         | 31/300 [02:50<21:13,  4.73s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.58 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.11 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 55\n",
            "  → Generator took 3.84 seconds\n",
            "  → Reranker took 0.51 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 11%|█         | 32/300 [02:56<23:01,  5.15s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.45 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 3.02 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 11%|█         | 33/300 [03:00<21:24,  4.81s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.62 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.15 seconds | best k = 55\n",
            "  → Generator took 3.72 seconds\n",
            "  → Reranker took 0.17 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 11%|█▏        | 34/300 [03:05<22:08,  4.99s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.19 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.10 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 50\n",
            "Error: APIError: - HTTP 502 error: <html>\n",
            "<head><title>502 Bad Gateway</title></head>\n",
            "<body>\n",
            "<center><h1>502 Bad Gateway</h1></center>\n",
            "</body>\n",
            "</html>\n",
            " | Request 1 | Retry 0\n",
            "  → Generator took 6.23 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 12%|█▏        | 35/300 [03:13<24:53,  5.64s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.58 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.11 seconds\n",
            "\n",
            "  → Clusterer took 0.22 seconds | best k = 40\n",
            "  → Generator took 7.49 seconds\n",
            "  → Reranker took 0.76 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 12%|█▏        | 36/300 [03:22<30:09,  6.85s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.07 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.26 seconds\n",
            "\n",
            "  → Clusterer took 0.27 seconds | best k = 40\n",
            "  → Generator took 3.10 seconds\n",
            "  → Reranker took 0.74 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 12%|█▏        | 37/300 [03:28<28:35,  6.52s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.31 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.34 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 50\n",
            "  → Generator took 3.48 seconds\n",
            "  → Reranker took 0.19 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 13%|█▎        | 38/300 [03:33<26:44,  6.13s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.93 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.17 seconds\n",
            "\n",
            "  → Clusterer took 0.22 seconds | best k = 40\n",
            "  → Generator took 3.19 seconds\n",
            "  → Reranker took 0.87 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 13%|█▎        | 39/300 [03:39<26:48,  6.16s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.75 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.09 seconds\n",
            "\n",
            "  → Clusterer took 0.23 seconds | best k = 50\n",
            "  → Generator took 3.72 seconds\n",
            "  → Reranker took 0.48 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 13%|█▎        | 40/300 [03:45<25:38,  5.92s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.78 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 3.89 seconds\n",
            "  → Reranker took 1.63 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 14%|█▎        | 41/300 [03:52<27:16,  6.32s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.36 seconds\n",
            "Tf-Idf embeddings generated in 0.13 seconds\n",
            "Reduced dimensions in 0.89 seconds\n",
            "\n",
            "  → Clusterer took 0.57 seconds | best k = 40\n",
            "  → Generator took 2.78 seconds\n",
            "  → Reranker took 0.19 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 14%|█▍        | 42/300 [03:57<25:58,  6.04s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.82 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.24 seconds\n",
            "\n",
            "  → Clusterer took 0.27 seconds | best k = 55\n",
            "  → Generator took 4.23 seconds\n",
            "  → Reranker took 1.01 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 14%|█▍        | 43/300 [04:05<27:43,  6.47s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.66 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.27 seconds\n",
            "\n",
            "  → Clusterer took 0.39 seconds | best k = 40\n",
            "  → Generator took 2.94 seconds\n",
            "  → Reranker took 0.10 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 15%|█▍        | 44/300 [04:09<25:03,  5.87s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.71 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.12 seconds\n",
            "\n",
            "  → Clusterer took 0.17 seconds | best k = 40\n",
            "  → Generator took 3.21 seconds\n",
            "  → Reranker took 0.08 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 15%|█▌        | 45/300 [04:14<23:25,  5.51s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.05 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 2.72 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 15%|█▌        | 46/300 [04:18<20:58,  4.96s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.58 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.38 seconds\n",
            "\n",
            "  → Clusterer took 0.27 seconds | best k = 45\n",
            "  → Generator took 3.61 seconds\n",
            "  → Reranker took 0.26 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 16%|█▌        | 47/300 [04:24<22:22,  5.31s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.55 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.11 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 60\n",
            "  → Generator took 5.35 seconds\n",
            "  → Reranker took 1.23 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 16%|█▌        | 48/300 [04:32<25:52,  6.16s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.24 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.49 seconds\n",
            "\n",
            "  → Clusterer took 0.32 seconds | best k = 40\n",
            "  → Generator took 2.97 seconds\n",
            "  → Reranker took 0.12 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 16%|█▋        | 49/300 [04:37<24:01,  5.74s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.80 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.21 seconds | best k = 55\n",
            "  → Generator took 5.00 seconds\n",
            "  → Reranker took 0.51 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 17%|█▋        | 50/300 [04:44<25:37,  6.15s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.21 seconds\n",
            "✅ Saved batch 0 to /content/drive/MyDrive/SIGIR2025_LiveRAG/batch_0_live.jsonl\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.68 seconds\n",
            "\n",
            "  → Clusterer took 0.31 seconds | best k = 40\n",
            "  → Generator took 3.20 seconds\n",
            "  → Reranker took 0.16 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 17%|█▋        | 51/300 [04:50<25:53,  6.24s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 2.03 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.12 seconds\n",
            "\n",
            "  → Clusterer took 0.21 seconds | best k = 40\n",
            "  → Generator took 2.76 seconds\n",
            "  → Reranker took 0.11 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 17%|█▋        | 52/300 [04:54<23:03,  5.58s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.79 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.16 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 3.11 seconds\n",
            "  → Reranker took 0.15 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 18%|█▊        | 53/300 [04:59<21:28,  5.22s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.71 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.29 seconds\n",
            "\n",
            "  → Clusterer took 0.32 seconds | best k = 40\n",
            "  → Generator took 2.86 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 18%|█▊        | 54/300 [05:03<20:09,  4.92s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.69 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 40\n",
            "  → Generator took 2.91 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 18%|█▊        | 55/300 [05:07<18:47,  4.60s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.58 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.19 seconds\n",
            "\n",
            "  → Clusterer took 0.16 seconds | best k = 55\n",
            "  → Generator took 4.25 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 19%|█▊        | 56/300 [05:12<19:29,  4.79s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.60 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.29 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 4.79 seconds\n",
            "  → Reranker took 0.54 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 19%|█▉        | 57/300 [05:19<22:13,  5.49s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.21 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.12 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 50\n",
            "  → Generator took 4.36 seconds\n",
            "  → Reranker took 0.59 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 19%|█▉        | 58/300 [05:26<23:16,  5.77s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.13 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.17 seconds\n",
            "\n",
            "  → Clusterer took 0.22 seconds | best k = 40\n",
            "  → Generator took 3.38 seconds\n",
            "  → Reranker took 0.51 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 20%|█▉        | 59/300 [05:31<23:00,  5.73s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.31 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.25 seconds\n",
            "\n",
            "  → Clusterer took 0.21 seconds | best k = 40\n",
            "  → Generator took 3.60 seconds\n",
            "  → Reranker took 1.69 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 20%|██        | 60/300 [05:38<23:50,  5.96s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.68 seconds\n",
            "Tf-Idf embeddings generated in 0.12 seconds\n",
            "Reduced dimensions in 0.86 seconds\n",
            "\n",
            "  → Clusterer took 0.46 seconds | best k = 55\n",
            "  → Generator took 5.51 seconds\n",
            "  → Reranker took 1.71 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 20%|██        | 61/300 [05:47<28:11,  7.08s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.99 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.45 seconds\n",
            "\n",
            "  → Clusterer took 0.33 seconds | best k = 45\n",
            "  → Generator took 4.30 seconds\n",
            "  → Reranker took 2.17 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 21%|██        | 62/300 [05:57<30:33,  7.70s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.83 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.40 seconds | best k = 40\n",
            "  → Generator took 5.09 seconds\n",
            "  → Reranker took 1.91 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 21%|██        | 63/300 [06:06<32:45,  8.29s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 2.10 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 1.05 seconds\n",
            "\n",
            "  → Clusterer took 0.31 seconds | best k = 40\n",
            "  → Generator took 2.85 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 21%|██▏       | 64/300 [06:11<28:37,  7.28s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.61 seconds\n",
            "Tf-Idf embeddings generated in 0.13 seconds\n",
            "Reduced dimensions in 0.36 seconds\n",
            "\n",
            "  → Clusterer took 0.30 seconds | best k = 40\n",
            "  → Generator took 3.13 seconds\n",
            "  → Reranker took 0.14 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 22%|██▏       | 65/300 [06:16<25:34,  6.53s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.72 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.11 seconds\n",
            "\n",
            "  → Clusterer took 0.21 seconds | best k = 50\n",
            "  → Generator took 3.78 seconds\n",
            "  → Reranker took 0.42 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 22%|██▏       | 66/300 [06:22<24:34,  6.30s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.20 seconds\n",
            "Tf-Idf embeddings generated in 0.10 seconds\n",
            "Reduced dimensions in 0.69 seconds\n",
            "\n",
            "  → Clusterer took 0.56 seconds | best k = 55\n",
            "  → Generator took 3.95 seconds\n",
            "  → Reranker took 0.10 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 22%|██▏       | 67/300 [06:29<25:23,  6.54s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.69 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.18 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 6.13 seconds\n",
            "  → Reranker took 1.80 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 23%|██▎       | 68/300 [06:39<28:59,  7.50s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.38 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.32 seconds\n",
            "\n",
            "  → Clusterer took 0.35 seconds | best k = 40\n",
            "  → Generator took 3.62 seconds\n",
            "  → Reranker took 0.27 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 23%|██▎       | 69/300 [06:44<26:58,  7.01s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.22 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.16 seconds | best k = 40\n",
            "  → Generator took 4.36 seconds\n",
            "  → Reranker took 1.14 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 23%|██▎       | 70/300 [06:51<26:52,  7.01s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.14 seconds\n",
            "Tf-Idf embeddings generated in 0.12 seconds\n",
            "Reduced dimensions in 0.45 seconds\n",
            "\n",
            "  → Clusterer took 0.28 seconds | best k = 65\n",
            "  → Generator took 4.88 seconds\n",
            "  → Reranker took 0.12 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 24%|██▎       | 71/300 [06:58<26:37,  6.98s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.03 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.16 seconds\n",
            "\n",
            "  → Clusterer took 0.16 seconds | best k = 40\n",
            "  → Generator took 3.15 seconds\n",
            "  → Reranker took 0.15 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 24%|██▍       | 72/300 [07:03<23:39,  6.22s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.81 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.25 seconds\n",
            "\n",
            "  → Clusterer took 0.17 seconds | best k = 55\n",
            "  → Generator took 5.27 seconds\n",
            "  → Reranker took 0.53 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 24%|██▍       | 73/300 [07:10<25:11,  6.66s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.38 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.17 seconds | best k = 50\n",
            "  → Generator took 6.52 seconds\n",
            "  → Reranker took 1.09 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 25%|██▍       | 74/300 [07:22<30:08,  8.00s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 3.18 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.11 seconds\n",
            "\n",
            "  → Clusterer took 0.21 seconds | best k = 40\n",
            "  → Generator took 5.64 seconds\n",
            "  → Reranker took 1.05 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 25%|██▌       | 75/300 [07:30<29:58,  8.00s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.93 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.12 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 50\n",
            "  → Generator took 6.11 seconds\n",
            "  → Reranker took 0.75 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 25%|██▌       | 76/300 [07:38<29:51,  8.00s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.81 seconds\n",
            "Tf-Idf embeddings generated in 0.08 seconds\n",
            "Reduced dimensions in 0.50 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 2.96 seconds\n",
            "  → Reranker took 0.17 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 26%|██▌       | 77/300 [07:42<26:18,  7.08s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.05 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 2.89 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 26%|██▌       | 78/300 [07:46<22:35,  6.11s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.57 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.17 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 55\n",
            "  → Generator took 5.03 seconds\n",
            "  → Reranker took 0.98 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 26%|██▋       | 79/300 [07:54<23:53,  6.49s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.95 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.32 seconds\n",
            "\n",
            "  → Clusterer took 0.35 seconds | best k = 40\n",
            "  → Generator took 3.04 seconds\n",
            "  → Reranker took 0.24 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 27%|██▋       | 80/300 [07:59<22:03,  6.01s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.88 seconds\n",
            "Tf-Idf embeddings generated in 0.11 seconds\n",
            "Reduced dimensions in 0.25 seconds\n",
            "\n",
            "  → Clusterer took 0.22 seconds | best k = 40\n",
            "  → Generator took 2.76 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 27%|██▋       | 81/300 [08:03<19:45,  5.41s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.66 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.75 seconds\n",
            "\n",
            "  → Clusterer took 0.41 seconds | best k = 55\n",
            "  → Generator took 4.71 seconds\n",
            "  → Reranker took 1.40 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 27%|██▋       | 82/300 [08:11<23:04,  6.35s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.20 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.48 seconds\n",
            "\n",
            "  → Clusterer took 0.30 seconds | best k = 40\n",
            "  → Generator took 3.53 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 28%|██▊       | 83/300 [08:16<21:30,  5.95s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.61 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 3.10 seconds\n",
            "  → Reranker took 0.13 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 28%|██▊       | 84/300 [08:21<19:57,  5.54s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.97 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.60 seconds\n",
            "\n",
            "  → Clusterer took 0.32 seconds | best k = 40\n",
            "  → Generator took 3.40 seconds\n",
            "  → Reranker took 0.33 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 28%|██▊       | 85/300 [08:27<20:41,  5.77s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.59 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.16 seconds\n",
            "\n",
            "  → Clusterer took 0.22 seconds | best k = 40\n",
            "  → Generator took 2.78 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 29%|██▊       | 86/300 [08:31<18:26,  5.17s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.57 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 55\n",
            "  → Generator took 4.38 seconds\n",
            "  → Reranker took 0.22 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 29%|██▉       | 87/300 [08:37<19:37,  5.53s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.40 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 3.12 seconds\n",
            "  → Reranker took 0.21 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 29%|██▉       | 88/300 [08:42<18:57,  5.37s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.29 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.11 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 45\n",
            "  → Generator took 6.05 seconds\n",
            "  → Reranker took 1.35 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 30%|██▉       | 89/300 [08:51<22:54,  6.52s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.45 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 3.01 seconds\n",
            "  → Reranker took 0.16 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 30%|███       | 90/300 [08:56<20:56,  5.98s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.23 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.17 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 40\n",
            "  → Generator took 3.36 seconds\n",
            "  → Reranker took 0.20 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 30%|███       | 91/300 [09:01<19:26,  5.58s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.67 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 60\n",
            "  → Generator took 5.77 seconds\n",
            "  → Reranker took 0.56 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 31%|███       | 92/300 [09:09<22:07,  6.38s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.54 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.09 seconds\n",
            "\n",
            "  → Clusterer took 0.16 seconds | best k = 65\n",
            "  → Generator took 6.94 seconds\n",
            "  → Reranker took 0.83 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 31%|███       | 93/300 [09:18<24:52,  7.21s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.08 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 2.73 seconds\n",
            "  → Reranker took 0.26 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 31%|███▏      | 94/300 [09:22<21:32,  6.27s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.74 seconds\n",
            "Tf-Idf embeddings generated in 0.16 seconds\n",
            "Reduced dimensions in 0.86 seconds\n",
            "\n",
            "  → Clusterer took 0.33 seconds | best k = 40\n",
            "  → Generator took 2.79 seconds\n",
            "  → Reranker took 0.19 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 32%|███▏      | 95/300 [09:31<23:52,  6.99s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 4.32 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 55\n",
            "  → Generator took 3.99 seconds\n",
            "  → Reranker took 0.15 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 32%|███▏      | 96/300 [09:37<22:26,  6.60s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.18 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.56 seconds\n",
            "\n",
            "  → Clusterer took 0.35 seconds | best k = 45\n",
            "  → Generator took 4.51 seconds\n",
            "  → Reranker took 0.59 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 32%|███▏      | 97/300 [09:44<23:20,  6.90s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.50 seconds\n",
            "Tf-Idf embeddings generated in 0.22 seconds\n",
            "Reduced dimensions in 0.59 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 3.48 seconds\n",
            "  → Reranker took 0.89 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 33%|███▎      | 98/300 [09:51<23:00,  6.84s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.32 seconds\n",
            "Tf-Idf embeddings generated in 0.10 seconds\n",
            "Reduced dimensions in 0.47 seconds\n",
            "\n",
            "  → Clusterer took 1.13 seconds | best k = 40\n",
            "  → Generator took 2.98 seconds\n",
            "  → Reranker took 0.12 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 33%|███▎      | 99/300 [09:57<21:51,  6.52s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.99 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.20 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 55\n",
            "  → Generator took 4.97 seconds\n",
            "  → Reranker took 0.15 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 33%|███▎      | 100/300 [10:03<21:27,  6.44s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.67 seconds\n",
            "✅ Saved batch 1 to /content/drive/MyDrive/SIGIR2025_LiveRAG/batch_1_live.jsonl\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.17 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 45\n",
            "  → Generator took 3.96 seconds\n",
            "  → Reranker took 0.46 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 34%|███▎      | 101/300 [10:08<20:28,  6.18s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.75 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.12 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 55\n",
            "  → Generator took 4.60 seconds\n",
            "  → Reranker took 0.36 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 34%|███▍      | 102/300 [10:16<21:45,  6.59s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 2.25 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.34 seconds\n",
            "\n",
            "  → Clusterer took 0.39 seconds | best k = 40\n",
            "  → Generator took 3.21 seconds\n",
            "  → Reranker took 0.25 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 34%|███▍      | 103/300 [10:22<21:22,  6.51s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 2.06 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.21 seconds | best k = 40\n",
            "  → Generator took 3.03 seconds\n",
            "  → Reranker took 0.16 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 35%|███▍      | 104/300 [10:27<19:18,  5.91s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.95 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.17 seconds | best k = 40\n",
            "  → Generator took 4.05 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 35%|███▌      | 105/300 [10:32<18:19,  5.64s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.60 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.44 seconds\n",
            "\n",
            "  → Clusterer took 0.27 seconds | best k = 40\n",
            "  → Generator took 2.92 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 35%|███▌      | 106/300 [10:36<16:55,  5.23s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.58 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 2.83 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 36%|███▌      | 107/300 [10:40<15:28,  4.81s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.62 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.17 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 40\n",
            "  → Generator took 2.96 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 36%|███▌      | 108/300 [10:44<14:34,  4.55s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.57 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.11 seconds\n",
            "\n",
            "  → Clusterer took 0.17 seconds | best k = 55\n",
            "  → Generator took 3.96 seconds\n",
            "  → Reranker took 0.23 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 36%|███▋      | 109/300 [10:49<15:18,  4.81s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.88 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.21 seconds | best k = 40\n",
            "  → Generator took 3.23 seconds\n",
            "  → Reranker took 0.18 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 37%|███▋      | 110/300 [10:54<15:22,  4.86s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.16 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.21 seconds | best k = 40\n",
            "  → Generator took 3.25 seconds\n",
            "  → Reranker took 0.39 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 37%|███▋      | 111/300 [10:59<15:30,  4.92s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.05 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.40 seconds\n",
            "\n",
            "  → Clusterer took 0.29 seconds | best k = 40\n",
            "  → Generator took 3.03 seconds\n",
            "  → Reranker took 0.24 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 37%|███▋      | 112/300 [11:04<15:10,  4.84s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.63 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 40\n",
            "  → Generator took 3.47 seconds\n",
            "  → Reranker took 0.27 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 38%|███▊      | 113/300 [11:10<15:59,  5.13s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.67 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.27 seconds | best k = 40\n",
            "  → Generator took 3.06 seconds\n",
            "  → Reranker took 0.20 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 38%|███▊      | 114/300 [11:15<15:32,  5.02s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.03 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.17 seconds | best k = 40\n",
            "  → Generator took 3.27 seconds\n",
            "  → Reranker took 0.33 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 38%|███▊      | 115/300 [11:19<15:12,  4.93s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.78 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.12 seconds\n",
            "\n",
            "  → Clusterer took 0.21 seconds | best k = 40\n",
            "  → Generator took 3.19 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 39%|███▊      | 116/300 [11:24<14:24,  4.70s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.60 seconds\n",
            "Tf-Idf embeddings generated in 0.08 seconds\n",
            "Reduced dimensions in 0.45 seconds\n",
            "\n",
            "  → Clusterer took 0.28 seconds | best k = 40\n",
            "  → Generator took 3.98 seconds\n",
            "  → Reranker took 0.59 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 39%|███▉      | 117/300 [11:30<15:36,  5.12s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.70 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.09 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 40\n",
            "  → Generator took 3.52 seconds\n",
            "  → Reranker took 0.22 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 39%|███▉      | 118/300 [11:35<15:30,  5.11s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.03 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.11 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 3.26 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 40%|███▉      | 119/300 [11:39<14:43,  4.88s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.73 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 50\n",
            "  → Generator took 5.30 seconds\n",
            "  → Reranker took 0.79 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 40%|████      | 120/300 [11:48<18:35,  6.20s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 2.79 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.25 seconds | best k = 40\n",
            "  → Generator took 3.84 seconds\n",
            "  → Reranker took 0.37 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 40%|████      | 121/300 [11:54<18:27,  6.19s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.51 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.12 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 40\n",
            "  → Generator took 3.09 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 41%|████      | 122/300 [11:59<16:28,  5.55s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.61 seconds\n",
            "Tf-Idf embeddings generated in 0.19 seconds\n",
            "Reduced dimensions in 0.84 seconds\n",
            "\n",
            "  → Clusterer took 0.57 seconds | best k = 40\n",
            "  → Generator took 3.25 seconds\n",
            "  → Reranker took 2.07 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 41%|████      | 123/300 [12:07<18:58,  6.43s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.53 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.77 seconds\n",
            "\n",
            "  → Clusterer took 0.69 seconds | best k = 40\n",
            "  → Generator took 2.71 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 41%|████▏     | 124/300 [12:12<17:28,  5.96s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.61 seconds\n",
            "Tf-Idf embeddings generated in 0.17 seconds\n",
            "Reduced dimensions in 0.91 seconds\n",
            "\n",
            "  → Clusterer took 0.61 seconds | best k = 40\n",
            "  → Generator took 3.45 seconds\n",
            "  → Reranker took 0.36 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 42%|████▏     | 125/300 [12:19<18:31,  6.35s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.73 seconds\n",
            "Tf-Idf embeddings generated in 0.48 seconds\n",
            "Reduced dimensions in 2.06 seconds\n",
            "\n",
            "  → Clusterer took 0.49 seconds | best k = 40\n",
            "  → Generator took 3.08 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 42%|████▏     | 126/300 [12:26<19:00,  6.55s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.90 seconds\n",
            "Tf-Idf embeddings generated in 0.73 seconds\n",
            "Reduced dimensions in 0.27 seconds\n",
            "\n",
            "  → Clusterer took 0.28 seconds | best k = 40\n",
            "  → Generator took 3.38 seconds\n",
            "  → Reranker took 0.39 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 42%|████▏     | 127/300 [12:32<18:20,  6.36s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.85 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.60 seconds\n",
            "\n",
            "  → Clusterer took 0.38 seconds | best k = 45\n",
            "  → Generator took 5.41 seconds\n",
            "  → Reranker took 1.19 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 43%|████▎     | 128/300 [12:42<21:03,  7.35s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.99 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.12 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 3.13 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 43%|████▎     | 129/300 [12:46<18:09,  6.37s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.61 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.49 seconds\n",
            "\n",
            "  → Clusterer took 0.31 seconds | best k = 55\n",
            "  → Generator took 4.73 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 43%|████▎     | 130/300 [12:52<18:00,  6.35s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.71 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.36 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 45\n",
            "  → Generator took 5.23 seconds\n",
            "  → Reranker took 1.25 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 44%|████▎     | 131/300 [13:01<19:39,  6.98s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.36 seconds\n",
            "Tf-Idf embeddings generated in 0.10 seconds\n",
            "Reduced dimensions in 1.00 seconds\n",
            "\n",
            "  → Clusterer took 0.52 seconds | best k = 65\n",
            "  → Generator took 5.02 seconds\n",
            "  → Reranker took 1.42 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 44%|████▍     | 132/300 [13:10<21:17,  7.61s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.00 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.81 seconds\n",
            "\n",
            "  → Clusterer took 0.54 seconds | best k = 40\n",
            "  → Generator took 3.80 seconds\n",
            "  → Reranker took 2.58 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 44%|████▍     | 133/300 [13:18<22:10,  7.97s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.02 seconds\n",
            "Tf-Idf embeddings generated in 0.08 seconds\n",
            "Reduced dimensions in 0.43 seconds\n",
            "\n",
            "  → Clusterer took 0.34 seconds | best k = 50\n",
            "  → Generator took 3.54 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 45%|████▍     | 134/300 [13:23<19:34,  7.08s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.59 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.36 seconds\n",
            "\n",
            "  → Clusterer took 0.39 seconds | best k = 40\n",
            "  → Generator took 3.06 seconds\n",
            "  → Reranker took 0.36 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 45%|████▌     | 135/300 [13:29<18:05,  6.58s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.18 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.44 seconds\n",
            "\n",
            "  → Clusterer took 0.35 seconds | best k = 40\n",
            "  → Generator took 2.97 seconds\n",
            "  → Reranker took 0.18 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 45%|████▌     | 136/300 [13:34<16:34,  6.07s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.86 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.16 seconds | best k = 40\n",
            "  → Generator took 4.31 seconds\n",
            "  → Reranker took 0.66 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 46%|████▌     | 137/300 [13:40<16:50,  6.20s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.21 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.12 seconds\n",
            "\n",
            "  → Clusterer took 0.23 seconds | best k = 40\n",
            "  → Generator took 3.14 seconds\n",
            "  → Reranker took 0.22 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 46%|████▌     | 138/300 [13:45<15:33,  5.76s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.98 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 45\n",
            "  → Generator took 3.36 seconds\n",
            "  → Reranker took 0.09 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 46%|████▋     | 139/300 [13:50<14:40,  5.47s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.97 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.19 seconds\n",
            "\n",
            "  → Clusterer took 0.16 seconds | best k = 45\n",
            "  → Generator took 3.42 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 47%|████▋     | 140/300 [13:54<13:49,  5.19s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.69 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.24 seconds\n",
            "\n",
            "  → Clusterer took 0.35 seconds | best k = 40\n",
            "  → Generator took 3.30 seconds\n",
            "  → Reranker took 0.17 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 47%|████▋     | 141/300 [13:59<13:42,  5.17s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.01 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.12 seconds\n",
            "\n",
            "  → Clusterer took 0.17 seconds | best k = 40\n",
            "  → Generator took 3.38 seconds\n",
            "  → Reranker took 0.18 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 47%|████▋     | 142/300 [14:05<13:33,  5.15s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.21 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 45\n",
            "  → Generator took 3.28 seconds\n",
            "  → Reranker took 0.14 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 48%|████▊     | 143/300 [14:09<12:58,  4.96s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.72 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.18 seconds\n",
            "\n",
            "  → Clusterer took 0.17 seconds | best k = 40\n",
            "  → Generator took 3.30 seconds\n",
            "  → Reranker took 0.26 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 48%|████▊     | 144/300 [14:14<12:54,  4.96s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.02 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.09 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 3.43 seconds\n",
            "  → Reranker took 0.11 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 48%|████▊     | 145/300 [14:19<13:08,  5.09s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.54 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.26 seconds\n",
            "\n",
            "  → Clusterer took 0.40 seconds | best k = 40\n",
            "  → Generator took 6.00 seconds\n",
            "  → Reranker took 0.54 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 49%|████▊     | 146/300 [14:30<17:02,  6.64s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 2.99 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.11 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 2.96 seconds\n",
            "  → Reranker took 0.26 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 49%|████▉     | 147/300 [14:34<15:04,  5.91s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.67 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.31 seconds\n",
            "\n",
            "  → Clusterer took 0.39 seconds | best k = 45\n",
            "  → Generator took 5.28 seconds\n",
            "  → Reranker took 1.18 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 49%|████▉     | 148/300 [14:43<17:20,  6.84s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.78 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 3.15 seconds\n",
            "  → Reranker took 0.31 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 50%|████▉     | 149/300 [14:48<15:54,  6.32s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.27 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.10 seconds\n",
            "\n",
            "  → Clusterer took 0.15 seconds | best k = 45\n",
            "  → Generator took 3.46 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 50%|█████     | 150/300 [14:52<14:24,  5.76s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.70 seconds\n",
            "✅ Saved batch 2 to /content/drive/MyDrive/SIGIR2025_LiveRAG/batch_2_live.jsonl\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.09 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 45\n",
            "  → Generator took 3.66 seconds\n",
            "  → Reranker took 0.39 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 50%|█████     | 151/300 [14:58<14:07,  5.69s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.13 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.67 seconds\n",
            "\n",
            "  → Clusterer took 0.39 seconds | best k = 40\n",
            "  → Generator took 3.61 seconds\n",
            "  → Reranker took 1.53 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 51%|█████     | 152/300 [15:05<15:15,  6.18s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.05 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.18 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 3.01 seconds\n",
            "  → Reranker took 0.21 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 51%|█████     | 153/300 [15:11<15:01,  6.13s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 2.38 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.85 seconds\n",
            "\n",
            "  → Clusterer took 0.24 seconds | best k = 65\n",
            "  → Generator took 4.57 seconds\n",
            "  → Reranker took 0.24 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 51%|█████▏    | 154/300 [15:18<15:35,  6.41s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.07 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 2.58 seconds\n",
            "  → Reranker took 0.08 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 52%|█████▏    | 155/300 [15:22<13:36,  5.63s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.77 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 2.59 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 52%|█████▏    | 156/300 [15:26<12:01,  5.01s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.58 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.63 seconds\n",
            "\n",
            "  → Clusterer took 0.35 seconds | best k = 55\n",
            "  → Generator took 3.81 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 52%|█████▏    | 157/300 [15:31<12:18,  5.16s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.68 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 45\n",
            "  → Generator took 3.51 seconds\n",
            "  → Reranker took 0.23 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 53%|█████▎    | 158/300 [15:37<12:57,  5.48s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 2.11 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.74 seconds\n",
            "\n",
            "  → Clusterer took 0.39 seconds | best k = 55\n",
            "  → Generator took 6.32 seconds\n",
            "  → Reranker took 0.62 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 53%|█████▎    | 159/300 [15:47<15:28,  6.58s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.02 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.16 seconds | best k = 40\n",
            "  → Generator took 3.66 seconds\n",
            "  → Reranker took 0.42 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 53%|█████▎    | 160/300 [15:52<14:38,  6.28s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.12 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.67 seconds\n",
            "\n",
            "  → Clusterer took 0.25 seconds | best k = 50\n",
            "  → Generator took 4.67 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 54%|█████▎    | 161/300 [15:58<14:31,  6.27s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.60 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.10 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 3.33 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 54%|█████▍    | 162/300 [16:03<13:19,  5.80s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.03 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.17 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 3.52 seconds\n",
            "  → Reranker took 0.18 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 54%|█████▍    | 163/300 [16:08<12:32,  5.50s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.68 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.17 seconds | best k = 40\n",
            "  → Generator took 3.13 seconds\n",
            "  → Reranker took 0.18 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 55%|█████▍    | 164/300 [16:14<12:44,  5.62s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 2.26 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.10 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 2.95 seconds\n",
            "  → Reranker took 0.26 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 55%|█████▌    | 165/300 [16:18<11:57,  5.31s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.06 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.42 seconds\n",
            "\n",
            "  → Clusterer took 0.28 seconds | best k = 40\n",
            "  → Generator took 3.07 seconds\n",
            "  → Reranker took 0.19 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 55%|█████▌    | 166/300 [16:23<11:23,  5.10s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.59 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.17 seconds | best k = 40\n",
            "  → Generator took 3.35 seconds\n",
            "  → Reranker took 0.32 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 56%|█████▌    | 167/300 [16:29<12:08,  5.47s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 2.31 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.18 seconds\n",
            "\n",
            "  → Clusterer took 0.38 seconds | best k = 40\n",
            "  → Generator took 4.44 seconds\n",
            "  → Reranker took 0.25 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 56%|█████▌    | 168/300 [16:36<12:37,  5.74s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.06 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.17 seconds | best k = 40\n",
            "  → Generator took 3.05 seconds\n",
            "  → Reranker took 0.20 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 56%|█████▋    | 169/300 [16:40<11:52,  5.44s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.13 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.11 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 45\n",
            "  → Generator took 6.46 seconds\n",
            "  → Reranker took 0.57 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 57%|█████▋    | 170/300 [16:49<13:55,  6.43s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.37 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.15 seconds | best k = 50\n",
            "  → Generator took 5.58 seconds\n",
            "  → Reranker took 0.57 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 57%|█████▋    | 171/300 [16:57<14:24,  6.70s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.85 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.54 seconds\n",
            "\n",
            "  → Clusterer took 0.36 seconds | best k = 60\n",
            "Error: APIError: - HTTP 502 error: <html>\n",
            "<head><title>502 Bad Gateway</title></head>\n",
            "<body>\n",
            "<center><h1>502 Bad Gateway</h1></center>\n",
            "</body>\n",
            "</html>\n",
            " | Request 3 | Retry 0\n",
            "  → Generator took 9.51 seconds\n",
            "  → Reranker took 0.90 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 57%|█████▋    | 172/300 [17:09<17:52,  8.38s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.92 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.29 seconds\n",
            "\n",
            "  → Clusterer took 0.39 seconds | best k = 60\n",
            "  → Generator took 4.33 seconds\n",
            "  → Reranker took 0.19 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 58%|█████▊    | 173/300 [17:15<16:29,  7.79s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.14 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.11 seconds\n",
            "\n",
            "  → Clusterer took 0.21 seconds | best k = 40\n",
            "  → Generator took 3.37 seconds\n",
            "  → Reranker took 0.22 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 58%|█████▊    | 174/300 [17:20<14:30,  6.91s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.89 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.17 seconds | best k = 40\n",
            "  → Generator took 3.10 seconds\n",
            "  → Reranker took 0.25 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 58%|█████▊    | 175/300 [17:25<13:06,  6.29s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.15 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.09 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 2.82 seconds\n",
            "  → Reranker took 0.09 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 59%|█████▊    | 176/300 [17:29<11:40,  5.65s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.92 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.17 seconds | best k = 40\n",
            "  → Generator took 5.53 seconds\n",
            "  → Reranker took 0.64 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 59%|█████▉    | 177/300 [17:37<13:01,  6.35s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.47 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.21 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 3.62 seconds\n",
            "  → Reranker took 0.26 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 59%|█████▉    | 178/300 [17:44<13:13,  6.51s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 2.56 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.11 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 45\n",
            "  → Generator took 4.44 seconds\n",
            "  → Reranker took 0.76 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 60%|█████▉    | 179/300 [17:51<13:27,  6.68s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.55 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 40\n",
            "  → Generator took 3.49 seconds\n",
            "  → Reranker took 0.38 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 60%|██████    | 180/300 [17:56<12:14,  6.12s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.58 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.12 seconds\n",
            "\n",
            "  → Clusterer took 0.17 seconds | best k = 40\n",
            "  → Generator took 2.92 seconds\n",
            "  → Reranker took 0.10 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 60%|██████    | 181/300 [18:00<10:57,  5.52s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.78 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.84 seconds\n",
            "\n",
            "  → Clusterer took 0.27 seconds | best k = 40\n",
            "  → Generator took 2.95 seconds\n",
            "  → Reranker took 0.25 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 61%|██████    | 182/300 [18:06<10:56,  5.56s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.25 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 40\n",
            "  → Generator took 3.14 seconds\n",
            "  → Reranker took 0.34 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 61%|██████    | 183/300 [18:11<10:29,  5.38s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.10 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 55\n",
            "  → Generator took 5.09 seconds\n",
            "  → Reranker took 0.81 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 61%|██████▏   | 184/300 [18:18<11:42,  6.06s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.39 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.16 seconds\n",
            "\n",
            "  → Clusterer took 0.16 seconds | best k = 40\n",
            "  → Generator took 3.11 seconds\n",
            "  → Reranker took 0.26 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 62%|██████▏   | 185/300 [18:23<10:52,  5.67s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.04 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.12 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 45\n",
            "  → Generator took 5.71 seconds\n",
            "  → Reranker took 0.92 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 62%|██████▏   | 186/300 [18:32<12:39,  6.66s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.98 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.17 seconds | best k = 40\n",
            "  → Generator took 4.18 seconds\n",
            "  → Reranker took 0.75 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 62%|██████▏   | 187/300 [18:39<12:28,  6.63s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.26 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.69 seconds\n",
            "\n",
            "  → Clusterer took 0.37 seconds | best k = 40\n",
            "  → Generator took 3.20 seconds\n",
            "  → Reranker took 0.19 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 63%|██████▎   | 188/300 [18:44<11:45,  6.30s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.99 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.16 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 2.72 seconds\n",
            "  → Reranker took 0.09 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 63%|██████▎   | 189/300 [18:48<10:18,  5.57s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.69 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.16 seconds | best k = 40\n",
            "  → Generator took 3.18 seconds\n",
            "  → Reranker took 0.60 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 63%|██████▎   | 190/300 [18:54<10:26,  5.70s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.85 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.28 seconds\n",
            "\n",
            "  → Clusterer took 0.37 seconds | best k = 40\n",
            "  → Generator took 3.39 seconds\n",
            "  → Reranker took 0.11 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 64%|██████▎   | 191/300 [19:00<10:20,  5.69s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.46 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.18 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 3.41 seconds\n",
            "  → Reranker took 0.36 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 64%|██████▍   | 192/300 [19:05<09:49,  5.46s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.72 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.24 seconds | best k = 55\n",
            "  → Generator took 4.23 seconds\n",
            "  → Reranker took 0.13 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 64%|██████▍   | 193/300 [19:11<10:03,  5.64s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.31 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.11 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 40\n",
            "  → Generator took 3.34 seconds\n",
            "  → Reranker took 0.36 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 65%|██████▍   | 194/300 [19:16<09:51,  5.58s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.38 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 60\n",
            "  → Generator took 4.43 seconds\n",
            "  → Reranker took 0.27 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 65%|██████▌   | 195/300 [19:22<10:04,  5.75s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.07 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.16 seconds\n",
            "\n",
            "  → Clusterer took 0.21 seconds | best k = 40\n",
            "  → Generator took 3.78 seconds\n",
            "  → Reranker took 0.20 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 65%|██████▌   | 196/300 [19:28<09:47,  5.65s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.02 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.24 seconds\n",
            "\n",
            "  → Clusterer took 0.21 seconds | best k = 40\n",
            "  → Generator took 4.17 seconds\n",
            "  → Reranker took 1.13 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 66%|██████▌   | 197/300 [19:35<10:29,  6.11s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.35 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 45\n",
            "  → Generator took 3.81 seconds\n",
            "  → Reranker took 0.69 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 66%|██████▌   | 198/300 [19:41<10:23,  6.11s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.25 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.19 seconds\n",
            "\n",
            "  → Clusterer took 0.16 seconds | best k = 40\n",
            "  → Generator took 2.84 seconds\n",
            "  → Reranker took 0.20 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 66%|██████▋   | 199/300 [19:45<09:18,  5.53s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.73 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.39 seconds\n",
            "\n",
            "  → Clusterer took 0.33 seconds | best k = 45\n",
            "  → Generator took 3.31 seconds\n",
            "  → Reranker took 0.08 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 67%|██████▋   | 200/300 [19:50<08:50,  5.30s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.58 seconds\n",
            "✅ Saved batch 3 to /content/drive/MyDrive/SIGIR2025_LiveRAG/batch_3_live.jsonl\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.63 seconds\n",
            "\n",
            "  → Clusterer took 0.37 seconds | best k = 40\n",
            "  → Generator took 3.09 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 67%|██████▋   | 201/300 [19:55<08:32,  5.17s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.69 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 50\n",
            "  → Generator took 4.38 seconds\n",
            "  → Reranker took 0.38 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 67%|██████▋   | 202/300 [20:01<08:50,  5.42s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.86 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 3.09 seconds\n",
            "  → Reranker took 0.14 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 68%|██████▊   | 203/300 [20:05<08:22,  5.18s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.02 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.11 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 45\n",
            "  → Generator took 3.25 seconds\n",
            "  → Reranker took 0.22 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 68%|██████▊   | 204/300 [20:11<08:33,  5.35s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.93 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.34 seconds\n",
            "\n",
            "  → Clusterer took 0.33 seconds | best k = 40\n",
            "  → Generator took 2.98 seconds\n",
            "  → Reranker took 0.15 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 68%|██████▊   | 205/300 [20:17<08:41,  5.49s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.94 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 40\n",
            "  → Generator took 3.20 seconds\n",
            "  → Reranker took 0.25 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 69%|██████▊   | 206/300 [20:23<08:53,  5.68s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 2.27 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.26 seconds\n",
            "\n",
            "  → Clusterer took 0.28 seconds | best k = 55\n",
            "  → Generator took 5.53 seconds\n",
            "  → Reranker took 0.55 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 69%|██████▉   | 207/300 [20:32<10:08,  6.55s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.88 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.16 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 60\n",
            "  → Generator took 4.71 seconds\n",
            "  → Reranker took 0.54 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 69%|██████▉   | 208/300 [20:38<10:02,  6.55s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.91 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 50\n",
            "  → Generator took 4.36 seconds\n",
            "  → Reranker took 0.65 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 70%|██████▉   | 209/300 [20:46<10:20,  6.82s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 2.04 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.17 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 3.04 seconds\n",
            "  → Reranker took 0.22 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 70%|███████   | 210/300 [20:52<10:00,  6.67s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 2.64 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.12 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 50\n",
            "  → Generator took 3.86 seconds\n",
            "  → Reranker took 0.13 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 70%|███████   | 211/300 [20:57<09:21,  6.30s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.11 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.51 seconds\n",
            "\n",
            "  → Clusterer took 0.25 seconds | best k = 40\n",
            "  → Generator took 2.67 seconds\n",
            "  → Reranker took 0.18 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 71%|███████   | 212/300 [21:02<08:20,  5.69s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.61 seconds\n",
            "Tf-Idf embeddings generated in 0.08 seconds\n",
            "Reduced dimensions in 0.80 seconds\n",
            "\n",
            "  → Clusterer took 0.39 seconds | best k = 40\n",
            "  → Generator took 4.48 seconds\n",
            "  → Reranker took 0.90 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 71%|███████   | 213/300 [21:09<09:08,  6.31s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.09 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.09 seconds\n",
            "\n",
            "  → Clusterer took 0.17 seconds | best k = 40\n",
            "  → Generator took 6.46 seconds\n",
            "  → Reranker took 0.88 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 71%|███████▏  | 214/300 [21:20<10:50,  7.56s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 2.85 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.10 seconds\n",
            "\n",
            "  → Clusterer took 0.15 seconds | best k = 55\n",
            "  → Generator took 3.93 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 72%|███████▏  | 215/300 [21:25<09:32,  6.74s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.60 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 55\n",
            "  → Generator took 4.11 seconds\n",
            "  → Reranker took 0.51 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 72%|███████▏  | 216/300 [21:31<09:12,  6.58s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.19 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.12 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 45\n",
            "  → Generator took 3.70 seconds\n",
            "  → Reranker took 0.29 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 72%|███████▏  | 217/300 [21:36<08:30,  6.15s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.82 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.16 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 2.60 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 73%|███████▎  | 218/300 [21:40<07:20,  5.38s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.58 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.88 seconds\n",
            "\n",
            "  → Clusterer took 0.78 seconds | best k = 40\n",
            "  → Generator took 3.14 seconds\n",
            "  → Reranker took 0.21 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 73%|███████▎  | 219/300 [21:46<07:29,  5.55s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.88 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 55\n",
            "  → Generator took 4.03 seconds\n",
            "  → Reranker took 0.17 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 73%|███████▎  | 220/300 [21:51<07:30,  5.63s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.22 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.19 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 2.92 seconds\n",
            "  → Reranker took 0.09 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 74%|███████▎  | 221/300 [21:55<06:46,  5.14s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.58 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.57 seconds\n",
            "\n",
            "  → Clusterer took 0.26 seconds | best k = 40\n",
            "  → Generator took 2.88 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 74%|███████▍  | 222/300 [22:00<06:23,  4.92s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.62 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 45\n",
            "  → Generator took 3.27 seconds\n",
            "  → Reranker took 0.22 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 74%|███████▍  | 223/300 [22:04<06:12,  4.83s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.76 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.09 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 2.96 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 75%|███████▍  | 224/300 [22:08<05:45,  4.55s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.60 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.69 seconds\n",
            "\n",
            "  → Clusterer took 0.28 seconds | best k = 40\n",
            "  → Generator took 3.52 seconds\n",
            "  → Reranker took 0.16 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 75%|███████▌  | 225/300 [22:14<06:04,  4.86s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.86 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.23 seconds | best k = 40\n",
            "  → Generator took 2.95 seconds\n",
            "  → Reranker took 0.19 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 75%|███████▌  | 226/300 [22:19<05:59,  4.86s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.30 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 55\n",
            "  → Generator took 5.31 seconds\n",
            "  → Reranker took 0.19 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 76%|███████▌  | 227/300 [22:25<06:33,  5.40s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.78 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 3.00 seconds\n",
            "  → Reranker took 0.20 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 76%|███████▌  | 228/300 [22:30<06:12,  5.17s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.09 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 55\n",
            "  → Generator took 3.95 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 76%|███████▋  | 229/300 [22:35<06:02,  5.10s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.60 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.28 seconds\n",
            "\n",
            "  → Clusterer took 0.31 seconds | best k = 40\n",
            "  → Generator took 2.98 seconds\n",
            "  → Reranker took 0.23 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 77%|███████▋  | 230/300 [22:40<05:51,  5.03s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.99 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.18 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 2.59 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 77%|███████▋  | 231/300 [22:43<05:16,  4.59s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.57 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.10 seconds\n",
            "\n",
            "  → Clusterer took 0.16 seconds | best k = 40\n",
            "  → Generator took 3.05 seconds\n",
            "  → Reranker took 0.23 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 77%|███████▋  | 232/300 [22:48<05:14,  4.63s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.14 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.29 seconds\n",
            "\n",
            "  → Clusterer took 0.15 seconds | best k = 45\n",
            "  → Generator took 3.62 seconds\n",
            "  → Reranker took 0.29 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 78%|███████▊  | 233/300 [22:54<05:40,  5.09s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.74 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.11 seconds\n",
            "\n",
            "  → Clusterer took 0.22 seconds | best k = 40\n",
            "  → Generator took 2.74 seconds\n",
            "  → Reranker took 0.13 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 78%|███████▊  | 234/300 [22:58<05:12,  4.74s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.69 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.10 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 45\n",
            "  → Generator took 3.56 seconds\n",
            "  → Reranker took 0.30 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 78%|███████▊  | 235/300 [23:03<05:09,  4.76s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.60 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 3.08 seconds\n",
            "  → Reranker took 0.34 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 79%|███████▊  | 236/300 [23:09<05:22,  5.03s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.87 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 3.40 seconds\n",
            "  → Reranker took 1.16 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 79%|███████▉  | 237/300 [23:15<05:32,  5.28s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.93 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 40\n",
            "  → Generator took 2.69 seconds\n",
            "  → Reranker took 0.10 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 79%|███████▉  | 238/300 [23:19<05:02,  4.88s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.76 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 2.99 seconds\n",
            "  → Reranker took 0.15 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 80%|███████▉  | 239/300 [23:23<04:56,  4.86s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.28 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.18 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 4.44 seconds\n",
            "  → Reranker took 1.08 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 80%|████████  | 240/300 [23:31<05:37,  5.63s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.48 seconds\n",
            "Tf-Idf embeddings generated in 0.08 seconds\n",
            "Reduced dimensions in 0.30 seconds\n",
            "\n",
            "  → Clusterer took 0.25 seconds | best k = 40\n",
            "  → Generator took 2.96 seconds\n",
            "  → Reranker took 0.35 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 80%|████████  | 241/300 [23:36<05:17,  5.38s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.82 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.16 seconds\n",
            "\n",
            "  → Clusterer took 0.16 seconds | best k = 40\n",
            "  → Generator took 4.73 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 81%|████████  | 242/300 [23:41<05:17,  5.47s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.60 seconds\n",
            "Tf-Idf embeddings generated in 0.20 seconds\n",
            "Reduced dimensions in 0.55 seconds\n",
            "\n",
            "  → Clusterer took 0.31 seconds | best k = 40\n",
            "  → Generator took 3.03 seconds\n",
            "  → Reranker took 0.39 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 81%|████████  | 243/300 [23:47<05:09,  5.43s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.83 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 65\n",
            "  → Generator took 4.41 seconds\n",
            "  → Reranker took 0.35 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 81%|████████▏ | 244/300 [23:53<05:17,  5.66s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.05 seconds\n",
            "Tf-Idf embeddings generated in 0.09 seconds\n",
            "Reduced dimensions in 0.46 seconds\n",
            "\n",
            "  → Clusterer took 0.39 seconds | best k = 45\n",
            "  → Generator took 4.18 seconds\n",
            "  → Reranker took 0.65 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 82%|████████▏ | 245/300 [23:59<05:28,  5.97s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.91 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.26 seconds\n",
            "\n",
            "  → Clusterer took 0.41 seconds | best k = 40\n",
            "  → Generator took 3.18 seconds\n",
            "  → Reranker took 0.52 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 82%|████████▏ | 246/300 [24:05<05:18,  5.90s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.28 seconds\n",
            "Tf-Idf embeddings generated in 0.15 seconds\n",
            "Reduced dimensions in 0.30 seconds\n",
            "\n",
            "  → Clusterer took 0.45 seconds | best k = 40\n",
            "  → Generator took 4.41 seconds\n",
            "  → Reranker took 1.32 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 82%|████████▏ | 247/300 [24:13<05:38,  6.38s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.85 seconds\n",
            "Tf-Idf embeddings generated in 0.08 seconds\n",
            "Reduced dimensions in 0.24 seconds\n",
            "\n",
            "  → Clusterer took 0.38 seconds | best k = 40\n",
            "  → Generator took 2.88 seconds\n",
            "  → Reranker took 0.11 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 83%|████████▎ | 248/300 [24:17<05:04,  5.85s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.90 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.15 seconds | best k = 45\n",
            "  → Generator took 3.66 seconds\n",
            "  → Reranker took 0.63 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 83%|████████▎ | 249/300 [24:23<05:00,  5.88s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.34 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.17 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 40\n",
            "  → Generator took 3.04 seconds\n",
            "  → Reranker took 0.42 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 83%|████████▎ | 250/300 [24:28<04:40,  5.60s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.03 seconds\n",
            "✅ Saved batch 4 to /content/drive/MyDrive/SIGIR2025_LiveRAG/batch_4_live.jsonl\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 40\n",
            "  → Generator took 6.18 seconds\n",
            "  → Reranker took 1.21 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 84%|████████▎ | 251/300 [24:38<05:37,  6.88s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 2.09 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 3.39 seconds\n",
            "  → Reranker took 0.48 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 84%|████████▍ | 252/300 [24:44<05:12,  6.51s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.43 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.30 seconds\n",
            "\n",
            "  → Clusterer took 0.32 seconds | best k = 40\n",
            "  → Generator took 3.00 seconds\n",
            "  → Reranker took 0.37 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 84%|████████▍ | 253/300 [24:49<04:48,  6.13s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.16 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.17 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 2.85 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 85%|████████▍ | 254/300 [24:53<04:10,  5.44s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.57 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 3.03 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 85%|████████▌ | 255/300 [24:57<03:46,  5.04s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.69 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.21 seconds | best k = 40\n",
            "  → Generator took 3.83 seconds\n",
            "  → Reranker took 0.30 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 85%|████████▌ | 256/300 [25:02<03:48,  5.18s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.01 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 3.24 seconds\n",
            "  → Reranker took 0.22 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 86%|████████▌ | 257/300 [25:08<03:43,  5.20s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.40 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.17 seconds | best k = 50\n",
            "  → Generator took 3.67 seconds\n",
            "  → Reranker took 0.23 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 86%|████████▌ | 258/300 [25:13<03:39,  5.23s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.04 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.72 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 3.24 seconds\n",
            "  → Reranker took 0.22 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 86%|████████▋ | 259/300 [25:18<03:37,  5.31s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.06 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.19 seconds\n",
            "\n",
            "  → Clusterer took 0.21 seconds | best k = 40\n",
            "  → Generator took 3.34 seconds\n",
            "  → Reranker took 0.15 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 87%|████████▋ | 260/300 [25:23<03:26,  5.17s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.91 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 45\n",
            "  → Generator took 3.68 seconds\n",
            "  → Reranker took 0.10 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 87%|████████▋ | 261/300 [25:28<03:19,  5.12s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.84 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 3.60 seconds\n",
            "  → Reranker took 0.64 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 87%|████████▋ | 262/300 [25:34<03:22,  5.33s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.21 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 3.07 seconds\n",
            "  → Reranker took 1.09 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 88%|████████▊ | 263/300 [25:40<03:19,  5.39s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.98 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 3.64 seconds\n",
            "  → Reranker took 0.76 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 88%|████████▊ | 264/300 [25:46<03:22,  5.63s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.40 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 2.96 seconds\n",
            "  → Reranker took 0.20 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 88%|████████▊ | 265/300 [25:50<03:04,  5.27s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.90 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.24 seconds\n",
            "\n",
            "  → Clusterer took 0.32 seconds | best k = 40\n",
            "  → Generator took 4.24 seconds\n",
            "  → Reranker took 0.65 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 89%|████████▊ | 266/300 [25:58<03:22,  5.96s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 2.04 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.17 seconds | best k = 40\n",
            "  → Generator took 3.21 seconds\n",
            "  → Reranker took 0.19 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 89%|████████▉ | 267/300 [26:03<03:06,  5.64s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.16 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.22 seconds\n",
            "\n",
            "  → Clusterer took 0.35 seconds | best k = 45\n",
            "  → Generator took 3.33 seconds\n",
            "  → Reranker took 0.18 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 89%|████████▉ | 268/300 [26:09<03:05,  5.80s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 2.02 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.18 seconds\n",
            "\n",
            "  → Clusterer took 0.17 seconds | best k = 40\n",
            "  → Generator took 2.60 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 90%|████████▉ | 269/300 [26:13<02:39,  5.14s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.59 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 45\n",
            "  → Generator took 4.38 seconds\n",
            "  → Reranker took 1.26 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 90%|█████████ | 270/300 [26:20<02:59,  5.97s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.88 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.12 seconds\n",
            "\n",
            "  → Clusterer took 0.23 seconds | best k = 40\n",
            "Error: APIError: - HTTP 502 error: <html>\n",
            "<head><title>502 Bad Gateway</title></head>\n",
            "<body>\n",
            "<center><h1>502 Bad Gateway</h1></center>\n",
            "</body>\n",
            "</html>\n",
            " | Request 3 | Retry 0\n",
            "  → Generator took 8.52 seconds\n",
            "  → Reranker took 0.80 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 90%|█████████ | 271/300 [26:33<03:46,  7.81s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 2.39 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.17 seconds\n",
            "\n",
            "  → Clusterer took 0.17 seconds | best k = 40\n",
            "  → Generator took 3.07 seconds\n",
            "  → Reranker took 0.11 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 91%|█████████ | 272/300 [26:39<03:23,  7.27s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 2.43 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.18 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 3.13 seconds\n",
            "  → Reranker took 0.37 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 91%|█████████ | 273/300 [26:43<02:56,  6.52s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.85 seconds\n",
            "Tf-Idf embeddings generated in 0.08 seconds\n",
            "Reduced dimensions in 0.24 seconds\n",
            "\n",
            "  → Clusterer took 0.40 seconds | best k = 40\n",
            "  → Generator took 3.03 seconds\n",
            "  → Reranker took 0.10 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 91%|█████████▏| 274/300 [26:48<02:34,  5.96s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.79 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.12 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 3.02 seconds\n",
            "  → Reranker took 0.28 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 92%|█████████▏| 275/300 [26:53<02:24,  5.76s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.64 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.12 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 50\n",
            "  → Generator took 3.61 seconds\n",
            "  → Reranker took 0.10 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 92%|█████████▏| 276/300 [26:58<02:11,  5.50s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.81 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.12 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 40\n",
            "  → Generator took 3.13 seconds\n",
            "  → Reranker took 0.15 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 92%|█████████▏| 277/300 [27:03<02:00,  5.23s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.97 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 40\n",
            "  → Generator took 2.91 seconds\n",
            "  → Reranker took 0.20 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 93%|█████████▎| 278/300 [27:07<01:49,  4.96s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.81 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.19 seconds\n",
            "\n",
            "  → Clusterer took 0.37 seconds | best k = 40\n",
            "  → Generator took 4.42 seconds\n",
            "  → Reranker took 1.05 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 93%|█████████▎| 279/300 [27:14<01:59,  5.68s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.30 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 50\n",
            "  → Generator took 3.46 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 93%|█████████▎| 280/300 [27:19<01:46,  5.31s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.58 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.10 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 40\n",
            "  → Generator took 3.74 seconds\n",
            "  → Reranker took 0.38 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 94%|█████████▎| 281/300 [27:24<01:39,  5.24s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.61 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.22 seconds | best k = 40\n",
            "  → Generator took 4.20 seconds\n",
            "  → Reranker took 0.73 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 94%|█████████▍| 282/300 [27:30<01:40,  5.59s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.10 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.18 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 50\n",
            "  → Generator took 3.48 seconds\n",
            "  → Reranker took 0.26 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 94%|█████████▍| 283/300 [27:35<01:32,  5.41s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.84 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.11 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 55\n",
            "  → Generator took 4.00 seconds\n",
            "  → Reranker took 0.27 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 95%|█████████▍| 284/300 [27:41<01:28,  5.51s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.09 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.18 seconds\n",
            "\n",
            "  → Clusterer took 0.16 seconds | best k = 40\n",
            "  → Generator took 2.96 seconds\n",
            "  → Reranker took 0.17 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 95%|█████████▌| 285/300 [27:46<01:19,  5.27s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.19 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.72 seconds\n",
            "\n",
            "  → Clusterer took 0.30 seconds | best k = 40\n",
            "  → Generator took 3.75 seconds\n",
            "  → Reranker took 0.70 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 95%|█████████▌| 286/300 [27:52<01:19,  5.68s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.06 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.17 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 2.64 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 96%|█████████▌| 287/300 [27:56<01:05,  5.06s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.60 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 3.20 seconds\n",
            "  → Reranker took 0.57 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 96%|█████████▌| 288/300 [28:01<01:01,  5.10s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.04 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.23 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 2.56 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 96%|█████████▋| 289/300 [28:05<00:51,  4.66s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.59 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.18 seconds\n",
            "\n",
            "  → Clusterer took 0.14 seconds | best k = 55\n",
            "  → Generator took 4.08 seconds\n",
            "  → Reranker took 0.36 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 97%|█████████▋| 290/300 [28:10<00:49,  4.93s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.75 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.12 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 3.29 seconds\n",
            "  → Reranker took 0.35 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 97%|█████████▋| 291/300 [28:16<00:45,  5.10s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.49 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.17 seconds | best k = 40\n",
            "  → Generator took 3.05 seconds\n",
            "  → Reranker took 0.25 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 97%|█████████▋| 292/300 [28:21<00:40,  5.07s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.33 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.17 seconds | best k = 50\n",
            "  → Generator took 3.33 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 98%|█████████▊| 293/300 [28:25<00:33,  4.83s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.58 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.54 seconds\n",
            "\n",
            "  → Clusterer took 0.35 seconds | best k = 40\n",
            "  → Generator took 2.95 seconds\n",
            "  → Reranker took 0.18 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 98%|█████████▊| 294/300 [28:30<00:29,  4.84s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.80 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.11 seconds\n",
            "\n",
            "  → Clusterer took 0.21 seconds | best k = 40\n",
            "  → Generator took 4.31 seconds\n",
            "  → Reranker took 1.07 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 98%|█████████▊| 295/300 [28:38<00:28,  5.72s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 2.03 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.24 seconds\n",
            "\n",
            "  → Clusterer took 0.26 seconds | best k = 40\n",
            "  → Generator took 3.00 seconds\n",
            "  → Reranker took 0.27 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 99%|█████████▊| 296/300 [28:43<00:21,  5.41s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.85 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.12 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 2.57 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 99%|█████████▉| 297/300 [28:46<00:14,  4.84s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.58 seconds\n",
            "Tf-Idf embeddings generated in 0.57 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 2.64 seconds\n",
            "  → Reranker took 0.10 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 99%|█████████▉| 298/300 [28:51<00:09,  4.73s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.85 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.34 seconds\n",
            "\n",
            "  → Clusterer took 0.37 seconds | best k = 40\n",
            "  → Generator took 2.96 seconds\n",
            "  → Reranker took 0.17 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r100%|█████████▉| 299/300 [28:55<00:04,  4.71s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.77 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.22 seconds | best k = 45\n",
            "  → Generator took 3.26 seconds\n",
            "  → Reranker took 0.18 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "100%|██████████| 300/300 [29:00<00:00,  5.80s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.22 seconds\n",
            "✅ Saved batch 5 to /content/drive/MyDrive/SIGIR2025_LiveRAG/batch_5_live.jsonl\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\n"
          ]
        }
      ]
    },
    {
      "cell_type": "code",
      "source": [
        "#@title retrieve second split passages\n",
        "# Retrieve top-200 candidate passages for the second (last) question split\n",
        "# with three strategies - dense (Pinecone), sparse (OpenSearch), and a merged\n",
        "# hybrid of the two - saving each result set to CSV and printing wall-clock time.\n",
        "print('Retrieving documents')\n",
        "retriever = Retriever(qa_last_split_df, top_k=200)\n",
        "# Dense retrieval from the Pinecone vector index, timed for logging.\n",
        "start = time()\n",
        "dense_df = retriever.retrieve(indexer=\"pinecone\")\n",
        "retriever.save_results(dense_df, \"dense_retr_passages_last_split.csv\")\n",
        "end = time()\n",
        "print(f'Documents Retrieved from Pinecone: {end-start:.2f}')\n",
        "\n",
        "# Sparse (lexical) retrieval, timed separately from the dense pass.\n",
        "start = time()\n",
        "\n",
        "# OpenSearch retrieval\n",
        "sparse_df = retriever.retrieve(indexer=\"opensearch\")\n",
        "retriever.save_results(sparse_df, \"sparse_retr_passages_last_split.csv\")\n",
        "end = time()\n",
        "print(f'Documents Retrieved from Opensearch: {end-start:.2f}')\n",
        "\n",
        "# Hybrid retrieval: re-instantiate the Retriever with both result sets so the\n",
        "# \"merged\" indexer can fuse the dense and sparse rankings.\n",
        "# (NOTE: the #@title below is inert here - Colab only honors it on a cell's first line.)\n",
        "#@title hybrid retrieval\n",
        "start = time()\n",
        "retriever = Retriever(qa_last_split_df, top_k=200, dense_ind=dense_df, sparse_ind=sparse_df)\n",
        "hybrid_df = retriever.retrieve(indexer=\"merged\")\n",
        "# Shift ids by +300 so this split's ids do not collide with the first split's\n",
        "# (presumably 0-299, matching the 300-question first batch - confirm upstream).\n",
        "hybrid_df['id'] = hybrid_df['id'] + 300\n",
        "\n",
        "retriever.save_results(hybrid_df, \"hybrid_retr_passages_last_split.csv\")\n",
        "end = time()\n",
        "print(f'Documents Retrieved from Hybrid: {end-start:.2f}')"
      ],
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "outputId": "bf5cbb18-83e0-477e-e2e5-c9b7d0bcaf1b",
        "id": "2d2Ck77-yZO1"
      },
      "execution_count": null,
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "Retrieving documents\n",
            "Documents Retrieved from Pinecone: 43.06\n",
            "Documents Retrieved from Opensearch: 62.81\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "100%|██████████| 200/200 [00:17<00:00, 11.35it/s]\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "Documents Retrieved from Hybrid: 19.24\n"
          ]
        }
      ]
    },
    {
      "cell_type": "code",
      "source": [
        "#@title run second split\n",
        "# Run the TopClustRAG pipeline over the second (last) question split, using\n",
        "# the hybrid retrieval results produced by the previous cell.\n",
        "generator = AugmentedGenerator(model=\"tiiuae/falcon3-10b-instruct\")\n",
        "retrieval_df = hybrid_df\n",
        "# Attach each retrieved passage to its question via an inner join on id.\n",
        "# NOTE(review): hybrid_df ids were shifted by +300 upstream, so qa_last_split_df\n",
        "# must carry the same shifted id range for this merge to match - confirm.\n",
        "data = pd.merge(retrieval_df, qa_last_split_df, on=\"id\", how=\"inner\")\n",
        "\n",
        "# k_range sweeps candidate cluster counts 40,45,...,65 (the per-query logs show\n",
        "# the clusterer reporting a \"best k\" from this range); n_parallel is presumably\n",
        "# the number of concurrent workers - confirm in run_TopClustRAG.\n",
        "topclustrag_last_split = run_TopClustRAG(\n",
        "    data, generator, reranker, n_parallel=10, k_range=range(40, 70, 5))"
      ],
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "outputId": "ba44099b-3147-4c22-9f93-0f6927650749",
        "collapsed": true,
        "id": "yqDXSm6ZyZO2"
      },
      "execution_count": null,
      "outputs": [
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  0%|          | 0/200 [00:00<?, ?it/s]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 4.43 seconds\n",
            "  → Reranker took 1.43 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  0%|          | 1/200 [00:07<26:21,  7.95s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.67 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 3.03 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  1%|          | 2/200 [00:11<18:34,  5.63s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.59 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 40\n",
            "  → Generator took 2.94 seconds\n",
            "  → Reranker took 0.12 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  2%|▏         | 3/200 [00:16<16:22,  4.99s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.77 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.32 seconds\n",
            "\n",
            "  → Clusterer took 0.30 seconds | best k = 40\n",
            "  → Generator took 3.01 seconds\n",
            "  → Reranker took 0.13 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  2%|▏         | 4/200 [00:21<16:06,  4.93s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.00 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.16 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 3.37 seconds\n",
            "  → Reranker took 0.70 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  2%|▎         | 5/200 [00:26<16:47,  5.17s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.12 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.23 seconds | best k = 40\n",
            "  → Generator took 3.05 seconds\n",
            "  → Reranker took 0.33 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  3%|▎         | 6/200 [00:31<16:30,  5.10s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.17 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.12 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 40\n",
            "  → Generator took 2.80 seconds\n",
            "  → Reranker took 0.11 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  4%|▎         | 7/200 [00:35<15:07,  4.70s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.60 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.11 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 55\n",
            "  → Generator took 4.05 seconds\n",
            "  → Reranker took 0.58 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  4%|▍         | 8/200 [00:41<16:10,  5.05s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.83 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.30 seconds\n",
            "\n",
            "  → Clusterer took 0.27 seconds | best k = 40\n",
            "  → Generator took 3.12 seconds\n",
            "  → Reranker took 0.34 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  4%|▍         | 9/200 [00:47<16:46,  5.27s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.64 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.16 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 60\n",
            "  → Generator took 4.25 seconds\n",
            "  → Reranker took 0.18 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  5%|▌         | 10/200 [00:52<17:14,  5.45s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.01 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 2.77 seconds\n",
            "  → Reranker took 0.18 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  6%|▌         | 11/200 [00:57<16:00,  5.08s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.93 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 40\n",
            "  → Generator took 2.93 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  6%|▌         | 12/200 [01:02<16:12,  5.17s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 2.05 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 2.90 seconds\n",
            "  → Reranker took 0.16 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  6%|▋         | 13/200 [01:07<15:33,  4.99s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.15 seconds\n",
            "Tf-Idf embeddings generated in 0.08 seconds\n",
            "Reduced dimensions in 0.51 seconds\n",
            "\n",
            "  → Clusterer took 0.36 seconds | best k = 60\n",
            "  → Generator took 4.55 seconds\n",
            "  → Reranker took 0.44 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  7%|▋         | 14/200 [01:14<17:39,  5.69s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.37 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.17 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 3.17 seconds\n",
            "  → Reranker took 0.15 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  8%|▊         | 15/200 [01:18<16:24,  5.32s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.73 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.10 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 3.34 seconds\n",
            "  → Reranker took 0.72 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  8%|▊         | 16/200 [01:24<16:13,  5.29s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.83 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.10 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 3.00 seconds\n",
            "  → Reranker took 0.23 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  8%|▊         | 17/200 [01:28<15:21,  5.04s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.88 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.18 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 40\n",
            "  → Generator took 3.36 seconds\n",
            "  → Reranker took 0.39 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r  9%|▉         | 18/200 [01:33<15:36,  5.15s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.22 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.66 seconds\n",
            "\n",
            "  → Clusterer took 0.38 seconds | best k = 40\n",
            "  → Generator took 3.25 seconds\n",
            "  → Reranker took 0.40 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 10%|▉         | 19/200 [01:40<16:34,  5.50s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.54 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 40\n",
            "  → Generator took 2.99 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 10%|█         | 20/200 [01:44<15:12,  5.07s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.68 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 3.29 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 10%|█         | 21/200 [01:48<14:36,  4.90s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.85 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.12 seconds\n",
            "\n",
            "  → Clusterer took 0.29 seconds | best k = 40\n",
            "  → Generator took 4.16 seconds\n",
            "  → Reranker took 0.24 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 11%|█         | 22/200 [01:54<15:01,  5.06s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.58 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.12 seconds\n",
            "\n",
            "  → Clusterer took 0.17 seconds | best k = 55\n",
            "  → Generator took 4.93 seconds\n",
            "  → Reranker took 0.46 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 12%|█▏        | 23/200 [02:02<17:53,  6.06s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 2.68 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.11 seconds\n",
            "\n",
            "  → Clusterer took 0.15 seconds | best k = 40\n",
            "  → Generator took 4.35 seconds\n",
            "  → Reranker took 0.17 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 12%|█▏        | 24/200 [02:09<18:22,  6.27s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.92 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 45\n",
            "  → Generator took 5.40 seconds\n",
            "  → Reranker took 0.54 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 12%|█▎        | 25/200 [02:18<21:07,  7.24s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 3.23 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.11 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 3.35 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 13%|█▎        | 26/200 [02:23<18:38,  6.43s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.83 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.18 seconds\n",
            "\n",
            "  → Clusterer took 0.22 seconds | best k = 40\n",
            "  → Generator took 2.93 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 14%|█▎        | 27/200 [02:27<16:24,  5.69s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.58 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 40\n",
            "  → Generator took 3.18 seconds\n",
            "  → Reranker took 0.12 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 14%|█▍        | 28/200 [02:32<15:23,  5.37s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.93 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.09 seconds\n",
            "\n",
            "  → Clusterer took 0.17 seconds | best k = 40\n",
            "  → Generator took 2.97 seconds\n",
            "  → Reranker took 0.24 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 14%|█▍        | 29/200 [02:36<14:28,  5.08s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.88 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.23 seconds\n",
            "\n",
            "  → Clusterer took 0.33 seconds | best k = 40\n",
            "  → Generator took 3.03 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 15%|█▌        | 30/200 [02:40<13:40,  4.83s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.59 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 40\n",
            "  → Generator took 3.25 seconds\n",
            "  → Reranker took 0.23 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 16%|█▌        | 31/200 [02:45<13:39,  4.85s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.06 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 2.92 seconds\n",
            "  → Reranker took 0.10 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 16%|█▌        | 32/200 [02:49<13:01,  4.65s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.77 seconds\n",
            "Tf-Idf embeddings generated in 0.08 seconds\n",
            "Reduced dimensions in 0.63 seconds\n",
            "\n",
            "  → Clusterer took 0.35 seconds | best k = 40\n",
            "  → Generator took 2.94 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 16%|█▋        | 33/200 [02:54<12:53,  4.63s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.59 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.12 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 3.21 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 17%|█▋        | 34/200 [02:58<12:31,  4.53s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.72 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.16 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 55\n",
            "  → Generator took 4.49 seconds\n",
            "  → Reranker took 0.50 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 18%|█▊        | 35/200 [03:05<14:04,  5.12s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.12 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.32 seconds\n",
            "\n",
            "  → Clusterer took 0.32 seconds | best k = 50\n",
            "  → Generator took 4.58 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 18%|█▊        | 36/200 [03:10<14:36,  5.35s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.59 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.16 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 65\n",
            "  → Generator took 8.69 seconds\n",
            "  → Reranker took 0.33 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 18%|█▊        | 37/200 [03:23<20:22,  7.50s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 3.11 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.17 seconds | best k = 40\n",
            "  → Generator took 2.62 seconds\n",
            "  → Reranker took 0.10 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 19%|█▉        | 38/200 [03:27<17:20,  6.42s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.81 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.18 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 60\n",
            "  → Generator took 8.05 seconds\n",
            "  → Reranker took 1.43 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 20%|█▉        | 39/200 [03:38<21:17,  7.93s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.56 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.17 seconds | best k = 40\n",
            "  → Generator took 3.18 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 20%|██        | 40/200 [03:43<18:06,  6.79s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.60 seconds\n",
            "Tf-Idf embeddings generated in 0.08 seconds\n",
            "Reduced dimensions in 0.17 seconds\n",
            "\n",
            "  → Clusterer took 0.42 seconds | best k = 45\n",
            "  → Generator took 5.19 seconds\n",
            "  → Reranker took 0.71 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 20%|██        | 41/200 [03:50<18:38,  7.04s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.03 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.11 seconds\n",
            "\n",
            "  → Clusterer took 0.21 seconds | best k = 40\n",
            "  → Generator took 3.03 seconds\n",
            "  → Reranker took 0.26 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 21%|██        | 42/200 [03:55<16:44,  6.36s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.12 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.27 seconds\n",
            "\n",
            "  → Clusterer took 0.40 seconds | best k = 40\n",
            "  → Generator took 3.02 seconds\n",
            "  → Reranker took 0.26 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 22%|██▏       | 43/200 [04:01<16:03,  6.14s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.60 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.12 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 40\n",
            "  → Generator took 2.68 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 22%|██▏       | 44/200 [04:04<14:04,  5.41s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.67 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.11 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 3.03 seconds\n",
            "  → Reranker took 0.88 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 22%|██▎       | 45/200 [04:10<13:55,  5.39s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.06 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.18 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 3.04 seconds\n",
            "  → Reranker took 0.13 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 23%|██▎       | 46/200 [04:15<13:42,  5.34s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.65 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.12 seconds\n",
            "\n",
            "  → Clusterer took 0.16 seconds | best k = 40\n",
            "  → Generator took 3.17 seconds\n",
            "  → Reranker took 0.15 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 24%|██▎       | 47/200 [04:19<12:59,  5.09s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.87 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.16 seconds\n",
            "\n",
            "  → Clusterer took 0.34 seconds | best k = 40\n",
            "  → Generator took 2.91 seconds\n",
            "  → Reranker took 0.16 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 24%|██▍       | 48/200 [04:24<12:19,  4.86s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.70 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.12 seconds\n",
            "\n",
            "  → Clusterer took 0.21 seconds | best k = 40\n",
            "  → Generator took 3.47 seconds\n",
            "  → Reranker took 0.39 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 24%|██▍       | 49/200 [04:29<12:58,  5.15s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.60 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.16 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 3.09 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 25%|██▌       | 50/200 [04:34<12:05,  4.84s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.59 seconds\n",
            "✅ Saved batch 0 to /content/drive/MyDrive/SIGIR2025_LiveRAG/batch_0_live.jsonl\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.23 seconds\n",
            "\n",
            "  → Clusterer took 0.43 seconds | best k = 40\n",
            "  → Generator took 2.70 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 26%|██▌       | 51/200 [04:38<11:23,  4.59s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.57 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.17 seconds | best k = 55\n",
            "  → Generator took 4.34 seconds\n",
            "  → Reranker took 1.18 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 26%|██▌       | 52/200 [04:45<13:17,  5.39s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.38 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.16 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 45\n",
            "  → Generator took 3.18 seconds\n",
            "  → Reranker took 0.21 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 26%|██▋       | 53/200 [04:50<13:10,  5.38s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.56 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.08 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 3.84 seconds\n",
            "  → Reranker took 0.28 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 27%|██▋       | 54/200 [04:56<13:02,  5.36s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.89 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.22 seconds | best k = 40\n",
            "  → Generator took 3.02 seconds\n",
            "  → Reranker took 0.29 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 28%|██▊       | 55/200 [05:00<12:25,  5.14s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.91 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.32 seconds\n",
            "\n",
            "  → Clusterer took 0.25 seconds | best k = 40\n",
            "  → Generator took 2.76 seconds\n",
            "  → Reranker took 0.10 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 28%|██▊       | 56/200 [05:05<11:47,  4.91s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.87 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.10 seconds\n",
            "\n",
            "  → Clusterer took 0.22 seconds | best k = 40\n",
            "  → Generator took 3.08 seconds\n",
            "  → Reranker took 0.45 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 28%|██▊       | 57/200 [05:09<11:39,  4.89s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.94 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.11 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 60\n",
            "  → Generator took 4.26 seconds\n",
            "  → Reranker took 0.24 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 29%|██▉       | 58/200 [05:15<12:03,  5.10s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.73 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.12 seconds\n",
            "\n",
            "  → Clusterer took 0.16 seconds | best k = 40\n",
            "  → Generator took 3.24 seconds\n",
            "  → Reranker took 0.22 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 30%|██▉       | 59/200 [05:20<11:41,  4.98s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.91 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.17 seconds\n",
            "\n",
            "  → Clusterer took 0.15 seconds | best k = 40\n",
            "  → Generator took 3.02 seconds\n",
            "  → Reranker took 0.24 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 30%|███       | 60/200 [05:24<11:28,  4.92s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.15 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.62 seconds\n",
            "\n",
            "  → Clusterer took 0.29 seconds | best k = 50\n",
            "  → Generator took 3.23 seconds\n",
            "  → Reranker took 0.09 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 30%|███       | 61/200 [05:29<11:25,  4.93s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.68 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 40\n",
            "  → Generator took 3.14 seconds\n",
            "  → Reranker took 0.26 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 31%|███       | 62/200 [05:35<11:36,  5.05s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.52 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.17 seconds\n",
            "\n",
            "  → Clusterer took 0.23 seconds | best k = 40\n",
            "  → Generator took 3.00 seconds\n",
            "  → Reranker took 0.42 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 32%|███▏      | 63/200 [05:40<11:55,  5.22s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.74 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 2.66 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 32%|███▏      | 64/200 [05:44<10:45,  4.75s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.60 seconds\n",
            "Tf-Idf embeddings generated in 0.02 seconds\n",
            "Reduced dimensions in 0.09 seconds\n",
            "\n",
            "  → Clusterer took 0.21 seconds | best k = 40\n",
            "  → Generator took 2.92 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 32%|███▎      | 65/200 [05:48<10:09,  4.51s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.71 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 4.91 seconds\n",
            "  → Reranker took 0.44 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 33%|███▎      | 66/200 [05:55<11:38,  5.21s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.10 seconds\n",
            "Tf-Idf embeddings generated in 0.02 seconds\n",
            "Reduced dimensions in 0.08 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 2.95 seconds\n",
            "  → Reranker took 0.16 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 34%|███▎      | 67/200 [05:59<11:01,  4.98s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.01 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 55\n",
            "  → Generator took 3.78 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 34%|███▍      | 68/200 [06:04<10:46,  4.90s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.58 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.60 seconds\n",
            "\n",
            "  → Clusterer took 0.37 seconds | best k = 40\n",
            "  → Generator took 2.64 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 34%|███▍      | 69/200 [06:08<10:16,  4.71s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.58 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 2.93 seconds\n",
            "  → Reranker took 0.11 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 35%|███▌      | 70/200 [06:12<09:56,  4.59s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.89 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 40\n",
            "  → Generator took 2.56 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 36%|███▌      | 71/200 [06:16<09:10,  4.26s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.57 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.24 seconds\n",
            "\n",
            "  → Clusterer took 0.37 seconds | best k = 40\n",
            "  → Generator took 3.72 seconds\n",
            "  → Reranker took 0.30 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 36%|███▌      | 72/200 [06:22<10:01,  4.70s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.99 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 3.30 seconds\n",
            "  → Reranker took 0.43 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 36%|███▋      | 73/200 [06:27<10:19,  4.88s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.19 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.18 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 50\n",
            "  → Generator took 3.71 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 37%|███▋      | 74/200 [06:32<10:08,  4.83s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.59 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.16 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 3.11 seconds\n",
            "  → Reranker took 0.08 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 38%|███▊      | 75/200 [06:36<09:41,  4.66s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.67 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.11 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 60\n",
            "  → Generator took 4.63 seconds\n",
            "  → Reranker took 1.68 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 38%|███▊      | 76/200 [06:44<11:35,  5.61s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.17 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.27 seconds\n",
            "\n",
            "  → Clusterer took 0.34 seconds | best k = 40\n",
            "  → Generator took 4.84 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 38%|███▊      | 77/200 [06:50<11:49,  5.77s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.62 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.10 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 2.70 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 39%|███▉      | 78/200 [06:54<10:25,  5.12s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.59 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.10 seconds\n",
            "\n",
            "  → Clusterer took 0.17 seconds | best k = 40\n",
            "  → Generator took 3.27 seconds\n",
            "  → Reranker took 0.94 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 40%|███▉      | 79/200 [07:00<11:01,  5.47s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.75 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.21 seconds | best k = 60\n",
            "  → Generator took 4.27 seconds\n",
            "  → Reranker took 0.15 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 40%|████      | 80/200 [07:05<10:56,  5.47s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.68 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.17 seconds | best k = 40\n",
            "  → Generator took 4.70 seconds\n",
            "  → Reranker took 0.71 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 40%|████      | 81/200 [07:13<12:20,  6.22s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 2.18 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 65\n",
            "  → Generator took 4.81 seconds\n",
            "  → Reranker took 0.35 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 41%|████      | 82/200 [07:20<12:15,  6.23s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.74 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 3.65 seconds\n",
            "  → Reranker took 1.02 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 42%|████▏     | 83/200 [07:26<12:19,  6.32s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.50 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 40\n",
            "  → Generator took 2.82 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 42%|████▏     | 84/200 [07:30<10:45,  5.57s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.59 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 3.91 seconds\n",
            "  → Reranker took 1.86 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 42%|████▎     | 85/200 [07:37<11:20,  5.92s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.60 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.24 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 2.93 seconds\n",
            "  → Reranker took 0.16 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 43%|████▎     | 86/200 [07:41<10:29,  5.52s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.00 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 40\n",
            "  → Generator took 4.76 seconds\n",
            "  → Reranker took 0.23 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 44%|████▎     | 87/200 [07:48<10:51,  5.76s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.94 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.58 seconds\n",
            "\n",
            "  → Clusterer took 0.37 seconds | best k = 40\n",
            "  → Generator took 3.43 seconds\n",
            "  → Reranker took 0.68 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 44%|████▍     | 88/200 [07:54<11:05,  5.95s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.23 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.10 seconds\n",
            "\n",
            "  → Clusterer took 0.15 seconds | best k = 40\n",
            "  → Generator took 3.23 seconds\n",
            "  → Reranker took 0.64 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 44%|████▍     | 89/200 [07:59<10:19,  5.58s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.57 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.12 seconds\n",
            "\n",
            "  → Clusterer took 0.16 seconds | best k = 50\n",
            "  → Generator took 3.98 seconds\n",
            "  → Reranker took 0.23 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 45%|████▌     | 90/200 [08:04<10:16,  5.60s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.12 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.21 seconds | best k = 40\n",
            "  → Generator took 3.25 seconds\n",
            "  → Reranker took 0.26 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 46%|████▌     | 91/200 [08:10<10:01,  5.52s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.43 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.17 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 3.08 seconds\n",
            "  → Reranker took 0.20 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 46%|████▌     | 92/200 [08:14<09:20,  5.19s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.72 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.23 seconds\n",
            "\n",
            "  → Clusterer took 0.24 seconds | best k = 40\n",
            "  → Generator took 3.59 seconds\n",
            "  → Reranker took 0.48 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 46%|████▋     | 93/200 [08:20<09:30,  5.33s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.06 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.12 seconds\n",
            "\n",
            "  → Clusterer took 0.22 seconds | best k = 40\n",
            "  → Generator took 3.29 seconds\n",
            "  → Reranker took 0.14 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 47%|████▋     | 94/200 [08:24<08:54,  5.04s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.56 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.17 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 3.43 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 48%|████▊     | 95/200 [08:29<08:29,  4.85s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.57 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.16 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 3.71 seconds\n",
            "  → Reranker took 0.39 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 48%|████▊     | 96/200 [08:34<08:54,  5.14s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.31 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.20 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 50\n",
            "  → Generator took 3.91 seconds\n",
            "  → Reranker took 0.90 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 48%|████▊     | 97/200 [08:42<10:23,  6.05s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 2.94 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 50\n",
            "  → Generator took 3.58 seconds\n",
            "  → Reranker took 0.17 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 49%|████▉     | 98/200 [08:48<09:48,  5.77s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.97 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 40\n",
            "  → Generator took 2.77 seconds\n",
            "  → Reranker took 0.22 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 50%|████▉     | 99/200 [08:53<09:18,  5.53s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.57 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.33 seconds\n",
            "\n",
            "  → Clusterer took 0.31 seconds | best k = 40\n",
            "  → Generator took 3.21 seconds\n",
            "  → Reranker took 0.44 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 50%|█████     | 100/200 [08:58<09:18,  5.58s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.31 seconds\n",
            "✅ Saved batch 1 to /content/drive/MyDrive/SIGIR2025_LiveRAG/batch_1_live.jsonl\n",
            "Tf-Idf embeddings generated in 0.10 seconds\n",
            "Reduced dimensions in 0.57 seconds\n",
            "\n",
            "  → Clusterer took 0.28 seconds | best k = 40\n",
            "  → Generator took 4.69 seconds\n",
            "  → Reranker took 1.71 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 50%|█████     | 101/200 [09:07<10:44,  6.51s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.30 seconds\n",
            "Tf-Idf embeddings generated in 0.27 seconds\n",
            "Reduced dimensions in 1.43 seconds\n",
            "\n",
            "  → Clusterer took 0.43 seconds | best k = 50\n",
            "  → Generator took 6.75 seconds\n",
            "  → Reranker took 3.56 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 51%|█████     | 102/200 [09:21<14:21,  8.79s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.65 seconds\n",
            "Tf-Idf embeddings generated in 0.08 seconds\n",
            "Reduced dimensions in 0.36 seconds\n",
            "\n",
            "  → Clusterer took 0.30 seconds | best k = 40\n",
            "  → Generator took 3.55 seconds\n",
            "  → Reranker took 0.24 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 52%|█████▏    | 103/200 [09:26<12:32,  7.75s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.78 seconds\n",
            "Tf-Idf embeddings generated in 0.09 seconds\n",
            "Reduced dimensions in 0.25 seconds\n",
            "\n",
            "  → Clusterer took 0.36 seconds | best k = 40\n",
            "  → Generator took 3.05 seconds\n",
            "  → Reranker took 0.29 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 52%|█████▏    | 104/200 [09:33<11:36,  7.26s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 2.06 seconds\n",
            "Tf-Idf embeddings generated in 0.08 seconds\n",
            "Reduced dimensions in 2.26 seconds\n",
            "\n",
            "  → Clusterer took 0.49 seconds | best k = 55\n",
            "  → Generator took 3.90 seconds\n",
            "  → Reranker took 0.39 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 52%|█████▎    | 105/200 [09:41<11:53,  7.51s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.97 seconds\n",
            "Tf-Idf embeddings generated in 0.10 seconds\n",
            "Reduced dimensions in 0.52 seconds\n",
            "\n",
            "  → Clusterer took 0.28 seconds | best k = 40\n",
            "  → Generator took 4.88 seconds\n",
            "  → Reranker took 0.32 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 53%|█████▎    | 106/200 [09:48<11:31,  7.36s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.89 seconds\n",
            "Tf-Idf embeddings generated in 0.11 seconds\n",
            "Reduced dimensions in 1.91 seconds\n",
            "\n",
            "  → Clusterer took 0.60 seconds | best k = 40\n",
            "  → Generator took 3.56 seconds\n",
            "  → Reranker took 0.56 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 54%|█████▎    | 107/200 [09:55<11:33,  7.45s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.92 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.12 seconds\n",
            "\n",
            "  → Clusterer took 0.17 seconds | best k = 65\n",
            "  → Generator took 4.36 seconds\n",
            "  → Reranker took 0.10 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 54%|█████▍    | 108/200 [10:01<10:31,  6.87s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.68 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.23 seconds\n",
            "\n",
            "  → Clusterer took 0.30 seconds | best k = 55\n",
            "  → Generator took 3.83 seconds\n",
            "  → Reranker took 0.08 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 55%|█████▍    | 109/200 [10:06<09:37,  6.35s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.66 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.20 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 65\n",
            "  → Generator took 5.04 seconds\n",
            "  → Reranker took 0.21 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 55%|█████▌    | 110/200 [10:13<09:39,  6.44s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.98 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.16 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 40\n",
            "  → Generator took 3.04 seconds\n",
            "  → Reranker took 0.30 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 56%|█████▌    | 111/200 [10:17<08:45,  5.91s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.89 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 3.15 seconds\n",
            "  → Reranker took 0.21 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 56%|█████▌    | 112/200 [10:22<08:05,  5.52s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.88 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 45\n",
            "  → Generator took 3.18 seconds\n",
            "  → Reranker took 0.19 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 56%|█████▋    | 113/200 [10:27<07:39,  5.28s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.98 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.31 seconds\n",
            "\n",
            "  → Clusterer took 0.38 seconds | best k = 40\n",
            "  → Generator took 3.70 seconds\n",
            "  → Reranker took 0.52 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 57%|█████▋    | 114/200 [10:33<08:03,  5.62s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.43 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 40\n",
            "  → Generator took 4.37 seconds\n",
            "  → Reranker took 1.09 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 57%|█████▊    | 115/200 [10:40<08:33,  6.04s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.13 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.62 seconds\n",
            "\n",
            "  → Clusterer took 0.37 seconds | best k = 55\n",
            "  → Generator took 5.95 seconds\n",
            "  → Reranker took 0.66 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 58%|█████▊    | 116/200 [10:49<09:48,  7.00s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.59 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.16 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 40\n",
            "  → Generator took 2.96 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 58%|█████▊    | 117/200 [10:53<08:25,  6.09s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.61 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.34 seconds\n",
            "\n",
            "  → Clusterer took 0.31 seconds | best k = 40\n",
            "  → Generator took 3.79 seconds\n",
            "  → Reranker took 0.13 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 59%|█████▉    | 118/200 [10:59<08:04,  5.91s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.84 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.17 seconds\n",
            "\n",
            "  → Clusterer took 0.21 seconds | best k = 60\n",
            "  → Generator took 4.23 seconds\n",
            "  → Reranker took 0.08 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 60%|█████▉    | 119/200 [11:04<07:45,  5.75s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.62 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.10 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 2.67 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 60%|██████    | 120/200 [11:08<06:47,  5.10s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.57 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.25 seconds\n",
            "\n",
            "  → Clusterer took 0.27 seconds | best k = 40\n",
            "  → Generator took 3.09 seconds\n",
            "  → Reranker took 0.22 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 60%|██████    | 121/200 [11:13<06:47,  5.16s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.40 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 55\n",
            "  → Generator took 4.24 seconds\n",
            "  → Reranker took 0.16 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 61%|██████    | 122/200 [11:19<06:51,  5.27s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.76 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.30 seconds | best k = 55\n",
            "  → Generator took 3.82 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 62%|██████▏   | 123/200 [11:24<06:40,  5.20s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.72 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.22 seconds | best k = 60\n",
            "  → Generator took 4.09 seconds\n",
            "  → Reranker took 0.17 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 62%|██████▏   | 124/200 [11:29<06:45,  5.34s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.98 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.37 seconds\n",
            "\n",
            "  → Clusterer took 0.37 seconds | best k = 40\n",
            "  → Generator took 3.13 seconds\n",
            "  → Reranker took 0.40 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 62%|██████▎   | 125/200 [11:35<06:43,  5.39s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.14 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.20 seconds\n",
            "\n",
            "  → Clusterer took 0.21 seconds | best k = 45\n",
            "  → Generator took 4.62 seconds\n",
            "  → Reranker took 0.81 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 63%|██████▎   | 126/200 [11:41<07:07,  5.78s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.79 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.19 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 2.99 seconds\n",
            "  → Reranker took 0.23 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 64%|██████▎   | 127/200 [11:46<06:41,  5.50s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.21 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.37 seconds\n",
            "\n",
            "  → Clusterer took 0.38 seconds | best k = 50\n",
            "  → Generator took 4.63 seconds\n",
            "  → Reranker took 0.71 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 64%|██████▍   | 128/200 [11:54<07:21,  6.13s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.44 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.10 seconds\n",
            "\n",
            "  → Clusterer took 0.21 seconds | best k = 50\n",
            "  → Generator took 4.14 seconds\n",
            "  → Reranker took 1.17 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 64%|██████▍   | 129/200 [12:01<07:38,  6.45s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.54 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.12 seconds\n",
            "\n",
            "  → Clusterer took 0.21 seconds | best k = 40\n",
            "  → Generator took 4.34 seconds\n",
            "  → Reranker took 1.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 65%|██████▌   | 130/200 [12:08<07:44,  6.64s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.37 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.24 seconds | best k = 40\n",
            "  → Generator took 3.04 seconds\n",
            "  → Reranker took 0.12 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 66%|██████▌   | 131/200 [12:12<06:49,  5.94s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.71 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.30 seconds\n",
            "\n",
            "  → Clusterer took 0.39 seconds | best k = 40\n",
            "  → Generator took 3.06 seconds\n",
            "  → Reranker took 0.17 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 66%|██████▌   | 132/200 [12:18<06:27,  5.70s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.13 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.12 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 40\n",
            "  → Generator took 3.20 seconds\n",
            "  → Reranker took 0.32 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 66%|██████▋   | 133/200 [12:22<06:05,  5.46s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.02 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 55\n",
            "  → Generator took 3.74 seconds\n",
            "  → Reranker took 0.17 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 67%|██████▋   | 134/200 [12:28<05:57,  5.42s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.02 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.17 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 40\n",
            "  → Generator took 3.02 seconds\n",
            "  → Reranker took 0.21 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 68%|██████▊   | 135/200 [12:33<05:44,  5.30s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.37 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.16 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 3.37 seconds\n",
            "  → Reranker took 0.63 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 68%|██████▊   | 136/200 [12:38<05:33,  5.21s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.60 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.98 seconds\n",
            "\n",
            "  → Clusterer took 0.39 seconds | best k = 55\n",
            "  → Generator took 3.74 seconds\n",
            "  → Reranker took 0.11 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 68%|██████▊   | 137/200 [12:44<05:42,  5.44s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.67 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.17 seconds | best k = 40\n",
            "  → Generator took 2.54 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 69%|██████▉   | 138/200 [12:47<05:01,  4.87s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.61 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.12 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 2.94 seconds\n",
            "  → Reranker took 0.17 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 70%|██████▉   | 139/200 [12:52<04:44,  4.66s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.72 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 1.01 seconds\n",
            "\n",
            "  → Clusterer took 0.39 seconds | best k = 40\n",
            "  → Generator took 3.05 seconds\n",
            "  → Reranker took 0.21 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 70%|███████   | 140/200 [12:57<04:56,  4.93s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.83 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.16 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 50\n",
            "  → Generator took 3.61 seconds\n",
            "  → Reranker took 0.20 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 70%|███████   | 141/200 [13:03<05:00,  5.10s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.25 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 55\n",
            "  → Generator took 5.35 seconds\n",
            "  → Reranker took 1.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 71%|███████   | 142/200 [13:10<05:39,  5.85s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.90 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.21 seconds | best k = 40\n",
            "  → Generator took 2.76 seconds\n",
            "  → Reranker took 0.10 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 72%|███████▏  | 143/200 [13:14<05:05,  5.36s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.94 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.18 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 2.98 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 72%|███████▏  | 144/200 [13:18<04:36,  4.94s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.57 seconds\n",
            "Tf-Idf embeddings generated in 0.08 seconds\n",
            "Reduced dimensions in 0.39 seconds\n",
            "\n",
            "  → Clusterer took 0.30 seconds | best k = 40\n",
            "  → Generator took 2.87 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 72%|███████▎  | 145/200 [13:23<04:20,  4.73s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.59 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.12 seconds\n",
            "\n",
            "  → Clusterer took 0.22 seconds | best k = 45\n",
            "  → Generator took 3.06 seconds\n",
            "  → Reranker took 0.16 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 73%|███████▎  | 146/200 [13:27<04:15,  4.74s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.15 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.30 seconds\n",
            "\n",
            "  → Clusterer took 0.24 seconds | best k = 55\n",
            "  → Generator took 3.86 seconds\n",
            "  → Reranker took 0.30 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 74%|███████▎  | 147/200 [13:33<04:24,  4.99s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.80 seconds\n",
            "Tf-Idf embeddings generated in 0.15 seconds\n",
            "Reduced dimensions in 0.57 seconds\n",
            "\n",
            "  → Clusterer took 0.34 seconds | best k = 40\n",
            "  → Generator took 3.03 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 74%|███████▍  | 148/200 [13:38<04:15,  4.91s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.59 seconds\n",
            "Tf-Idf embeddings generated in 0.11 seconds\n",
            "Reduced dimensions in 0.79 seconds\n",
            "\n",
            "  → Clusterer took 0.44 seconds | best k = 40\n",
            "  → Generator took 3.05 seconds\n",
            "  → Reranker took 0.30 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 74%|███████▍  | 149/200 [13:43<04:18,  5.07s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.75 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.16 seconds\n",
            "\n",
            "  → Clusterer took 0.28 seconds | best k = 45\n",
            "  → Generator took 3.17 seconds\n",
            "  → Reranker took 0.10 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 75%|███████▌  | 150/200 [13:48<04:06,  4.92s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.79 seconds\n",
            "✅ Saved batch 2 to /content/drive/MyDrive/SIGIR2025_LiveRAG/batch_2_live.jsonl\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.28 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 5.63 seconds\n",
            "  → Reranker took 1.56 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 76%|███████▌  | 151/200 [13:57<05:05,  6.24s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.60 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.30 seconds\n",
            "\n",
            "  → Clusterer took 0.40 seconds | best k = 40\n",
            "  → Generator took 2.67 seconds\n",
            "  → Reranker took 0.13 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 76%|███████▌  | 152/200 [14:01<04:31,  5.65s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.70 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.09 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 3.67 seconds\n",
            "  → Reranker took 0.50 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 76%|███████▋  | 153/200 [14:07<04:21,  5.56s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.88 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.16 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 40\n",
            "  → Generator took 3.02 seconds\n",
            "  → Reranker took 0.65 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 77%|███████▋  | 154/200 [14:12<04:10,  5.45s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.09 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.29 seconds\n",
            "\n",
            "  → Clusterer took 0.28 seconds | best k = 45\n",
            "  → Generator took 4.78 seconds\n",
            "  → Reranker took 1.01 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 78%|███████▊  | 155/200 [14:19<04:31,  6.04s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.98 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.12 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 40\n",
            "  → Generator took 3.91 seconds\n",
            "  → Reranker took 1.04 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 78%|███████▊  | 156/200 [14:25<04:26,  6.05s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.73 seconds\n",
            "Tf-Idf embeddings generated in 0.08 seconds\n",
            "Reduced dimensions in 0.20 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 4.17 seconds\n",
            "  → Reranker took 0.56 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 78%|███████▊  | 157/200 [14:32<04:33,  6.35s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.86 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.21 seconds | best k = 40\n",
            "  → Generator took 3.21 seconds\n",
            "  → Reranker took 0.21 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 79%|███████▉  | 158/200 [14:37<04:02,  5.78s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.61 seconds\n",
            "Tf-Idf embeddings generated in 0.74 seconds\n",
            "Reduced dimensions in 0.28 seconds\n",
            "\n",
            "  → Clusterer took 0.30 seconds | best k = 40\n",
            "  → Generator took 3.02 seconds\n",
            "  → Reranker took 0.32 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 80%|███████▉  | 159/200 [14:43<03:58,  5.81s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.20 seconds\n",
            "Tf-Idf embeddings generated in 0.08 seconds\n",
            "Reduced dimensions in 0.33 seconds\n",
            "\n",
            "  → Clusterer took 0.33 seconds | best k = 40\n",
            "  → Generator took 2.93 seconds\n",
            "  → Reranker took 0.18 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 80%|████████  | 160/200 [14:48<03:41,  5.53s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.01 seconds\n",
            "Tf-Idf embeddings generated in 0.10 seconds\n",
            "Reduced dimensions in 0.32 seconds\n",
            "\n",
            "  → Clusterer took 0.35 seconds | best k = 40\n",
            "  → Generator took 3.03 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 80%|████████  | 161/200 [14:52<03:22,  5.19s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.58 seconds\n",
            "Tf-Idf embeddings generated in 0.11 seconds\n",
            "Reduced dimensions in 0.57 seconds\n",
            "\n",
            "  → Clusterer took 0.47 seconds | best k = 40\n",
            "  → Generator took 3.04 seconds\n",
            "  → Reranker took 0.70 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 81%|████████  | 162/200 [14:58<03:30,  5.53s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.44 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.32 seconds\n",
            "\n",
            "  → Clusterer took 0.29 seconds | best k = 40\n",
            "  → Generator took 3.16 seconds\n",
            "  → Reranker took 0.75 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 82%|████████▏ | 163/200 [15:04<03:25,  5.55s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.99 seconds\n",
            "Tf-Idf embeddings generated in 0.30 seconds\n",
            "Reduced dimensions in 2.42 seconds\n",
            "\n",
            "  → Clusterer took 1.44 seconds | best k = 40\n",
            "  → Generator took 2.94 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 82%|████████▏ | 164/200 [15:12<03:43,  6.20s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.60 seconds\n",
            "Tf-Idf embeddings generated in 0.10 seconds\n",
            "Reduced dimensions in 0.27 seconds\n",
            "\n",
            "  → Clusterer took 0.31 seconds | best k = 45\n",
            "  → Generator took 3.77 seconds\n",
            "  → Reranker took 0.14 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 82%|████████▎ | 165/200 [15:17<03:30,  6.02s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.99 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 2.98 seconds\n",
            "  → Reranker took 0.20 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 83%|████████▎ | 166/200 [15:22<03:10,  5.61s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.11 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.16 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 40\n",
            "  → Generator took 3.12 seconds\n",
            "  → Reranker took 0.19 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 84%|████████▎ | 167/200 [15:28<03:07,  5.68s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 2.12 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.17 seconds\n",
            "\n",
            "  → Clusterer took 0.34 seconds | best k = 45\n",
            "  → Generator took 3.53 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 84%|████████▍ | 168/200 [15:32<02:52,  5.39s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.60 seconds\n",
            "Tf-Idf embeddings generated in 0.19 seconds\n",
            "Reduced dimensions in 1.48 seconds\n",
            "\n",
            "  → Clusterer took 0.41 seconds | best k = 40\n",
            "  → Generator took 4.27 seconds\n",
            "  → Reranker took 0.22 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 84%|████████▍ | 169/200 [15:40<03:09,  6.11s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.24 seconds\n",
            "Tf-Idf embeddings generated in 0.12 seconds\n",
            "Reduced dimensions in 0.49 seconds\n",
            "\n",
            "  → Clusterer took 0.35 seconds | best k = 55\n",
            "  → Generator took 5.76 seconds\n",
            "  → Reranker took 2.66 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 85%|████████▌ | 170/200 [15:50<03:40,  7.36s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.87 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.24 seconds | best k = 40\n",
            "  → Generator took 3.16 seconds\n",
            "  → Reranker took 0.24 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 86%|████████▌ | 171/200 [15:55<03:09,  6.55s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.81 seconds\n",
            "Tf-Idf embeddings generated in 0.08 seconds\n",
            "Reduced dimensions in 0.44 seconds\n",
            "\n",
            "  → Clusterer took 0.27 seconds | best k = 40\n",
            "  → Generator took 3.10 seconds\n",
            "  → Reranker took 0.96 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 86%|████████▌ | 172/200 [16:02<03:03,  6.54s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.67 seconds\n",
            "Tf-Idf embeddings generated in 0.08 seconds\n",
            "Reduced dimensions in 0.49 seconds\n",
            "\n",
            "  → Clusterer took 0.25 seconds | best k = 40\n",
            "  → Generator took 3.05 seconds\n",
            "  → Reranker took 0.34 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 86%|████████▋ | 173/200 [16:07<02:47,  6.22s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.25 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 3.16 seconds\n",
            "  → Reranker took 0.33 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 87%|████████▋ | 174/200 [16:12<02:29,  5.75s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.82 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.17 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 2.90 seconds\n",
            "  → Reranker took 0.22 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 88%|████████▊ | 175/200 [16:16<02:13,  5.32s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.79 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.49 seconds\n",
            "\n",
            "  → Clusterer took 0.28 seconds | best k = 40\n",
            "  → Generator took 2.99 seconds\n",
            "  → Reranker took 0.15 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 88%|████████▊ | 176/200 [16:21<02:05,  5.22s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.01 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.21 seconds | best k = 55\n",
            "  → Generator took 3.70 seconds\n",
            "  → Reranker took 0.50 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 88%|████████▊ | 177/200 [16:27<02:03,  5.39s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.16 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.54 seconds\n",
            "\n",
            "  → Clusterer took 0.33 seconds | best k = 40\n",
            "  → Generator took 3.29 seconds\n",
            "  → Reranker took 0.15 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 89%|████████▉ | 178/200 [16:33<02:00,  5.48s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.31 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.30 seconds | best k = 50\n",
            "  → Generator took 4.72 seconds\n",
            "  → Reranker took 0.90 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 90%|████████▉ | 179/200 [16:40<02:07,  6.08s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.37 seconds\n",
            "Tf-Idf embeddings generated in 0.41 seconds\n",
            "Reduced dimensions in 3.02 seconds\n",
            "\n",
            "  → Clusterer took 0.61 seconds | best k = 40\n",
            "  → Generator took 3.12 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 90%|█████████ | 180/200 [16:48<02:11,  6.59s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.61 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.14 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 40\n",
            "  → Generator took 2.92 seconds\n",
            "  → Reranker took 0.13 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 90%|█████████ | 181/200 [16:52<01:51,  5.87s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.76 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.17 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 3.00 seconds\n",
            "  → Reranker took 0.26 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 91%|█████████ | 182/200 [16:57<01:38,  5.46s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.84 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 5.96 seconds\n",
            "  → Reranker took 1.34 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 92%|█████████▏| 183/200 [17:06<01:50,  6.52s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.29 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.11 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 45\n",
            "  → Generator took 3.16 seconds\n",
            "  → Reranker took 0.20 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 92%|█████████▏| 184/200 [17:10<01:36,  6.06s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.27 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 4.15 seconds\n",
            "  → Reranker took 0.62 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 92%|█████████▎| 185/200 [17:17<01:31,  6.12s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.13 seconds\n",
            "Tf-Idf embeddings generated in 0.03 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.20 seconds | best k = 55\n",
            "  → Generator took 5.46 seconds\n",
            "  → Reranker took 0.75 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 93%|█████████▎| 186/200 [17:24<01:30,  6.47s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.69 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.12 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 4.01 seconds\n",
            "  → Reranker took 0.36 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 94%|█████████▎| 187/200 [17:30<01:21,  6.26s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.05 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.16 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 55\n",
            "  → Generator took 4.23 seconds\n",
            "  → Reranker took 0.16 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 94%|█████████▍| 188/200 [17:36<01:14,  6.18s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.20 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 50\n",
            "Error: APIError: - HTTP 502 error: <html>\n",
            "<head><title>502 Bad Gateway</title></head>\n",
            "<body>\n",
            "<center><h1>502 Bad Gateway</h1></center>\n",
            "</body>\n",
            "</html>\n",
            " | Request 3 | Retry 0\n",
            "  → Generator took 7.68 seconds\n",
            "  → Reranker took 0.20 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 94%|█████████▍| 189/200 [17:45<01:16,  6.99s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.63 seconds\n",
            "Tf-Idf embeddings generated in 0.07 seconds\n",
            "Reduced dimensions in 0.31 seconds\n",
            "\n",
            "  → Clusterer took 0.32 seconds | best k = 40\n",
            "  → Generator took 3.05 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 95%|█████████▌| 190/200 [17:49<01:01,  6.20s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.59 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.16 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 3.25 seconds\n",
            "  → Reranker took 0.43 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 96%|█████████▌| 191/200 [17:54<00:52,  5.88s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.07 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.09 seconds\n",
            "\n",
            "  → Clusterer took 0.17 seconds | best k = 40\n",
            "  → Generator took 3.08 seconds\n",
            "  → Reranker took 0.47 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 96%|█████████▌| 192/200 [17:59<00:45,  5.72s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.47 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.17 seconds\n",
            "\n",
            "  → Clusterer took 0.25 seconds | best k = 40\n",
            "  → Generator took 3.25 seconds\n",
            "  → Reranker took 0.39 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 96%|█████████▋| 193/200 [18:05<00:39,  5.59s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.16 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.12 seconds\n",
            "\n",
            "  → Clusterer took 0.18 seconds | best k = 40\n",
            "  → Generator took 2.95 seconds\n",
            "  → Reranker took 0.19 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 97%|█████████▋| 194/200 [18:09<00:31,  5.26s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.01 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.15 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 3.09 seconds\n",
            "  → Reranker took 0.25 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 98%|█████████▊| 195/200 [18:15<00:26,  5.27s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.55 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.16 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 4.82 seconds\n",
            "  → Reranker took 0.78 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 98%|█████████▊| 196/200 [18:22<00:24,  6.06s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 1.90 seconds\n",
            "Tf-Idf embeddings generated in 0.05 seconds\n",
            "Reduced dimensions in 0.24 seconds\n",
            "\n",
            "  → Clusterer took 0.34 seconds | best k = 40\n",
            "  → Generator took 3.99 seconds\n",
            "  → Reranker took 0.82 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 98%|█████████▊| 197/200 [18:29<00:18,  6.17s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.98 seconds\n",
            "Tf-Idf embeddings generated in 0.22 seconds\n",
            "Reduced dimensions in 0.13 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 2.83 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r 99%|█████████▉| 198/200 [18:33<00:11,  5.50s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.57 seconds\n",
            "Tf-Idf embeddings generated in 0.04 seconds\n",
            "Reduced dimensions in 0.18 seconds\n",
            "\n",
            "  → Clusterer took 0.19 seconds | best k = 40\n",
            "  → Generator took 2.93 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r100%|█████████▉| 199/200 [18:37<00:05,  5.04s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.62 seconds\n",
            "Tf-Idf embeddings generated in 0.06 seconds\n",
            "Reduced dimensions in 0.38 seconds\n",
            "\n",
            "  → Clusterer took 0.41 seconds | best k = 40\n",
            "  → Generator took 2.75 seconds\n",
            "  → Reranker took 0.00 seconds\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "100%|██████████| 200/200 [18:41<00:00,  5.61s/it]"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "  → Final prompt took 0.59 seconds\n",
            "✅ Saved batch 3 to /content/drive/MyDrive/SIGIR2025_LiveRAG/batch_3_live.jsonl\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\n"
          ]
        }
      ]
    },
    {
      "cell_type": "code",
      "source": [
        "#@title merge the two results\n",
        "topclustrag_final_df = pd.concat([topclustrag_first_split, topclustrag_last_split], ignore_index=True)\n",
        "topclustrag_final_df.to_json(\"liverag_live_500.jsonl\", orient=\"records\", lines=True)\n",
        "\n",
        "topclustrag_final_df.head(3)"
      ],
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 196
        },
        "outputId": "c5bd4700-38d2-466c-86ee-bcba741aa1c8",
        "id": "BIOQjDN4yZO5"
      },
      "execution_count": null,
      "outputs": [
        {
          "output_type": "execute_result",
          "data": {
            "text/plain": [
              "   id                                           question  \\\n",
              "0   0   How deep can fish survive in the ocean trenches?   \n",
              "1   1  Based on temperature considerations alone, is ...   \n",
              "2   2  What major acts performed at the Brighton Hipp...   \n",
              "\n",
              "                                            passages  \\\n",
              "0  [{'passage': 'Fish can survive down to around ...   \n",
              "1  [{'passage': 'Based on temperature considerati...   \n",
              "2  [{'passage': 'During its peak years, the Brigh...   \n",
              "\n",
              "                                        final_prompt  \\\n",
              "0  Answer the question using only the context bel...   \n",
              "1  Answer the question using only the context bel...   \n",
              "2  Answer the question using only the context bel...   \n",
              "\n",
              "                                              answer  \n",
              "0  Fish can survive down to around 27,000 feet (8...  \n",
              "1  Based on temperature considerations alone, Mar...  \n",
              "2  During its peak years, the Brighton Hippodrome...  "
            ],
            "text/html": [
              "\n",
              "  <div id=\"df-4af13a3a-08c9-4334-8263-d8f0d00fe623\" class=\"colab-df-container\">\n",
              "    <div>\n",
              "<style scoped>\n",
              "    .dataframe tbody tr th:only-of-type {\n",
              "        vertical-align: middle;\n",
              "    }\n",
              "\n",
              "    .dataframe tbody tr th {\n",
              "        vertical-align: top;\n",
              "    }\n",
              "\n",
              "    .dataframe thead th {\n",
              "        text-align: right;\n",
              "    }\n",
              "</style>\n",
              "<table border=\"1\" class=\"dataframe\">\n",
              "  <thead>\n",
              "    <tr style=\"text-align: right;\">\n",
              "      <th></th>\n",
              "      <th>id</th>\n",
              "      <th>question</th>\n",
              "      <th>passages</th>\n",
              "      <th>final_prompt</th>\n",
              "      <th>answer</th>\n",
              "    </tr>\n",
              "  </thead>\n",
              "  <tbody>\n",
              "    <tr>\n",
              "      <th>0</th>\n",
              "      <td>0</td>\n",
              "      <td>How deep can fish survive in the ocean trenches?</td>\n",
              "      <td>[{'passage': 'Fish can survive down to around ...</td>\n",
              "      <td>Answer the question using only the context bel...</td>\n",
              "      <td>Fish can survive down to around 27,000 feet (8...</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>1</th>\n",
              "      <td>1</td>\n",
              "      <td>Based on temperature considerations alone, is ...</td>\n",
              "      <td>[{'passage': 'Based on temperature considerati...</td>\n",
              "      <td>Answer the question using only the context bel...</td>\n",
              "      <td>Based on temperature considerations alone, Mar...</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>2</th>\n",
              "      <td>2</td>\n",
              "      <td>What major acts performed at the Brighton Hipp...</td>\n",
              "      <td>[{'passage': 'During its peak years, the Brigh...</td>\n",
              "      <td>Answer the question using only the context bel...</td>\n",
              "      <td>During its peak years, the Brighton Hippodrome...</td>\n",
              "    </tr>\n",
              "  </tbody>\n",
              "</table>\n",
              "</div>\n",
              "    <div class=\"colab-df-buttons\">\n",
              "\n",
              "  <div class=\"colab-df-container\">\n",
              "    <button class=\"colab-df-convert\" onclick=\"convertToInteractive('df-4af13a3a-08c9-4334-8263-d8f0d00fe623')\"\n",
              "            title=\"Convert this dataframe to an interactive table.\"\n",
              "            style=\"display:none;\">\n",
              "\n",
              "  <svg xmlns=\"http://www.w3.org/2000/svg\" height=\"24px\" viewBox=\"0 -960 960 960\">\n",
              "    <path d=\"M120-120v-720h720v720H120Zm60-500h600v-160H180v160Zm220 220h160v-160H400v160Zm0 220h160v-160H400v160ZM180-400h160v-160H180v160Zm440 0h160v-160H620v160ZM180-180h160v-160H180v160Zm440 0h160v-160H620v160Z\"/>\n",
              "  </svg>\n",
              "    </button>\n",
              "\n",
              "  <style>\n",
              "    .colab-df-container {\n",
              "      display:flex;\n",
              "      gap: 12px;\n",
              "    }\n",
              "\n",
              "    .colab-df-convert {\n",
              "      background-color: #E8F0FE;\n",
              "      border: none;\n",
              "      border-radius: 50%;\n",
              "      cursor: pointer;\n",
              "      display: none;\n",
              "      fill: #1967D2;\n",
              "      height: 32px;\n",
              "      padding: 0 0 0 0;\n",
              "      width: 32px;\n",
              "    }\n",
              "\n",
              "    .colab-df-convert:hover {\n",
              "      background-color: #E2EBFA;\n",
              "      box-shadow: 0px 1px 2px rgba(60, 64, 67, 0.3), 0px 1px 3px 1px rgba(60, 64, 67, 0.15);\n",
              "      fill: #174EA6;\n",
              "    }\n",
              "\n",
              "    .colab-df-buttons div {\n",
              "      margin-bottom: 4px;\n",
              "    }\n",
              "\n",
              "    [theme=dark] .colab-df-convert {\n",
              "      background-color: #3B4455;\n",
              "      fill: #D2E3FC;\n",
              "    }\n",
              "\n",
              "    [theme=dark] .colab-df-convert:hover {\n",
              "      background-color: #434B5C;\n",
              "      box-shadow: 0px 1px 3px 1px rgba(0, 0, 0, 0.15);\n",
              "      filter: drop-shadow(0px 1px 2px rgba(0, 0, 0, 0.3));\n",
              "      fill: #FFFFFF;\n",
              "    }\n",
              "  </style>\n",
              "\n",
              "    <script>\n",
              "      const buttonEl =\n",
              "        document.querySelector('#df-4af13a3a-08c9-4334-8263-d8f0d00fe623 button.colab-df-convert');\n",
              "      buttonEl.style.display =\n",
              "        google.colab.kernel.accessAllowed ? 'block' : 'none';\n",
              "\n",
              "      async function convertToInteractive(key) {\n",
              "        const element = document.querySelector('#df-4af13a3a-08c9-4334-8263-d8f0d00fe623');\n",
              "        const dataTable =\n",
              "          await google.colab.kernel.invokeFunction('convertToInteractive',\n",
              "                                                    [key], {});\n",
              "        if (!dataTable) return;\n",
              "\n",
              "        const docLinkHtml = 'Like what you see? Visit the ' +\n",
              "          '<a target=\"_blank\" href=https://colab.research.google.com/notebooks/data_table.ipynb>data table notebook</a>'\n",
              "          + ' to learn more about interactive tables.';\n",
              "        element.innerHTML = '';\n",
              "        dataTable['output_type'] = 'display_data';\n",
              "        await google.colab.output.renderOutput(dataTable, element);\n",
              "        const docLink = document.createElement('div');\n",
              "        docLink.innerHTML = docLinkHtml;\n",
              "        element.appendChild(docLink);\n",
              "      }\n",
              "    </script>\n",
              "  </div>\n",
              "\n",
              "\n",
              "    <div id=\"df-0530c71a-513e-42d7-820e-aa3ed9e3907e\">\n",
              "      <button class=\"colab-df-quickchart\" onclick=\"quickchart('df-0530c71a-513e-42d7-820e-aa3ed9e3907e')\"\n",
              "                title=\"Suggest charts\"\n",
              "                style=\"display:none;\">\n",
              "\n",
              "<svg xmlns=\"http://www.w3.org/2000/svg\" height=\"24px\"viewBox=\"0 0 24 24\"\n",
              "     width=\"24px\">\n",
              "    <g>\n",
              "        <path d=\"M19 3H5c-1.1 0-2 .9-2 2v14c0 1.1.9 2 2 2h14c1.1 0 2-.9 2-2V5c0-1.1-.9-2-2-2zM9 17H7v-7h2v7zm4 0h-2V7h2v10zm4 0h-2v-4h2v4z\"/>\n",
              "    </g>\n",
              "</svg>\n",
              "      </button>\n",
              "\n",
              "<style>\n",
              "  .colab-df-quickchart {\n",
              "      --bg-color: #E8F0FE;\n",
              "      --fill-color: #1967D2;\n",
              "      --hover-bg-color: #E2EBFA;\n",
              "      --hover-fill-color: #174EA6;\n",
              "      --disabled-fill-color: #AAA;\n",
              "      --disabled-bg-color: #DDD;\n",
              "  }\n",
              "\n",
              "  [theme=dark] .colab-df-quickchart {\n",
              "      --bg-color: #3B4455;\n",
              "      --fill-color: #D2E3FC;\n",
              "      --hover-bg-color: #434B5C;\n",
              "      --hover-fill-color: #FFFFFF;\n",
              "      --disabled-bg-color: #3B4455;\n",
              "      --disabled-fill-color: #666;\n",
              "  }\n",
              "\n",
              "  .colab-df-quickchart {\n",
              "    background-color: var(--bg-color);\n",
              "    border: none;\n",
              "    border-radius: 50%;\n",
              "    cursor: pointer;\n",
              "    display: none;\n",
              "    fill: var(--fill-color);\n",
              "    height: 32px;\n",
              "    padding: 0;\n",
              "    width: 32px;\n",
              "  }\n",
              "\n",
              "  .colab-df-quickchart:hover {\n",
              "    background-color: var(--hover-bg-color);\n",
              "    box-shadow: 0 1px 2px rgba(60, 64, 67, 0.3), 0 1px 3px 1px rgba(60, 64, 67, 0.15);\n",
              "    fill: var(--button-hover-fill-color);\n",
              "  }\n",
              "\n",
              "  .colab-df-quickchart-complete:disabled,\n",
              "  .colab-df-quickchart-complete:disabled:hover {\n",
              "    background-color: var(--disabled-bg-color);\n",
              "    fill: var(--disabled-fill-color);\n",
              "    box-shadow: none;\n",
              "  }\n",
              "\n",
              "  .colab-df-spinner {\n",
              "    border: 2px solid var(--fill-color);\n",
              "    border-color: transparent;\n",
              "    border-bottom-color: var(--fill-color);\n",
              "    animation:\n",
              "      spin 1s steps(1) infinite;\n",
              "  }\n",
              "\n",
              "  @keyframes spin {\n",
              "    0% {\n",
              "      border-color: transparent;\n",
              "      border-bottom-color: var(--fill-color);\n",
              "      border-left-color: var(--fill-color);\n",
              "    }\n",
              "    20% {\n",
              "      border-color: transparent;\n",
              "      border-left-color: var(--fill-color);\n",
              "      border-top-color: var(--fill-color);\n",
              "    }\n",
              "    30% {\n",
              "      border-color: transparent;\n",
              "      border-left-color: var(--fill-color);\n",
              "      border-top-color: var(--fill-color);\n",
              "      border-right-color: var(--fill-color);\n",
              "    }\n",
              "    40% {\n",
              "      border-color: transparent;\n",
              "      border-right-color: var(--fill-color);\n",
              "      border-top-color: var(--fill-color);\n",
              "    }\n",
              "    60% {\n",
              "      border-color: transparent;\n",
              "      border-right-color: var(--fill-color);\n",
              "    }\n",
              "    80% {\n",
              "      border-color: transparent;\n",
              "      border-right-color: var(--fill-color);\n",
              "      border-bottom-color: var(--fill-color);\n",
              "    }\n",
              "    90% {\n",
              "      border-color: transparent;\n",
              "      border-bottom-color: var(--fill-color);\n",
              "    }\n",
              "  }\n",
              "</style>\n",
              "\n",
              "      <script>\n",
              "        async function quickchart(key) {\n",
              "          const quickchartButtonEl =\n",
              "            document.querySelector('#' + key + ' button');\n",
              "          quickchartButtonEl.disabled = true;  // To prevent multiple clicks.\n",
              "          quickchartButtonEl.classList.add('colab-df-spinner');\n",
              "          try {\n",
              "            const charts = await google.colab.kernel.invokeFunction(\n",
              "                'suggestCharts', [key], {});\n",
              "          } catch (error) {\n",
              "            console.error('Error during call to suggestCharts:', error);\n",
              "          }\n",
              "          quickchartButtonEl.classList.remove('colab-df-spinner');\n",
              "          quickchartButtonEl.classList.add('colab-df-quickchart-complete');\n",
              "        }\n",
              "        (() => {\n",
              "          let quickchartButtonEl =\n",
              "            document.querySelector('#df-0530c71a-513e-42d7-820e-aa3ed9e3907e button');\n",
              "          quickchartButtonEl.style.display =\n",
              "            google.colab.kernel.accessAllowed ? 'block' : 'none';\n",
              "        })();\n",
              "      </script>\n",
              "    </div>\n",
              "\n",
              "    </div>\n",
              "  </div>\n"
            ],
            "application/vnd.google.colaboratory.intrinsic+json": {
              "type": "dataframe",
              "variable_name": "combined_df",
              "summary": "{\n  \"name\": \"combined_df\",\n  \"rows\": 500,\n  \"fields\": [\n    {\n      \"column\": \"id\",\n      \"properties\": {\n        \"dtype\": \"number\",\n        \"std\": 144,\n        \"min\": 0,\n        \"max\": 499,\n        \"num_unique_values\": 500,\n        \"samples\": [\n          361,\n          73,\n          374\n        ],\n        \"semantic_type\": \"\",\n        \"description\": \"\"\n      }\n    },\n    {\n      \"column\": \"question\",\n      \"properties\": {\n        \"dtype\": \"string\",\n        \"num_unique_values\": 500,\n        \"samples\": [\n          \"What are the four main hypotheses that have been proposed to explain the formation of Artemis on Venus?\",\n          \"Could you please list all the key actions that Ready Fairfax asks residents and neighbors to take for emergency preparedness?\",\n          \"For how many years was Jubilee Showcase broadcast on TV?\"\n        ],\n        \"semantic_type\": \"\",\n        \"description\": \"\"\n      }\n    },\n    {\n      \"column\": \"passages\",\n      \"properties\": {\n        \"dtype\": \"object\",\n        \"semantic_type\": \"\",\n        \"description\": \"\"\n      }\n    },\n    {\n      \"column\": \"final_prompt\",\n      \"properties\": {\n        \"dtype\": \"string\",\n        \"num_unique_values\": 500,\n        \"samples\": [\n          \"Answer the question using only the context below. 
Do not make up any new information.\\nIf no part of the answer is found in the context, respond only with: 'I don't know.'\\nIf only part of the answer is found, include that part in a complete sentence that uses the phrasing of the question, and state that the rest is not available in the context.\\nIf the full answer is found, respond with a complete sentence that includes the phrasing of the question.\\n\\nContext:\\nThe four main hypotheses proposed to explain the formation of Artemis on Venus are: H1, Artemis Chasma represents a zone of northwest-directed convergence and subduction; H2, Artemis consists of a composite structure with a part of its interior region marking the exposure of deformed ductile deep-crustal rocks analogous to a terrestrial metamorphic core complex; H3, Artemis reflects the surface expression of an ancient (>3.5 Ga) huge bolide impact event on cold strong lithosphere; and H4, Artemis marks the surface expression of a deep mantle plume.\\n\\nQuestion: What are the four main hypotheses that have been proposed to explain the formation of Artemis on Venus?\\nAnswer:\",\n          \"Answer the question using only the context below. Do not make up any new information.\\nIf no part of the answer is found in the context, respond only with: 'I don't know.'\\nIf only part of the answer is found, include that part in a complete sentence that uses the phrasing of the question, and state that the rest is not available in the context.\\nIf the full answer is found, respond with a complete sentence that includes the phrasing of the question.\\n\\nContext:\\nReady Fairfax asks residents and neighbors to take several key actions for emergency preparedness, including talking to neighbors about wildfire safety, planning neighborhood cooperation after a wildfire, listing neighbors' skills, considering assistance for those with special needs, and making plans for children. 
Additionally, they recommend following local burning laws by contacting local authorities for a burning permit before disposing of debris.\\n---\\nReady Fairfax asks residents and neighbors to take the following key actions for emergency preparedness: know your building inside and out, ensure all fire protection systems are working properly, have an evacuation plan posted in every room and practice it with everyone, and report fire hazards and correct them.\\n---\\nReady Fairfax asks residents and neighbors to take several key actions for emergency preparedness, including replacing locks when keys are lost or when moving into a new home, having and practicing a safety plan for fires or other emergencies, having a first aid kit on hand, having important emergency numbers handy, asking for proper identification from repair and delivery people before giving them access, installing locks on all doors and windows and using them, closing blinds and curtains at dusk, knowing neighbors and having their phone numbers handy, becoming involved in Neighborhood Watch, avoiding unsafe situations and being alone with unfamiliar people, maintaining good visibility around the home, ensuring all areas are well lit, installing a peep-hole in the front door, and teaching children not to open the door to anyone or disclose personal information over the phone.\\n---\\nReady Fairfax asks residents and neighbors to take the following key actions for emergency preparedness:\\n\\n1. Have an emergency plan that is written and practiced at home.\\n2. Have the necessary supplies in case you need to shelter-in-place or evacuate.\\n3. Know how to get information about the emergency.\\n4. Know how to support your community by helping your neighbors.\\n5. Make a family emergency plan.\\n6. Make a business preparedness plan.\\n7. Make a neighborhood preparedness plan.\\n8. Create an emergency supply kit.\\n9. Sign-up for your local jurisdictional emergency alerts.\\n10. 
Create a personal response plan.\\n11. Practice and maintain plans.\\n12. Purchase or make disaster supply kits (office, car, and personal go kit).\\n\\nThese actions are aimed at ensuring individuals and communities are ready to handle emergencies effectively.\\n\\nQuestion: Could you please list all the key actions that Ready Fairfax asks residents and neighbors to take for emergency preparedness?\\nAnswer:\",\n          \"Answer the question using only the context below. Do not make up any new information.\\nIf no part of the answer is found in the context, respond only with: 'I don't know.'\\nIf only part of the answer is found, include that part in a complete sentence that uses the phrasing of the question, and state that the rest is not available in the context.\\nIf the full answer is found, respond with a complete sentence that includes the phrasing of the question.\\n\\nContext:\\n\\\"Jubilee Showcase\\\" was broadcast on TV for 21 seasons, from 1963 to 1984.\\n\\nQuestion: For how many years was Jubilee Showcase broadcast on TV?\\nAnswer:\"\n        ],\n        \"semantic_type\": \"\",\n        \"description\": \"\"\n      }\n    },\n    {\n      \"column\": \"answer\",\n      \"properties\": {\n        \"dtype\": \"string\",\n        \"num_unique_values\": 392,\n        \"samples\": [\n          \"The aspects of lake productivity measured include turbidity, total suspended solids, and algae, particularly chlorophyll pigments. Water visibility is monitored through turbidity and water clarity.\",\n          \"The number of pregnant women carrying Group B Strep bacteria can vary, but in the specific study mentioned, approximately 66 women out of 549 carried the bacteria. 
However, this number is specific to the study population and cannot be generalized without additional context or data.\",\n          \"The early cricket radio commentators included Darren Lehmann, Ian Chappell, Glenn McGrath, Mike Hussey, Carl Rackemann, Tom Moody, John Emburey, Brad Hodge, Karl Langdon, Julia Price, Sarah Aley, Holly Ferling, Kristen Beams, Alex Blackwell, Bruce Eva, Tim Lane, David Morrow, and Matt Thompson. Additionally, EW Swanton, Rex Alston, John Arlott, Ken Ablack, Robert Hudson, Peter West, Alan Gibson, Peter Cranmer, Brian Johnston, Neil Durden-Smith, Don Mosey, Tony Lewis, Jack Bannister, and Arlo White were also early cricket radio commentators.\"\n        ],\n        \"semantic_type\": \"\",\n        \"description\": \"\"\n      }\n    }\n  ]\n}"
            }
          },
          "metadata": {},
          "execution_count": 68
        }
      ]
    }
  ]
}
