{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 151,
   "metadata": {},
   "outputs": [],
   "source": [
    "import jsonlines\n",
    "from collections import defaultdict\n",
    "import openai\n",
    "import json\n",
    "import regex\n",
    "from typing import Dict, Any, Tuple\n",
    "from tqdm import tqdm \n",
    "import numpy as np\n",
    "from matplotlib import pyplot as plt\n",
    "import seaborn as sns"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 152,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Load the per-year JSONL splits into dicts keyed by year.\n",
    "train_data = defaultdict(list)\n",
    "test_data = defaultdict(list)\n",
    "\n",
    "# Training data exists only for the fine-tuning cutoff years.\n",
    "for year in [1998, 2001, 2004, 2007, 2010, 2013, 2016]:\n",
    "    with jsonlines.open(f'year_test/year_{year}.jsonl') as f:\n",
    "        for line in f.iter():\n",
    "            train_data[year].append(line)\n",
    "\n",
    "\n",
    "# Test data covers every year from 1998 through 2022 (range end is exclusive).\n",
    "for year in range(1998, 2023):\n",
    "    with jsonlines.open(f'year_test/year_test_{year}.jsonl') as f:\n",
    "        for line in f.iter():\n",
    "            test_data[year].append(line)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "import os\n",
    "\n",
    "# Never hardcode secrets in a notebook (they end up committed with the file).\n",
    "# Read the key from the environment; fall back to the old placeholder so\n",
    "# existing behavior is unchanged when the variable is not set.\n",
    "openai.api_key = os.environ.get('OPENAI_API_KEY', 'your_api_key')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Have to modify to your fine-tuned models!\n",
    "# Maps each training-cutoff year to the id of the gpt-4o-2024-08-06 model\n",
    "# fine-tuned on that year's split (same years as train_data above).\n",
    "\n",
    "ft_model = {\n",
    "    1998: 'ft:gpt-4o-2024-08-06:personal:year-1998:AdGh1Inl',\n",
    "    2001: 'ft:gpt-4o-2024-08-06:personal:year-2001:AdHbsodp',\n",
    "    2004: 'ft:gpt-4o-2024-08-06:personal:year-2004:AdMLbkJa',\n",
    "    2007: 'ft:gpt-4o-2024-08-06:personal:year-2007:AdI6ZBwG',\n",
    "    2010: 'ft:gpt-4o-2024-08-06:personal:year-2010:AdMHGRYF',\n",
    "    2013: 'ft:gpt-4o-2024-08-06:personal:year-2013:AdM8yQus',\n",
    "    2016: 'ft:gpt-4o-2024-08-06:personal:year-2016:AdIC2TYu',\n",
    "}"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Conversion factors from a duration-unit string to hours.\n",
    "# The original table mixed singular, plural, and abbreviated forms\n",
    "# inconsistently (it had 'days'/'day' but only 'minute', and 's' with no\n",
    "# 'second'/'seconds'), so a lookup could raise KeyError depending on how\n",
    "# the unit was spelled. All synonym forms are now listed; existing keys\n",
    "# and values are unchanged.\n",
    "time_dict = {\n",
    "    'days': 24,\n",
    "    'day': 24,\n",
    "    'd': 24,\n",
    "    'weeks': 24*7,\n",
    "    'week': 24*7,\n",
    "    'hours': 1,\n",
    "    'hour': 1,\n",
    "    'h': 1,\n",
    "    'minutes': 1/60,\n",
    "    'minute': 1/60,\n",
    "    'min': 1/60,\n",
    "    'seconds': 1/3600,\n",
    "    'second': 1/3600,\n",
    "    'sec': 1/3600,\n",
    "    's': 1/3600\n",
    "}"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [],
   "source": [
    "def get_answers(model: str, message: Dict[str, Any], temperature: float = 0.0) -> Tuple[Dict[str, Any], Dict[str, Any]]:\n",
    "    \"\"\"Query a chat model with the first two turns of a test example.\n",
    "\n",
    "    Args:\n",
    "        model: OpenAI model name or fine-tuned model id.\n",
    "        message: one JSONL record with a 'messages' list of at least three\n",
    "            turns; only the first two (presumably system + user) are sent,\n",
    "            and the third turn's content holds the ground-truth answer.\n",
    "        temperature: sampling temperature (0.0 for near-deterministic output).\n",
    "\n",
    "    Returns:\n",
    "        (true, prediction): the ground-truth dict parsed from the third turn\n",
    "        and the dict parsed from the model's reply. Raises\n",
    "        json.JSONDecodeError if either content is not valid JSON.\n",
    "    \"\"\"\n",
    "    # Send only the prompt turns; the answer turn must not be shown to the model.\n",
    "    completion = openai.ChatCompletion.create(\n",
    "        model=model,\n",
    "        messages=message['messages'][:2],\n",
    "        temperature=temperature\n",
    "    )\n",
    "\n",
    "    # Both the model output and the reference are JSON-encoded strings.\n",
    "    prediction = json.loads(completion.choices[0].message['content'])\n",
    "    true = json.loads(message['messages'][2]['content'])\n",
    "\n",
    "    return true, prediction"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 1998-2017\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "  6%|▌         | 24/429 [01:24<19:07,  2.83s/it]  "
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-1998-23 :  The server had an error while processing your request. Sorry about that! {\n",
      "  \"error\": {\n",
      "    \"message\": \"The server had an error while processing your request. Sorry about that!\",\n",
      "    \"type\": \"server_error\",\n",
      "    \"param\": null,\n",
      "    \"code\": null\n",
      "  }\n",
      "} 500 {'error': {'message': 'The server had an error while processing your request. Sorry about that!', 'type': 'server_error', 'param': None, 'code': None}} {'Date': 'Thu, 12 Dec 2024 03:30:55 GMT', 'Content-Type': 'application/json', 'Content-Length': '175', 'Connection': 'keep-alive', 'access-control-expose-headers': 'X-Request-ID', 'openai-organization': 'user-bh02lranbcmbofrotobh9bep', 'openai-processing-ms': '2266', 'openai-version': '2020-10-01', 'x-ratelimit-limit-requests': '10000', 'x-ratelimit-limit-tokens': '30000000', 'x-ratelimit-remaining-requests': '9999', 'x-ratelimit-remaining-tokens': '29999874', 'x-ratelimit-reset-requests': '6ms', 'x-ratelimit-reset-tokens': '0s', 'x-request-id': 'req_fbd8bba69e6718c8f64f88c4bc119c92', 'strict-transport-security': 'max-age=31536000; includeSubDomains; preload', 'CF-Cache-Status': 'DYNAMIC', 'X-Content-Type-Options': 'nosniff', 'Server': 'cloudflare', 'CF-RAY': '8f0aa369dfeaeaa7-ICN', 'alt-svc': 'h3=\":443\"; ma=86400'}\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "  9%|▉         | 38/429 [02:58<28:35,  4.39s/it]  "
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-1998-37 :  The server had an error while processing your request. Sorry about that! {\n",
      "  \"error\": {\n",
      "    \"message\": \"The server had an error while processing your request. Sorry about that!\",\n",
      "    \"type\": \"server_error\",\n",
      "    \"param\": null,\n",
      "    \"code\": null\n",
      "  }\n",
      "} 500 {'error': {'message': 'The server had an error while processing your request. Sorry about that!', 'type': 'server_error', 'param': None, 'code': None}} {'Date': 'Thu, 12 Dec 2024 03:32:30 GMT', 'Content-Type': 'application/json', 'Content-Length': '175', 'Connection': 'keep-alive', 'access-control-expose-headers': 'X-Request-ID', 'openai-organization': 'user-bh02lranbcmbofrotobh9bep', 'openai-processing-ms': '6928', 'openai-version': '2020-10-01', 'x-ratelimit-limit-requests': '10000', 'x-ratelimit-limit-tokens': '30000000', 'x-ratelimit-remaining-requests': '9999', 'x-ratelimit-remaining-tokens': '29999879', 'x-ratelimit-reset-requests': '6ms', 'x-ratelimit-reset-tokens': '0s', 'x-request-id': 'req_181ff6dd0202c8234f9b887cd7b21901', 'strict-transport-security': 'max-age=31536000; includeSubDomains; preload', 'CF-Cache-Status': 'DYNAMIC', 'X-Content-Type-Options': 'nosniff', 'Server': 'cloudflare', 'CF-RAY': '8f0aa59aaed8eaa7-ICN', 'alt-svc': 'h3=\":443\"; ma=86400'}\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 10%|█         | 43/429 [03:37<58:56,  9.16s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-1998-42 :  The server had an error while processing your request. Sorry about that! {\n",
      "  \"error\": {\n",
      "    \"message\": \"The server had an error while processing your request. Sorry about that!\",\n",
      "    \"type\": \"server_error\",\n",
      "    \"param\": null,\n",
      "    \"code\": null\n",
      "  }\n",
      "} 500 {'error': {'message': 'The server had an error while processing your request. Sorry about that!', 'type': 'server_error', 'param': None, 'code': None}} {'Date': 'Thu, 12 Dec 2024 03:33:09 GMT', 'Content-Type': 'application/json', 'Content-Length': '175', 'Connection': 'keep-alive', 'access-control-expose-headers': 'X-Request-ID', 'openai-organization': 'user-bh02lranbcmbofrotobh9bep', 'openai-processing-ms': '19597', 'openai-version': '2020-10-01', 'x-ratelimit-limit-requests': '10000', 'x-ratelimit-limit-tokens': '30000000', 'x-ratelimit-remaining-requests': '9999', 'x-ratelimit-remaining-tokens': '29999879', 'x-ratelimit-reset-requests': '6ms', 'x-ratelimit-reset-tokens': '0s', 'x-request-id': 'req_c07e7541cf92c75bde841fff926204cd', 'strict-transport-security': 'max-age=31536000; includeSubDomains; preload', 'CF-Cache-Status': 'DYNAMIC', 'X-Content-Type-Options': 'nosniff', 'Server': 'cloudflare', 'CF-RAY': '8f0aa63f2a1cc43a-ICN', 'alt-svc': 'h3=\":443\"; ma=86400'}\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 14%|█▍        | 60/429 [04:39<51:57,  8.45s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-1998-59 :  The server had an error while processing your request. Sorry about that! {\n",
      "  \"error\": {\n",
      "    \"message\": \"The server had an error while processing your request. Sorry about that!\",\n",
      "    \"type\": \"server_error\",\n",
      "    \"param\": null,\n",
      "    \"code\": null\n",
      "  }\n",
      "} 500 {'error': {'message': 'The server had an error while processing your request. Sorry about that!', 'type': 'server_error', 'param': None, 'code': None}} {'Date': 'Thu, 12 Dec 2024 03:34:10 GMT', 'Content-Type': 'application/json', 'Content-Length': '175', 'Connection': 'keep-alive', 'access-control-expose-headers': 'X-Request-ID', 'openai-organization': 'user-bh02lranbcmbofrotobh9bep', 'openai-processing-ms': '22386', 'openai-version': '2020-10-01', 'x-ratelimit-limit-requests': '10000', 'x-ratelimit-limit-tokens': '30000000', 'x-ratelimit-remaining-requests': '9999', 'x-ratelimit-remaining-tokens': '29999885', 'x-ratelimit-reset-requests': '6ms', 'x-ratelimit-reset-tokens': '0s', 'x-request-id': 'req_248201472f9293c6c9b29ebad5778dec', 'strict-transport-security': 'max-age=31536000; includeSubDomains; preload', 'CF-Cache-Status': 'DYNAMIC', 'X-Content-Type-Options': 'nosniff', 'Server': 'cloudflare', 'CF-RAY': '8f0aa7ad3f00c43a-ICN', 'alt-svc': 'h3=\":443\"; ma=86400'}\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 18%|█▊        | 77/429 [05:31<16:20,  2.79s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-1998-76 :  The server had an error while processing your request. Sorry about that! {\n",
      "  \"error\": {\n",
      "    \"message\": \"The server had an error while processing your request. Sorry about that!\",\n",
      "    \"type\": \"server_error\",\n",
      "    \"param\": null,\n",
      "    \"code\": null\n",
      "  }\n",
      "} 500 {'error': {'message': 'The server had an error while processing your request. Sorry about that!', 'type': 'server_error', 'param': None, 'code': None}} {'Date': 'Thu, 12 Dec 2024 03:35:02 GMT', 'Content-Type': 'application/json', 'Content-Length': '175', 'Connection': 'keep-alive', 'access-control-expose-headers': 'X-Request-ID', 'openai-organization': 'user-bh02lranbcmbofrotobh9bep', 'openai-processing-ms': '3004', 'openai-version': '2020-10-01', 'x-ratelimit-limit-requests': '10000', 'x-ratelimit-limit-tokens': '30000000', 'x-ratelimit-remaining-requests': '9999', 'x-ratelimit-remaining-tokens': '29999877', 'x-ratelimit-reset-requests': '6ms', 'x-ratelimit-reset-tokens': '0s', 'x-request-id': 'req_50390ad4816b5e3794f38b11ad7d75ca', 'strict-transport-security': 'max-age=31536000; includeSubDomains; preload', 'CF-Cache-Status': 'DYNAMIC', 'X-Content-Type-Options': 'nosniff', 'Server': 'cloudflare', 'CF-RAY': '8f0aa96dbd23c43a-ICN', 'alt-svc': 'h3=\":443\"; ma=86400'}\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 77%|███████▋  | 330/429 [16:54<04:11,  2.54s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-1998-329 :  The server had an error while processing your request. Sorry about that! {\n",
      "  \"error\": {\n",
      "    \"message\": \"The server had an error while processing your request. Sorry about that!\",\n",
      "    \"type\": \"server_error\",\n",
      "    \"param\": null,\n",
      "    \"code\": null\n",
      "  }\n",
      "} 500 {'error': {'message': 'The server had an error while processing your request. Sorry about that!', 'type': 'server_error', 'param': None, 'code': None}} {'Date': 'Thu, 12 Dec 2024 03:46:25 GMT', 'Content-Type': 'application/json', 'Content-Length': '175', 'Connection': 'keep-alive', 'access-control-expose-headers': 'X-Request-ID', 'openai-organization': 'user-bh02lranbcmbofrotobh9bep', 'openai-processing-ms': '1439', 'openai-version': '2020-10-01', 'x-ratelimit-limit-requests': '10000', 'x-ratelimit-limit-tokens': '30000000', 'x-ratelimit-remaining-requests': '9999', 'x-ratelimit-remaining-tokens': '29999882', 'x-ratelimit-reset-requests': '6ms', 'x-ratelimit-reset-tokens': '0s', 'x-request-id': 'req_c336397195550d42ae60cf427f763c79', 'strict-transport-security': 'max-age=31536000; includeSubDomains; preload', 'CF-Cache-Status': 'DYNAMIC', 'X-Content-Type-Options': 'nosniff', 'Server': 'cloudflare', 'CF-RAY': '8f0aba211b83ea9b-ICN', 'alt-svc': 'h3=\":443\"; ma=86400'}\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 86%|████████▌ | 370/429 [18:37<03:51,  3.92s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-1998-369 :  The server had an error while processing your request. Sorry about that! {\n",
      "  \"error\": {\n",
      "    \"message\": \"The server had an error while processing your request. Sorry about that!\",\n",
      "    \"type\": \"server_error\",\n",
      "    \"param\": null,\n",
      "    \"code\": null\n",
      "  }\n",
      "} 500 {'error': {'message': 'The server had an error while processing your request. Sorry about that!', 'type': 'server_error', 'param': None, 'code': None}} {'Date': 'Thu, 12 Dec 2024 03:48:08 GMT', 'Content-Type': 'application/json', 'Content-Length': '175', 'Connection': 'keep-alive', 'access-control-expose-headers': 'X-Request-ID', 'openai-organization': 'user-bh02lranbcmbofrotobh9bep', 'openai-processing-ms': '6943', 'openai-version': '2020-10-01', 'x-ratelimit-limit-requests': '10000', 'x-ratelimit-limit-tokens': '30000000', 'x-ratelimit-remaining-requests': '9999', 'x-ratelimit-remaining-tokens': '29999880', 'x-ratelimit-reset-requests': '6ms', 'x-ratelimit-reset-tokens': '0s', 'x-request-id': 'req_5e9544c781903edc63e47e8e750e440b', 'strict-transport-security': 'max-age=31536000; includeSubDomains; preload', 'CF-Cache-Status': 'DYNAMIC', 'X-Content-Type-Options': 'nosniff', 'Server': 'cloudflare', 'CF-RAY': '8f0abc843de03085-ICN', 'alt-svc': 'h3=\":443\"; ma=86400'}\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 86%|████████▋ | 371/429 [18:39<03:06,  3.21s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-1998-370 :  The server had an error while processing your request. Sorry about that! {\n",
      "  \"error\": {\n",
      "    \"message\": \"The server had an error while processing your request. Sorry about that!\",\n",
      "    \"type\": \"server_error\",\n",
      "    \"param\": null,\n",
      "    \"code\": null\n",
      "  }\n",
      "} 500 {'error': {'message': 'The server had an error while processing your request. Sorry about that!', 'type': 'server_error', 'param': None, 'code': None}} {'Date': 'Thu, 12 Dec 2024 03:48:10 GMT', 'Content-Type': 'application/json', 'Content-Length': '175', 'Connection': 'keep-alive', 'access-control-expose-headers': 'X-Request-ID', 'openai-organization': 'user-bh02lranbcmbofrotobh9bep', 'openai-processing-ms': '1161', 'openai-version': '2020-10-01', 'x-ratelimit-limit-requests': '10000', 'x-ratelimit-limit-tokens': '30000000', 'x-ratelimit-remaining-requests': '9999', 'x-ratelimit-remaining-tokens': '29999880', 'x-ratelimit-reset-requests': '6ms', 'x-ratelimit-reset-tokens': '0s', 'x-request-id': 'req_d2be6d4da2604452911224a9506951b8', 'strict-transport-security': 'max-age=31536000; includeSubDomains; preload', 'CF-Cache-Status': 'DYNAMIC', 'X-Content-Type-Options': 'nosniff', 'Server': 'cloudflare', 'CF-RAY': '8f0abcb368843085-ICN', 'alt-svc': 'h3=\":443\"; ma=86400'}\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 92%|█████████▏| 393/429 [19:50<02:00,  3.35s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-1998-392 :  The server had an error while processing your request. Sorry about that! {\n",
      "  \"error\": {\n",
      "    \"message\": \"The server had an error while processing your request. Sorry about that!\",\n",
      "    \"type\": \"server_error\",\n",
      "    \"param\": null,\n",
      "    \"code\": null\n",
      "  }\n",
      "} 500 {'error': {'message': 'The server had an error while processing your request. Sorry about that!', 'type': 'server_error', 'param': None, 'code': None}} {'Date': 'Thu, 12 Dec 2024 03:49:21 GMT', 'Content-Type': 'application/json', 'Content-Length': '175', 'Connection': 'keep-alive', 'access-control-expose-headers': 'X-Request-ID', 'openai-organization': 'user-bh02lranbcmbofrotobh9bep', 'openai-processing-ms': '591', 'openai-version': '2020-10-01', 'x-ratelimit-limit-requests': '10000', 'x-ratelimit-limit-tokens': '30000000', 'x-ratelimit-remaining-requests': '9999', 'x-ratelimit-remaining-tokens': '29999870', 'x-ratelimit-reset-requests': '6ms', 'x-ratelimit-reset-tokens': '0s', 'x-request-id': 'req_f927240f975112ab5522dbf21025d1cd', 'strict-transport-security': 'max-age=31536000; includeSubDomains; preload', 'CF-Cache-Status': 'DYNAMIC', 'X-Content-Type-Options': 'nosniff', 'Server': 'cloudflare', 'CF-RAY': '8f0abe727d7f3085-ICN', 'alt-svc': 'h3=\":443\"; ma=86400'}\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 429/429 [21:31<00:00,  3.01s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 1998-2018\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 31%|███       | 108/347 [05:07<12:59,  3.26s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-1998-107 :  The server had an error while processing your request. Sorry about that! {\n",
      "  \"error\": {\n",
      "    \"message\": \"The server had an error while processing your request. Sorry about that!\",\n",
      "    \"type\": \"server_error\",\n",
      "    \"param\": null,\n",
      "    \"code\": null\n",
      "  }\n",
      "} 500 {'error': {'message': 'The server had an error while processing your request. Sorry about that!', 'type': 'server_error', 'param': None, 'code': None}} {'Date': 'Thu, 12 Dec 2024 03:56:09 GMT', 'Content-Type': 'application/json', 'Content-Length': '175', 'Connection': 'keep-alive', 'access-control-expose-headers': 'X-Request-ID', 'openai-organization': 'user-bh02lranbcmbofrotobh9bep', 'openai-processing-ms': '738', 'openai-version': '2020-10-01', 'x-ratelimit-limit-requests': '10000', 'x-ratelimit-limit-tokens': '30000000', 'x-ratelimit-remaining-requests': '9999', 'x-ratelimit-remaining-tokens': '29999877', 'x-ratelimit-reset-requests': '6ms', 'x-ratelimit-reset-tokens': '0s', 'x-request-id': 'req_3f588c8717e342bea3423e6019b572db', 'strict-transport-security': 'max-age=31536000; includeSubDomains; preload', 'CF-Cache-Status': 'DYNAMIC', 'X-Content-Type-Options': 'nosniff', 'Server': 'cloudflare', 'CF-RAY': '8f0ac86a6b2bea99-ICN', 'alt-svc': 'h3=\":443\"; ma=86400'}\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 53%|█████▎    | 184/347 [07:51<05:34,  2.05s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-1998-183 :  The server had an error while processing your request. Sorry about that! {\n",
      "  \"error\": {\n",
      "    \"message\": \"The server had an error while processing your request. Sorry about that!\",\n",
      "    \"type\": \"server_error\",\n",
      "    \"param\": null,\n",
      "    \"code\": null\n",
      "  }\n",
      "} 500 {'error': {'message': 'The server had an error while processing your request. Sorry about that!', 'type': 'server_error', 'param': None, 'code': None}} {'Date': 'Thu, 12 Dec 2024 03:58:53 GMT', 'Content-Type': 'application/json', 'Content-Length': '175', 'Connection': 'keep-alive', 'access-control-expose-headers': 'X-Request-ID', 'openai-organization': 'user-bh02lranbcmbofrotobh9bep', 'openai-processing-ms': '1427', 'openai-version': '2020-10-01', 'x-ratelimit-limit-requests': '10000', 'x-ratelimit-limit-tokens': '30000000', 'x-ratelimit-remaining-requests': '9999', 'x-ratelimit-remaining-tokens': '29999871', 'x-ratelimit-reset-requests': '6ms', 'x-ratelimit-reset-tokens': '0s', 'x-request-id': 'req_d32fd9ad683bdf4617c3a84f4c350877', 'strict-transport-security': 'max-age=31536000; includeSubDomains; preload', 'CF-Cache-Status': 'DYNAMIC', 'X-Content-Type-Options': 'nosniff', 'Server': 'cloudflare', 'CF-RAY': '8f0acc66e93dea1c-ICN', 'alt-svc': 'h3=\":443\"; ma=86400'}\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 56%|█████▌    | 193/347 [08:13<04:46,  1.86s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-1998-192 :  The server had an error while processing your request. Sorry about that! {\n",
      "  \"error\": {\n",
      "    \"message\": \"The server had an error while processing your request. Sorry about that!\",\n",
      "    \"type\": \"server_error\",\n",
      "    \"param\": null,\n",
      "    \"code\": null\n",
      "  }\n",
      "} 500 {'error': {'message': 'The server had an error while processing your request. Sorry about that!', 'type': 'server_error', 'param': None, 'code': None}} {'Date': 'Thu, 12 Dec 2024 03:59:15 GMT', 'Content-Type': 'application/json', 'Content-Length': '175', 'Connection': 'keep-alive', 'access-control-expose-headers': 'X-Request-ID', 'openai-organization': 'user-bh02lranbcmbofrotobh9bep', 'openai-processing-ms': '696', 'openai-version': '2020-10-01', 'x-ratelimit-limit-requests': '10000', 'x-ratelimit-limit-tokens': '30000000', 'x-ratelimit-remaining-requests': '9999', 'x-ratelimit-remaining-tokens': '29999879', 'x-ratelimit-reset-requests': '6ms', 'x-ratelimit-reset-tokens': '0s', 'x-request-id': 'req_76d37161fa9032aeb076d7eb1bf8f62c', 'strict-transport-security': 'max-age=31536000; includeSubDomains; preload', 'CF-Cache-Status': 'DYNAMIC', 'X-Content-Type-Options': 'nosniff', 'Server': 'cloudflare', 'CF-RAY': '8f0accf669bbea1c-ICN', 'alt-svc': 'h3=\":443\"; ma=86400'}\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 59%|█████▉    | 206/347 [08:45<04:54,  2.09s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-1998-205 :  The server had an error while processing your request. Sorry about that! {\n",
      "  \"error\": {\n",
      "    \"message\": \"The server had an error while processing your request. Sorry about that!\",\n",
      "    \"type\": \"server_error\",\n",
      "    \"param\": null,\n",
      "    \"code\": null\n",
      "  }\n",
      "} 500 {'error': {'message': 'The server had an error while processing your request. Sorry about that!', 'type': 'server_error', 'param': None, 'code': None}} {'Date': 'Thu, 12 Dec 2024 03:59:47 GMT', 'Content-Type': 'application/json', 'Content-Length': '175', 'Connection': 'keep-alive', 'access-control-expose-headers': 'X-Request-ID', 'openai-organization': 'user-bh02lranbcmbofrotobh9bep', 'openai-processing-ms': '1408', 'openai-version': '2020-10-01', 'x-ratelimit-limit-requests': '10000', 'x-ratelimit-limit-tokens': '30000000', 'x-ratelimit-remaining-requests': '9999', 'x-ratelimit-remaining-tokens': '29999879', 'x-ratelimit-reset-requests': '6ms', 'x-ratelimit-reset-tokens': '0s', 'x-request-id': 'req_45db4974c5a41c5c6c2cb4fe5c11cdec', 'strict-transport-security': 'max-age=31536000; includeSubDomains; preload', 'CF-Cache-Status': 'DYNAMIC', 'X-Content-Type-Options': 'nosniff', 'Server': 'cloudflare', 'CF-RAY': '8f0acdb6ae62ea1c-ICN', 'alt-svc': 'h3=\":443\"; ma=86400'}\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 61%|██████    | 212/347 [08:55<04:03,  1.81s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-1998-211 :  The server had an error while processing your request. Sorry about that! {\n",
      "  \"error\": {\n",
      "    \"message\": \"The server had an error while processing your request. Sorry about that!\",\n",
      "    \"type\": \"server_error\",\n",
      "    \"param\": null,\n",
      "    \"code\": null\n",
      "  }\n",
      "} 500 {'error': {'message': 'The server had an error while processing your request. Sorry about that!', 'type': 'server_error', 'param': None, 'code': None}} {'Date': 'Thu, 12 Dec 2024 03:59:58 GMT', 'Content-Type': 'application/json', 'Content-Length': '175', 'Connection': 'keep-alive', 'access-control-expose-headers': 'X-Request-ID', 'openai-organization': 'user-bh02lranbcmbofrotobh9bep', 'openai-processing-ms': '1337', 'openai-version': '2020-10-01', 'x-ratelimit-limit-requests': '10000', 'x-ratelimit-limit-tokens': '30000000', 'x-ratelimit-remaining-requests': '9999', 'x-ratelimit-remaining-tokens': '29999876', 'x-ratelimit-reset-requests': '6ms', 'x-ratelimit-reset-tokens': '0s', 'x-request-id': 'req_6242c490132bd70183fc1c0f00165511', 'strict-transport-security': 'max-age=31536000; includeSubDomains; preload', 'CF-Cache-Status': 'DYNAMIC', 'X-Content-Type-Options': 'nosniff', 'Server': 'cloudflare', 'CF-RAY': '8f0acdfacef2ea1c-ICN', 'alt-svc': 'h3=\":443\"; ma=86400'}\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 75%|███████▍  | 259/347 [12:18<42:35, 29.04s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-1998-258 :  The server had an error while processing your request. Sorry about that! {\n",
      "  \"error\": {\n",
      "    \"message\": \"The server had an error while processing your request. Sorry about that!\",\n",
      "    \"type\": \"server_error\",\n",
      "    \"param\": null,\n",
      "    \"code\": null\n",
      "  }\n",
      "} 500 {'error': {'message': 'The server had an error while processing your request. Sorry about that!', 'type': 'server_error', 'param': None, 'code': None}} {'Date': 'Thu, 12 Dec 2024 04:03:20 GMT', 'Content-Type': 'application/json', 'Content-Length': '175', 'Connection': 'keep-alive', 'access-control-expose-headers': 'X-Request-ID', 'openai-organization': 'user-bh02lranbcmbofrotobh9bep', 'openai-processing-ms': '90704', 'openai-version': '2020-10-01', 'x-ratelimit-limit-requests': '10000', 'x-ratelimit-limit-tokens': '30000000', 'x-ratelimit-remaining-requests': '9999', 'x-ratelimit-remaining-tokens': '29999876', 'x-ratelimit-reset-requests': '6ms', 'x-ratelimit-reset-tokens': '0s', 'x-request-id': 'req_5f0d0051263d2dcecfd2230f1cc08eee', 'strict-transport-security': 'max-age=31536000; includeSubDomains; preload', 'CF-Cache-Status': 'DYNAMIC', 'X-Content-Type-Options': 'nosniff', 'Server': 'cloudflare', 'CF-RAY': '8f0ad0bc9f4aeaa3-ICN', 'alt-svc': 'h3=\":443\"; ma=86400'}\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 347/347 [16:23<00:00,  2.83s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 1998-2019\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 254/254 [11:44<00:00,  2.77s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 1998-2020\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 18/18 [00:41<00:00,  2.30s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 1998-2021\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 20/20 [00:42<00:00,  2.15s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 1998-2022\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 8/8 [00:17<00:00,  2.14s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2001-2017\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 429/429 [21:29<00:00,  3.00s/it] \n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2001-2018\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 347/347 [15:41<00:00,  2.71s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2001-2019\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 254/254 [10:54<00:00,  2.58s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2001-2020\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 18/18 [03:50<00:00, 12.83s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2001-2021\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 20/20 [00:46<00:00,  2.32s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2001-2022\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 8/8 [00:25<00:00,  3.24s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2004-2017\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 24%|██▎       | 101/429 [12:06<09:55,  1.82s/it] "
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2004-100 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 28%|██▊       | 121/429 [12:48<08:59,  1.75s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2004-120 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 28%|██▊       | 122/429 [12:48<07:16,  1.42s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2004-121 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 51%|█████     | 219/429 [18:56<07:15,  2.07s/it]  "
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2004-218 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 429/429 [27:01<00:00,  3.78s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2004-2018\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 71%|███████   | 247/347 [09:49<02:47,  1.67s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2004-246 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 71%|███████▏  | 248/347 [09:49<02:23,  1.45s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2004-247 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 72%|███████▏  | 249/347 [09:51<02:11,  1.34s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2004-248 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 72%|███████▏  | 250/347 [09:52<02:25,  1.50s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2004-249 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 95%|█████████▌| 330/347 [12:46<00:42,  2.50s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2004-329 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 347/347 [13:22<00:00,  2.31s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2004-2019\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 254/254 [10:21<00:00,  2.45s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2004-2020\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 17%|█▋        | 3/18 [00:04<00:21,  1.44s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2004-2 :  The server had an error processing your request. Sorry about that! You can retry your request, or contact us through our help center at help.openai.com if you keep seeing this error. (Please include the request ID req_ee11d6e93fbc8f034b768352927f1c3c in your email.) {\n",
      "  \"error\": {\n",
      "    \"message\": \"The server had an error processing your request. Sorry about that! You can retry your request, or contact us through our help center at help.openai.com if you keep seeing this error. (Please include the request ID req_ee11d6e93fbc8f034b768352927f1c3c in your email.)\",\n",
      "    \"type\": \"server_error\",\n",
      "    \"param\": null,\n",
      "    \"code\": null\n",
      "  }\n",
      "} 500 {'error': {'message': 'The server had an error processing your request. Sorry about that! You can retry your request, or contact us through our help center at help.openai.com if you keep seeing this error. (Please include the request ID req_ee11d6e93fbc8f034b768352927f1c3c in your email.)', 'type': 'server_error', 'param': None, 'code': None}} {'Date': 'Thu, 12 Dec 2024 06:04:50 GMT', 'Content-Type': 'application/json', 'Content-Length': '369', 'Connection': 'keep-alive', 'access-control-expose-headers': 'X-Request-ID', 'openai-processing-ms': '197', 'x-ratelimit-limit-requests': '10000', 'x-ratelimit-limit-tokens': '30000000', 'x-ratelimit-remaining-requests': '9999', 'x-ratelimit-remaining-tokens': '29999868', 'x-ratelimit-reset-requests': '6ms', 'x-ratelimit-reset-tokens': '0s', 'x-request-id': 'req_ee11d6e93fbc8f034b768352927f1c3c', 'strict-transport-security': 'max-age=31536000; includeSubDomains; preload', 'CF-Cache-Status': 'DYNAMIC', 'X-Content-Type-Options': 'nosniff', 'Server': 'cloudflare', 'CF-RAY': '8f0b84edff33ea15-ICN', 'alt-svc': 'h3=\":443\"; ma=86400'}\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 61%|██████    | 11/18 [01:51<02:46, 23.82s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2004-10 :  The server is overloaded or not ready yet.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 18/18 [02:06<00:00,  7.02s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2004-2021\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 20/20 [00:46<00:00,  2.33s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2004-2022\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 8/8 [00:34<00:00,  4.36s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2007-2017\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 22%|██▏       | 93/429 [07:57<11:05,  1.98s/it]  "
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-92 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 22%|██▏       | 94/429 [07:59<09:54,  1.78s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-93 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 24%|██▎       | 101/429 [08:15<10:05,  1.85s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-100 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 28%|██▊       | 121/429 [09:12<10:11,  1.98s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-120 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 28%|██▊       | 122/429 [09:12<08:29,  1.66s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-121 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 36%|███▋      | 156/429 [10:29<11:28,  2.52s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-155 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 37%|███▋      | 158/429 [10:33<09:10,  2.03s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-157 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 38%|███▊      | 164/429 [10:49<12:03,  2.73s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-163 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 51%|█████     | 219/429 [13:22<06:15,  1.79s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-218 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 69%|██████▉   | 297/429 [16:49<03:52,  1.76s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-296 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 77%|███████▋  | 331/429 [18:13<03:23,  2.08s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-330 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 429/429 [22:18<00:00,  3.12s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2007-2018\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 28%|██▊       | 97/347 [03:44<07:48,  1.87s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-96 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 28%|██▊       | 98/347 [03:45<06:45,  1.63s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-97 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 71%|███████   | 247/347 [10:08<03:26,  2.07s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-246 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 71%|███████▏  | 248/347 [10:09<02:41,  1.63s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-247 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 72%|███████▏  | 249/347 [10:18<06:30,  3.98s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-248 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 72%|███████▏  | 250/347 [10:19<04:50,  3.00s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-249 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 347/347 [14:26<00:00,  2.50s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2007-2019\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 72%|███████▏  | 183/254 [07:55<02:50,  2.40s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-182 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 254/254 [10:56<00:00,  2.59s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2007-2020\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 18/18 [00:43<00:00,  2.40s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2007-2021\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 20/20 [00:44<00:00,  2.24s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2007-2022\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 8/8 [00:21<00:00,  2.67s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2010-2017\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 22%|██▏       | 93/429 [19:11<13:39,  2.44s/it]   "
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-92 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 22%|██▏       | 95/429 [19:15<12:30,  2.25s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-94 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 24%|██▎       | 101/429 [19:27<09:35,  1.75s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-100 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 28%|██▊       | 121/429 [20:14<09:07,  1.78s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-120 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 28%|██▊       | 122/429 [20:15<08:26,  1.65s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-121 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 36%|███▋      | 156/429 [21:31<09:17,  2.04s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-155 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 37%|███▋      | 158/429 [21:36<09:17,  2.06s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-157 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 45%|████▍     | 193/429 [23:00<07:22,  1.88s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-192 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 51%|█████     | 219/429 [24:07<06:33,  1.88s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-218 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 59%|█████▊    | 251/429 [25:25<05:38,  1.90s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-250 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 59%|█████▉    | 253/429 [25:28<05:03,  1.73s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-252 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 59%|█████▉    | 254/429 [25:30<05:17,  1.81s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-253 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 62%|██████▏   | 264/429 [25:51<05:21,  1.95s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-263 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 69%|██████▉   | 298/429 [27:11<04:13,  1.94s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-297 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 77%|███████▋  | 332/429 [28:37<03:12,  1.99s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-331 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 79%|███████▉  | 339/429 [28:52<02:56,  1.96s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-338 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 79%|███████▉  | 340/429 [28:53<02:29,  1.68s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-339 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 82%|████████▏ | 351/429 [29:18<02:40,  2.06s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-350 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 84%|████████▍ | 361/429 [29:39<02:06,  1.86s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-360 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 84%|████████▍ | 362/429 [29:41<02:03,  1.84s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-361 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 429/429 [32:26<00:00,  4.54s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2010-2018\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 28%|██▊       | 97/347 [03:41<08:28,  2.03s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-96 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 29%|██▊       | 99/347 [03:44<06:51,  1.66s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-98 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 35%|███▌      | 123/347 [04:42<08:49,  2.36s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-122 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 56%|█████▋    | 196/347 [07:37<07:55,  3.15s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-195 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 71%|███████   | 247/347 [09:39<02:53,  1.74s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-246 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 71%|███████▏  | 248/347 [09:40<02:29,  1.51s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-247 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 82%|████████▏ | 284/347 [11:15<02:07,  2.02s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-283 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 95%|█████████▌| 330/347 [13:27<00:33,  1.95s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-329 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 347/347 [14:13<00:00,  2.46s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2010-2019\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 37%|███▋      | 93/254 [03:49<05:48,  2.17s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-92 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 58%|█████▊    | 147/254 [05:53<03:10,  1.78s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-146 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 72%|███████▏  | 183/254 [07:19<02:37,  2.22s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-182 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 77%|███████▋  | 195/254 [08:00<02:13,  2.26s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-194 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 254/254 [10:12<00:00,  2.41s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2010-2020\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 18/18 [00:39<00:00,  2.20s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2010-2021\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 30%|███       | 6/20 [00:12<00:28,  2.06s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-5 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 20/20 [00:47<00:00,  2.39s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2010-2022\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 8/8 [00:19<00:00,  2.44s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2013-2017\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 84%|████████▍ | 360/429 [16:01<02:10,  1.89s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2013-359 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 429/429 [19:25<00:00,  2.72s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2013-2018\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 29%|██▊       | 99/347 [04:45<56:07, 13.58s/it]  "
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2013-98 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 52%|█████▏    | 179/347 [09:42<1:22:04, 29.31s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2013-178 :  The server had an error while processing your request. Sorry about that! {\n",
      "  \"error\": {\n",
      "    \"message\": \"The server had an error while processing your request. Sorry about that!\",\n",
      "    \"type\": \"server_error\",\n",
      "    \"param\": null,\n",
      "    \"code\": null\n",
      "  }\n",
      "} 500 {'error': {'message': 'The server had an error while processing your request. Sorry about that!', 'type': 'server_error', 'param': None, 'code': None}} {'Date': 'Thu, 12 Dec 2024 08:25:32 GMT', 'Content-Type': 'application/json', 'Content-Length': '175', 'Connection': 'keep-alive', 'access-control-expose-headers': 'X-Request-ID', 'openai-organization': 'user-bh02lranbcmbofrotobh9bep', 'openai-processing-ms': '90738', 'openai-version': '2020-10-01', 'x-ratelimit-limit-requests': '10000', 'x-ratelimit-limit-tokens': '30000000', 'x-ratelimit-remaining-requests': '9999', 'x-ratelimit-remaining-tokens': '29999879', 'x-ratelimit-reset-requests': '6ms', 'x-ratelimit-reset-tokens': '0s', 'x-request-id': 'req_73e23ce0dad78b88cf665c90904a5f04', 'strict-transport-security': 'max-age=31536000; includeSubDomains; preload', 'CF-Cache-Status': 'DYNAMIC', 'X-Content-Type-Options': 'nosniff', 'Server': 'cloudflare', 'CF-RAY': '8f0c50d32c6bea27-ICN', 'alt-svc': 'h3=\":443\"; ma=86400'}\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 347/347 [17:33<00:00,  3.04s/it]  \n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2013-2019\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 72%|███████▏  | 183/254 [07:39<02:50,  2.41s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2013-182 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 254/254 [10:45<00:00,  2.54s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2013-2020\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 18/18 [00:44<00:00,  2.48s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2013-2021\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 20/20 [01:00<00:00,  3.00s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2013-2022\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 8/8 [00:19<00:00,  2.39s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2016-2017\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 36%|███▌      | 155/429 [09:46<2:16:12, 29.83s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2016-154 :  The server had an error while processing your request. Sorry about that! {\n",
      "  \"error\": {\n",
      "    \"message\": \"The server had an error while processing your request. Sorry about that!\",\n",
      "    \"type\": \"server_error\",\n",
      "    \"param\": null,\n",
      "    \"code\": null\n",
      "  }\n",
      "} 500 {'error': {'message': 'The server had an error while processing your request. Sorry about that!', 'type': 'server_error', 'param': None, 'code': None}} {'Date': 'Thu, 12 Dec 2024 08:56:00 GMT', 'Content-Type': 'application/json', 'Content-Length': '175', 'Connection': 'keep-alive', 'access-control-expose-headers': 'X-Request-ID', 'openai-organization': 'user-bh02lranbcmbofrotobh9bep', 'openai-processing-ms': '93585', 'openai-version': '2020-10-01', 'x-ratelimit-limit-requests': '10000', 'x-ratelimit-limit-tokens': '30000000', 'x-ratelimit-remaining-requests': '9999', 'x-ratelimit-remaining-tokens': '29999869', 'x-ratelimit-reset-requests': '6ms', 'x-ratelimit-reset-tokens': '0s', 'x-request-id': 'req_61b23335389937adc9717cef1cf072a9', 'strict-transport-security': 'max-age=31536000; includeSubDomains; preload', 'CF-Cache-Status': 'DYNAMIC', 'X-Content-Type-Options': 'nosniff', 'Server': 'cloudflare', 'CF-RAY': '8f0c7d5d1a43ea03-ICN', 'alt-svc': 'h3=\":443\"; ma=86400'}\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 429/429 [25:19<00:00,  3.54s/it]  \n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2016-2018\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 80%|████████  | 279/347 [12:14<02:27,  2.17s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2016-278 :  Expecting value: line 1 column 43 (char 42)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 347/347 [15:25<00:00,  2.67s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2016-2019\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 75%|███████▌  | 191/254 [08:07<02:27,  2.34s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2016-190 :  Expecting value: line 1 column 62 (char 61)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 254/254 [10:41<00:00,  2.52s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2016-2020\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 18/18 [00:45<00:00,  2.50s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2016-2021\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 20/20 [00:48<00:00,  2.45s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2016-2022\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 8/8 [00:17<00:00,  2.14s/it]\n"
     ]
    }
   ],
   "source": [
    "# Evaluate each year-specific fine-tuned model on every post-2017 test split.\n",
    "true_label, prediction_label = defaultdict(list), defaultdict(list)\n",
    "\n",
    "for train_year in [1998, 2001, 2004, 2007, 2010, 2013, 2016]:\n",
    "    model = ft_model[train_year]\n",
    "\n",
    "    for test_year in [2017, 2018, 2019, 2020, 2021, 2022]:\n",
    "        testset = test_data[test_year]\n",
    "\n",
    "        print(f'start: {train_year}-{test_year}')\n",
    "\n",
    "        for idx, message in tqdm(enumerate(testset), total=len(testset)):\n",
    "            try:\n",
    "                true, prediction = get_answers(model, message)\n",
    "            except Exception as e:\n",
    "                # Log which item failed (API error / unparsable reply) and move on.\n",
    "                print(f'gpt3.5-{train_year}-{idx} : ', e)\n",
    "                continue\n",
    "            # Only reached when get_answers succeeded for this item.\n",
    "            true_label[f'year{train_year}-{test_year}'].append(true)\n",
    "            prediction_label[f'year{train_year}-{test_year}'].append(prediction)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 155,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 1998-1999\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 46/46 [02:53<00:00,  3.77s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 1998-2000\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 31/31 [01:15<00:00,  2.43s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 1998-2001\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 73/73 [03:22<00:00,  2.78s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 1998-2002\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 85/85 [03:56<00:00,  2.78s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 1998-2003\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 160/160 [06:53<00:00,  2.58s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2001-2002\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 85/85 [04:35<00:00,  3.25s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2001-2003\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 160/160 [06:49<00:00,  2.56s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2001-2004\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 160/160 [06:55<00:00,  2.59s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2001-2005\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 222/222 [09:32<00:00,  2.58s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2001-2006\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 383/383 [17:36<00:00,  2.76s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2004-2005\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 70%|██████▉   | 155/222 [07:18<02:19,  2.08s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2004-154 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 222/222 [10:15<00:00,  2.77s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2004-2006\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 72%|███████▏  | 275/383 [13:33<03:17,  1.83s/it] "
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2004-274 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 72%|███████▏  | 276/383 [13:34<02:50,  1.59s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2004-275 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 85%|████████▌ | 326/383 [15:40<01:39,  1.74s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2004-325 :  The server had an error processing your request. Sorry about that! You can retry your request, or contact us through our help center at help.openai.com if you keep seeing this error. (Please include the request ID req_d26c8895f23cb1eebffbb53953cb2c7f in your email.) {\n",
      "  \"error\": {\n",
      "    \"message\": \"The server had an error processing your request. Sorry about that! You can retry your request, or contact us through our help center at help.openai.com if you keep seeing this error. (Please include the request ID req_d26c8895f23cb1eebffbb53953cb2c7f in your email.)\",\n",
      "    \"type\": \"server_error\",\n",
      "    \"param\": null,\n",
      "    \"code\": null\n",
      "  }\n",
      "} 500 {'error': {'message': 'The server had an error processing your request. Sorry about that! You can retry your request, or contact us through our help center at help.openai.com if you keep seeing this error. (Please include the request ID req_d26c8895f23cb1eebffbb53953cb2c7f in your email.)', 'type': 'server_error', 'param': None, 'code': None}} {'Date': 'Thu, 12 Dec 2024 14:29:29 GMT', 'Content-Type': 'application/json', 'Content-Length': '369', 'Connection': 'keep-alive', 'access-control-expose-headers': 'X-Request-ID', 'openai-processing-ms': '110', 'x-ratelimit-limit-requests': '10000', 'x-ratelimit-limit-tokens': '30000000', 'x-ratelimit-remaining-requests': '9999', 'x-ratelimit-remaining-tokens': '29999874', 'x-ratelimit-reset-requests': '6ms', 'x-ratelimit-reset-tokens': '0s', 'x-request-id': 'req_d26c8895f23cb1eebffbb53953cb2c7f', 'strict-transport-security': 'max-age=31536000; includeSubDomains; preload', 'CF-Cache-Status': 'DYNAMIC', 'X-Content-Type-Options': 'nosniff', 'Server': 'cloudflare', 'CF-RAY': '8f0e682ab96ceaa7-ICN', 'alt-svc': 'h3=\":443\"; ma=86400'}\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 383/383 [19:00<00:00,  2.98s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2004-2007\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 374/374 [17:18<00:00,  2.78s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2004-2008\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 14%|█▍        | 82/590 [03:50<24:39,  2.91s/it]  "
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2004-81 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 14%|█▍        | 83/590 [03:52<20:23,  2.41s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2004-82 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 37%|███▋      | 221/590 [10:02<13:13,  2.15s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2004-220 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 68%|██████▊   | 400/590 [17:56<07:37,  2.41s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2004-399 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 68%|██████▊   | 401/590 [18:03<12:35,  4.00s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2004-400 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 68%|██████▊   | 402/590 [18:06<11:04,  3.54s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2004-401 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 81%|████████  | 476/590 [21:25<04:38,  2.45s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2004-475 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 590/590 [26:40<00:00,  2.71s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2004-589 :  Expecting value: line 1 column 1 (char 0)\n",
      "start: 2004-2009\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 17%|█▋        | 128/766 [06:25<29:08,  2.74s/it] "
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2004-127 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 20%|██        | 157/766 [08:01<33:26,  3.30s/it]  "
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2004-156 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 21%|██        | 158/766 [08:01<25:34,  2.52s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2004-157 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 21%|██        | 159/766 [08:03<21:44,  2.15s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2004-158 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 21%|██        | 160/766 [08:04<18:16,  1.81s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2004-159 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 38%|███▊      | 291/766 [13:51<15:23,  1.94s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2004-290 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 61%|██████▏   | 470/766 [23:34<08:12,  1.67s/it]  "
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2004-469 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 80%|████████  | 614/766 [30:30<05:42,  2.26s/it]  "
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2004-613 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 81%|████████  | 619/766 [30:40<04:18,  1.76s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2004-618 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 766/766 [37:28<00:00,  2.94s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2007-2008\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 14%|█▍        | 82/590 [04:01<14:32,  1.72s/it]  "
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-81 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 14%|█▍        | 83/590 [04:01<11:51,  1.40s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-82 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 37%|███▋      | 221/590 [11:17<19:40,  3.20s/it]  "
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-220 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 48%|████▊     | 286/590 [13:59<09:14,  1.82s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-285 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 68%|██████▊   | 400/590 [19:52<05:30,  1.74s/it]  "
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-399 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 68%|██████▊   | 401/590 [19:53<05:06,  1.62s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-400 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 68%|██████▊   | 402/590 [19:54<04:12,  1.34s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-401 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 81%|████████  | 476/590 [23:32<03:30,  1.85s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-475 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 590/590 [28:35<00:00,  2.91s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-589 :  Expecting value: line 1 column 1 (char 0)\n",
      "start: 2007-2009\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 20%|██        | 154/766 [06:27<20:42,  2.03s/it] "
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-153 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 20%|██        | 157/766 [06:33<18:11,  1.79s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-156 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 21%|██        | 158/766 [06:33<14:44,  1.45s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-157 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 21%|██        | 159/766 [06:35<16:35,  1.64s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-158 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 21%|██        | 160/766 [06:36<14:17,  1.42s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-159 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 38%|███▊      | 291/766 [12:37<13:29,  1.70s/it]  "
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-290 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 61%|██████▏   | 470/766 [20:05<09:19,  1.89s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-469 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 78%|███████▊  | 601/766 [26:27<05:03,  1.84s/it]  "
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-600 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 80%|████████  | 614/766 [27:19<05:10,  2.05s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-613 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 81%|████████  | 619/766 [27:30<04:43,  1.93s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-618 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 90%|█████████ | 691/766 [30:31<02:22,  1.90s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-690 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 90%|█████████ | 692/766 [30:33<02:30,  2.04s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-691 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 766/766 [33:55<00:00,  2.66s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2007-2010\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 62%|██████▏   | 389/632 [17:37<11:34,  2.86s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-388 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 64%|██████▍   | 405/632 [18:24<07:56,  2.10s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-404 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 80%|███████▉  | 503/632 [22:44<06:24,  2.98s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-502 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 632/632 [28:13<00:00,  2.68s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2007-2011\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 31%|███▏      | 266/847 [11:59<18:30,  1.91s/it]  "
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-265 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 32%|███▏      | 267/847 [12:00<16:41,  1.73s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-266 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 32%|███▏      | 268/847 [12:01<14:32,  1.51s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-267 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 37%|███▋      | 310/847 [13:45<17:20,  1.94s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-309 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 45%|████▍     | 380/847 [17:27<37:35,  4.83s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-379 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 45%|████▍     | 381/847 [17:28<28:42,  3.70s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-380 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 45%|████▌     | 382/847 [17:29<21:49,  2.82s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-381 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 45%|████▌     | 383/847 [17:29<16:56,  2.19s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-382 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 50%|████▉     | 422/847 [19:24<14:20,  2.02s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-421 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 50%|████▉     | 423/847 [19:25<11:20,  1.60s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-422 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 50%|█████     | 424/847 [19:26<09:26,  1.34s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-423 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 50%|█████     | 425/847 [19:27<09:27,  1.34s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-424 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 76%|███████▋  | 647/847 [28:21<06:06,  1.83s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-646 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 847/847 [37:02<00:00,  2.62s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2007-2012\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "  1%|          | 5/812 [02:10<8:04:30, 36.02s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-4 :  The server is overloaded or not ready yet.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "  1%|          | 10/812 [03:01<4:18:15, 19.32s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-9 :  The server is overloaded or not ready yet.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "  5%|▌         | 42/812 [04:14<23:30,  1.83s/it]  "
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-41 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 14%|█▍        | 112/812 [06:54<17:38,  1.51s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-111 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 35%|███▍      | 284/812 [13:58<17:30,  1.99s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-283 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 38%|███▊      | 308/812 [14:52<14:23,  1.71s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-307 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 53%|█████▎    | 433/812 [20:33<11:42,  1.85s/it]  "
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-432 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 58%|█████▊    | 467/812 [21:53<09:58,  1.74s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-466 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 60%|█████▉    | 486/812 [22:37<14:13,  2.62s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2007-485 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 812/812 [36:49<00:00,  2.72s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2010-2011\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 18%|█▊        | 155/847 [07:06<22:01,  1.91s/it] "
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-154 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 32%|███▏      | 268/847 [11:42<17:28,  1.81s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-267 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 32%|███▏      | 275/847 [11:57<20:10,  2.12s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-274 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 36%|███▌      | 302/847 [13:08<30:05,  3.31s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-301 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 37%|███▋      | 310/847 [13:25<17:00,  1.90s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-309 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 43%|████▎     | 363/847 [15:45<24:23,  3.02s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-362 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 43%|████▎     | 367/847 [15:52<16:58,  2.12s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-366 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 44%|████▎     | 369/847 [15:55<14:43,  1.85s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-368 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 45%|████▌     | 383/847 [16:23<12:39,  1.64s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-382 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 50%|█████     | 424/847 [17:57<13:47,  1.96s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-423 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 54%|█████▍    | 457/847 [19:13<12:20,  1.90s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-456 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 54%|█████▍    | 458/847 [19:14<11:07,  1.72s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-457 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 54%|█████▍    | 461/847 [19:19<10:06,  1.57s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-460 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 72%|███████▏  | 609/847 [25:29<08:52,  2.24s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-608 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 79%|███████▉  | 673/847 [28:11<06:32,  2.25s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-672 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 80%|███████▉  | 674/847 [28:13<06:16,  2.18s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-673 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 81%|████████▏ | 690/847 [29:21<08:17,  3.17s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-689 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 83%|████████▎ | 707/847 [30:02<04:51,  2.08s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-706 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 85%|████████▍ | 717/847 [30:35<08:15,  3.81s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-716 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 847/847 [35:53<00:00,  2.54s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2010-2012\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "  5%|▌         | 42/812 [02:02<23:49,  1.86s/it]  "
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-41 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 14%|█▍        | 112/812 [04:45<22:28,  1.93s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-111 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 18%|█▊        | 143/812 [05:51<18:15,  1.64s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-142 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 24%|██▍       | 194/812 [07:52<23:34,  2.29s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-193 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 32%|███▏      | 259/812 [10:24<17:42,  1.92s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-258 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 34%|███▍      | 276/812 [11:04<15:28,  1.73s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-275 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 35%|███▌      | 285/812 [11:21<15:09,  1.72s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-284 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 38%|███▊      | 308/812 [12:18<14:43,  1.75s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-307 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 38%|███▊      | 312/812 [12:26<15:18,  1.84s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-311 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 39%|███▊      | 313/812 [12:27<13:19,  1.60s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-312 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 39%|███▊      | 314/812 [12:28<12:40,  1.53s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-313 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 39%|███▉      | 319/812 [12:37<13:26,  1.64s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-318 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 39%|███▉      | 320/812 [12:39<14:06,  1.72s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-319 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 40%|███▉      | 321/812 [12:41<14:55,  1.82s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-320 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 63%|██████▎   | 510/812 [20:35<09:35,  1.90s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-509 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 63%|██████▎   | 511/812 [20:36<08:05,  1.61s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-510 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 64%|██████▍   | 523/812 [21:06<11:21,  2.36s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-522 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 71%|███████▏  | 580/812 [24:46<59:46, 15.46s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-579 :  The server is overloaded or not ready yet.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 77%|███████▋  | 623/812 [26:36<08:30,  2.70s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-622 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 84%|████████▍ | 682/812 [29:26<04:36,  2.13s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-681 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 84%|████████▍ | 684/812 [29:29<03:43,  1.75s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-683 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 84%|████████▍ | 685/812 [29:31<03:26,  1.62s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-684 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 84%|████████▍ | 686/812 [29:32<03:00,  1.43s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-685 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 812/812 [34:16<00:00,  2.53s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2010-2013\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "  1%|          | 8/736 [00:18<27:39,  2.28s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-7 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 12%|█▏        | 91/736 [03:40<32:52,  3.06s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-90 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 23%|██▎       | 168/736 [06:52<22:56,  2.42s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-167 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 23%|██▎       | 169/736 [06:53<19:24,  2.05s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-168 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 24%|██▎       | 174/736 [07:03<18:22,  1.96s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-173 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 29%|██▊       | 211/736 [08:38<20:20,  2.32s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-210 :  The server had an error processing your request. Sorry about that! You can retry your request, or contact us through our help center at help.openai.com if you keep seeing this error. (Please include the request ID req_1dd6f5ac5123fd4ae83faf7da5604965 in your email.) {\n",
      "  \"error\": {\n",
      "    \"message\": \"The server had an error processing your request. Sorry about that! You can retry your request, or contact us through our help center at help.openai.com if you keep seeing this error. (Please include the request ID req_1dd6f5ac5123fd4ae83faf7da5604965 in your email.)\",\n",
      "    \"type\": \"server_error\",\n",
      "    \"param\": null,\n",
      "    \"code\": null\n",
      "  }\n",
      "} 500 {'error': {'message': 'The server had an error processing your request. Sorry about that! You can retry your request, or contact us through our help center at help.openai.com if you keep seeing this error. (Please include the request ID req_1dd6f5ac5123fd4ae83faf7da5604965 in your email.)', 'type': 'server_error', 'param': None, 'code': None}} {'Date': 'Thu, 12 Dec 2024 19:57:41 GMT', 'Content-Type': 'application/json', 'Content-Length': '369', 'Connection': 'keep-alive', 'access-control-expose-headers': 'X-Request-ID', 'openai-processing-ms': '123', 'x-ratelimit-limit-requests': '10000', 'x-ratelimit-limit-tokens': '30000000', 'x-ratelimit-remaining-requests': '9999', 'x-ratelimit-remaining-tokens': '29999875', 'x-ratelimit-reset-requests': '6ms', 'x-ratelimit-reset-tokens': '0s', 'x-request-id': 'req_1dd6f5ac5123fd4ae83faf7da5604965', 'strict-transport-security': 'max-age=31536000; includeSubDomains; preload', 'CF-Cache-Status': 'DYNAMIC', 'X-Content-Type-Options': 'nosniff', 'Server': 'cloudflare', 'CF-RAY': '8f1048ebda603268-ICN', 'alt-svc': 'h3=\":443\"; ma=86400'}\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 40%|████      | 296/736 [11:48<13:34,  1.85s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-295 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 48%|████▊     | 352/736 [13:52<14:07,  2.21s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-351 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 49%|████▉     | 363/736 [14:14<11:38,  1.87s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-362 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 51%|█████     | 372/736 [14:32<11:04,  1.82s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-371 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 57%|█████▋    | 423/736 [16:45<12:06,  2.32s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-422 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 59%|█████▉    | 434/736 [17:06<09:02,  1.80s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-433 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 65%|██████▍   | 478/736 [19:16<09:08,  2.13s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-477 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 68%|██████▊   | 500/736 [20:07<07:50,  1.99s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-499 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 73%|███████▎  | 539/736 [21:50<06:20,  1.93s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-538 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 76%|███████▌  | 558/736 [22:41<06:37,  2.23s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-557 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 76%|███████▋  | 562/736 [22:48<05:40,  1.96s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-561 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 77%|███████▋  | 565/736 [23:00<10:35,  3.72s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-564 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 82%|████████▏ | 607/736 [24:39<04:32,  2.11s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-606 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 736/736 [29:47<00:00,  2.43s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2010-2014\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "  4%|▍         | 34/790 [01:48<37:57,  3.01s/it]  "
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-33 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 18%|█▊        | 139/790 [07:18<20:59,  1.94s/it]  "
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-138 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 30%|██▉       | 234/790 [11:52<30:39,  3.31s/it]  "
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-233 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 50%|████▉     | 392/790 [19:25<12:27,  1.88s/it]  "
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-391 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 58%|█████▊    | 456/790 [22:09<10:41,  1.92s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-455 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 58%|█████▊    | 457/790 [22:11<10:15,  1.85s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-456 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 58%|█████▊    | 460/790 [22:20<15:35,  2.83s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-459 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 66%|██████▌   | 519/790 [24:34<10:06,  2.24s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-518 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 86%|████████▌ | 681/790 [30:58<03:45,  2.07s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-680 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 86%|████████▋ | 682/790 [30:59<03:17,  1.83s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-681 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 86%|████████▋ | 683/790 [31:01<03:14,  1.82s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-682 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 94%|█████████▍| 742/790 [33:43<01:51,  2.32s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-741 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 94%|█████████▍| 745/790 [33:49<01:37,  2.16s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-744 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 98%|█████████▊| 774/790 [34:51<00:33,  2.10s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-773 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 98%|█████████▊| 775/790 [34:52<00:26,  1.80s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-774 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 790/790 [35:32<00:00,  2.70s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2010-2015\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "  7%|▋         | 44/664 [01:42<21:00,  2.03s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-43 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "  7%|▋         | 46/664 [01:45<17:20,  1.68s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-45 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 11%|█         | 73/664 [02:48<21:14,  2.16s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-72 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 21%|██        | 137/664 [06:12<20:06,  2.29s/it]  "
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-136 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 28%|██▊       | 184/664 [08:21<16:49,  2.10s/it]  "
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-183 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 36%|███▋      | 242/664 [10:43<11:03,  1.57s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-241 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 37%|███▋      | 243/664 [10:45<11:19,  1.62s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-242 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 38%|███▊      | 254/664 [11:08<13:56,  2.04s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-253 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 46%|████▌     | 307/664 [13:15<11:59,  2.02s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-306 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 63%|██████▎   | 421/664 [17:39<10:07,  2.50s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-420 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 82%|████████▏ | 546/664 [23:35<04:02,  2.06s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-545 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 82%|████████▏ | 547/664 [23:37<04:07,  2.11s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2010-546 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 664/664 [28:18<00:00,  2.56s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2013-2014\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 790/790 [33:55<00:00,  2.58s/it]  \n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2013-2015\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "  7%|▋         | 46/664 [01:51<20:40,  2.01s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2013-45 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 11%|█         | 73/664 [02:55<22:36,  2.29s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2013-72 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 79%|███████▉  | 527/664 [22:39<03:59,  1.75s/it]  "
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2013-526 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 664/664 [28:36<00:00,  2.59s/it]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "start: 2013-2016\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 31%|███▏      | 178/568 [07:39<11:25,  1.76s/it] "
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2013-177 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 57%|█████▋    | 321/568 [12:58<08:47,  2.13s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "gpt3.5-2013-320 :  Expecting value: line 1 column 1 (char 0)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 568/568 [22:45<00:00,  2.40s/it]\n"
     ]
    }
   ],
   "source": [
     "# Evaluate each fine-tuned model on the test sets of the 5 years\n",
     "# immediately after its training year (j = i+1 .. i+5).\n",
     "# Results accumulate in true_label / prediction_label under 'year{i}-{j}'.\n",
     "\n",
     "for i in [1998, 2001, 2004, 2007, 2010, 2013, 2016]:\n",
     "    model = ft_model[i]\n",
     "\n",
     "    for j in range(i+1, i+6):\n",
     "        testset = test_data[j]\n",
     "        # Skip year pairs that already have results, so the loop can be\n",
     "        # resumed after an interruption without re-querying the API.\n",
     "        # NOTE(review): on a defaultdict this lookup also inserts an empty\n",
     "        # list for every missing key — confirm that side effect is intended.\n",
     "        if true_label[f'year{i}-{j}']:\n",
     "            continue\n",
     "\n",
     "        print (f'start: {i}-{j}')\n",
     "\n",
     "        for k, message in tqdm(enumerate(testset), total=len(testset)):\n",
     "            try:\n",
     "                true, prediction = get_answers(model, message)\n",
     "            except Exception as e:\n",
     "                # Log and skip failed requests (JSON parse failures,\n",
     "                # transient server errors); the two label lists stay aligned\n",
     "                # because both are appended only on success.\n",
     "                print (f'gpt3.5-{i}-{k} : ', e)\n",
     "                continue\n",
     "            else:\n",
     "                true_label[f'year{i}-{j}'].append(true)\n",
     "                prediction_label[f'year{i}-{j}'].append(prediction)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 157,
   "metadata": {},
   "outputs": [],
   "source": [
     "import pickle \n",
     "\n",
     "# Persist the evaluation results so the expensive API loop above does not\n",
     "# have to be re-run; reload later with pickle.load on these files.\n",
     "with open('true_label_year2_save.pickle', 'wb') as f:\n",
     "    pickle.dump(true_label, f)\n",
     "\n",
     "with open('prediction_label_year2_save.pickle', 'wb') as f:\n",
     "    pickle.dump(prediction_label, f)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 156,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "dict_keys(['year1998-2017', 'year1998-2018', 'year1998-2019', 'year1998-2020', 'year1998-2021', 'year1998-2022', 'year2001-2017', 'year2001-2018', 'year2001-2019', 'year2001-2020', 'year2001-2021', 'year2001-2022', 'year2004-2017', 'year2004-2018', 'year2004-2019', 'year2004-2020', 'year2004-2021', 'year2004-2022', 'year2007-2017', 'year2007-2018', 'year2007-2019', 'year2007-2020', 'year2007-2021', 'year2007-2022', 'year2010-2017', 'year2010-2018', 'year2010-2019', 'year2010-2020', 'year2010-2021', 'year2010-2022', 'year2013-2017', 'year2013-2018', 'year2013-2019', 'year2013-2020', 'year2013-2021', 'year2013-2022', 'year2016-2017', 'year2016-2018', 'year2016-2019', 'year2016-2020', 'year2016-2021', 'year2016-2022', 'year1998-2002', 'year1998-1999', 'year1998-2000', 'year1998-2001', 'year1998-2003', 'year2001-2002', 'year2001-2003', 'year2001-2004', 'year2001-2005', 'year2001-2006', 'year2004-2005', 'year2004-2006', 'year2004-2007', 'year2004-2008', 'year2004-2009', 'year2007-2008', 'year2007-2009', 'year2007-2010', 'year2007-2011', 'year2007-2012', 'year2010-2011', 'year2010-2012', 'year2010-2013', 'year2010-2014', 'year2010-2015', 'year2013-2014', 'year2013-2015', 'year2013-2016'])"
      ]
     },
     "execution_count": 156,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "# Sanity check: show all train-year/test-year pairs collected so far.\n",
     "true_label.keys()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [],
   "source": [
    "time_dict = {\n",
    "    'days': 24,\n",
    "    'day': 24,\n",
    "    'weeks': 24*7,\n",
    "    'week': 24*7,\n",
    "    'hours': 1,\n",
    "    'hour': 1,\n",
    "    'h': 1,\n",
    "    'minute': 1/60,\n",
    "    'min': 1/60,\n",
    "    's': 1/3600\n",
    "}"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [],
   "source": [
    "class Metric(object):\n",
    "    \"\"\"Running true/false counter with task-specific fuzzy comparisons.\n",
    "\n",
    "    Temperatures match within 10 °C, pressures within 1 atm, and times\n",
    "    match when the ratio of the two durations (in hours) is within 2x.\n",
    "    \"\"\"\n",
    "\n",
    "    def __init__(self):\n",
    "        self.n_true = 0\n",
    "        self.n_false = 0\n",
    "\n",
    "    def update(self, answer: bool):\n",
    "        \"\"\"Count one exact-match result.\"\"\"\n",
    "        if answer:\n",
    "            self.n_true += 1\n",
    "        else:\n",
    "            self.n_false += 1\n",
    "\n",
    "    def update_list(self, true_list, pred_list):\n",
    "        \"\"\"Count each predicted item as correct if it appears in the truth list.\n",
    "\n",
    "        Falls back to a single exact-equality count when either side is\n",
    "        not a list (e.g. None for a missing field).\n",
    "        \"\"\"\n",
    "        if not isinstance(true_list, list) or not isinstance(pred_list, list):\n",
    "            self.update(true_list == pred_list)\n",
    "            return\n",
    "\n",
    "        for item in pred_list:\n",
    "            self.update(item in true_list)\n",
    "\n",
    "    def update_temp(self, true, pred):\n",
    "        \"\"\"Compare temperatures; '<x> °C' strings match within 10 °C.\"\"\"\n",
    "        if not isinstance(true, str) or not isinstance(pred, str):\n",
    "            self.update(true == pred)\n",
    "            return\n",
    "\n",
    "        if '°C' not in true or '°C' not in pred:\n",
    "            self.update(true == pred)\n",
    "            return\n",
    "\n",
    "        try:\n",
    "            # Range-valued strings such as '25-30 °C' cannot be parsed as\n",
    "            # a float; fall back to exact comparison instead of crashing.\n",
    "            t = float(true.replace('°C', '').strip())\n",
    "            p = float(pred.replace('°C', '').strip())\n",
    "        except ValueError:\n",
    "            self.update(true == pred)\n",
    "            return\n",
    "\n",
    "        self.update(abs(t - p) <= 10)\n",
    "\n",
    "    def update_time(self, true, pred):\n",
    "        \"\"\"Compare durations; unit-bearing strings match within a 2x ratio.\"\"\"\n",
    "        if not isinstance(true, str) or not isinstance(pred, str):\n",
    "            self.update(true == pred)\n",
    "            return\n",
    "\n",
    "        unit_re = r\"(?<=\\b|\\d)(days|day|weeks|week|hours|hour|h|minute|min|s)\\b\"\n",
    "        unit_true = regex.search(unit_re, true)\n",
    "        unit_pred = regex.search(unit_re, pred)\n",
    "\n",
    "        if not unit_pred or not unit_true:\n",
    "            self.update(true == pred)\n",
    "            return\n",
    "\n",
    "        try:\n",
    "            # '1-2 days' and similar ranges cannot be parsed as a float;\n",
    "            # fall back to exact comparison instead of crashing.\n",
    "            # The 1e-6 offset guards the ratio below against divide-by-zero.\n",
    "            t = float(true.split()[0]) * time_dict[unit_true.group()] + 1e-6\n",
    "            p = float(pred.split()[0]) * time_dict[unit_pred.group()] + 1e-6\n",
    "        except (ValueError, IndexError):\n",
    "            self.update(true == pred)\n",
    "            return\n",
    "\n",
    "        self.update(0.5 < t / p < 2)\n",
    "\n",
    "    def update_pressure(self, true: str, pred: str):\n",
    "        \"\"\"Compare pressures; '<x> atm' strings match within 1 atm.\"\"\"\n",
    "        if not isinstance(true, str) or not isinstance(pred, str):\n",
    "            self.update(true == pred)\n",
    "            return\n",
    "\n",
    "        if 'atm' not in true or 'atm' not in pred:\n",
    "            self.update(true == pred)\n",
    "            return\n",
    "\n",
    "        try:\n",
    "            # Non-numeric pressure strings fall back to exact comparison.\n",
    "            t = float(true.replace('atm', '').strip())\n",
    "            p = float(pred.replace('atm', '').strip())\n",
    "        except ValueError:\n",
    "            self.update(true == pred)\n",
    "            return\n",
    "\n",
    "        self.update(abs(t - p) <= 1)\n",
    "\n",
    "    def accuracy(self):\n",
    "        \"\"\"Fraction of correct counts; raises ZeroDivisionError if no updates.\"\"\"\n",
    "        return self.n_true / (self.n_true + self.n_false)\n",
    "\n",
    "def evaluate(true, prediction, metric: Metric):\n",
    "    \"\"\"Score one predicted synthesis record against the ground truth.\n",
    "\n",
    "    Each field contributes one or more true/false counts to `metric`.\n",
    "    A non-dict prediction (e.g. unparseable model output) counts as a\n",
    "    single failure.\n",
    "    \"\"\"\n",
    "    if not isinstance(prediction, dict):\n",
    "        metric.update(False)\n",
    "        return\n",
    "    # Precursors / solvent: list overlap, counting a failure on any error.\n",
    "    try:\n",
    "        metric.update_list(true.get('precursors', None), prediction.get('precursors', None))\n",
    "    except Exception:\n",
    "        metric.update(False)\n",
    "\n",
    "    try:\n",
    "        metric.update_list(true['solvent'], prediction['solvent'])\n",
    "    except Exception:\n",
    "        metric.update(False)\n",
    "\n",
    "    # Conditions: fuzzy numeric comparisons (see Metric methods).\n",
    "    metric.update_temp(true.get('temperature', None), prediction.get('temperature', None))\n",
    "    metric.update_time(true.get('time'), prediction.get('time'))\n",
    "    metric.update_pressure(true.get('pressure'), prediction.get('pressure'))\n",
    "\n",
    "    # Method / workup steps: exact match.\n",
    "    metric.update(true.get('synthesis_method') == prediction.get('synthesis_method'))\n",
    "    metric.update(true.get('cooling') == prediction.get('cooling'))\n",
    "    metric.update_list(true.get('washing'), prediction.get('washing'))\n",
    "    metric.update(true.get('filtration') == prediction.get('filtration'))\n",
    "    metric.update(true.get('drying') == prediction.get('drying'))\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 158,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "year1998-2017\n",
      "year1998-2018\n",
      "year1998-2019\n",
      "year1998-2020\n",
      "year1998-2021\n",
      "year1998-2022\n",
      "year2001-2017\n",
      "year2001-2018\n",
      "year2001-2019\n",
      "year2001-2020\n",
      "year2001-2021\n",
      "year2001-2022\n",
      "year2004-2017\n",
      "year2004-2018\n",
      "year2004-2019\n",
      "year2004-2020\n",
      "year2004-2021\n",
      "year2004-2022\n",
      "year2007-2017\n",
      "year2007-2018\n",
      "year2007-2019\n",
      "year2007-2020\n",
      "year2007-2021\n",
      "year2007-2022\n",
      "year2010-2017\n",
      "year2010-2018\n",
      "year2010-2019\n",
      "year2010-2020\n",
      "year2010-2021\n",
      "year2010-2022\n",
      "year2013-2017\n",
      "year2013-2018\n",
      "year2013-2019\n",
      "year2013-2020\n",
      "year2013-2021\n",
      "year2013-2022\n",
      "year2016-2017\n",
      "year2016-2018\n",
      "year2016-2019\n",
      "year2016-2020\n",
      "year2016-2021\n",
      "year2016-2022\n",
      "year1998-2002\n",
      "year1998-1999\n",
      "year1998-2000\n",
      "year1998-2001\n",
      "year1998-2003\n",
      "year2001-2002\n",
      "year2001-2003\n",
      "year2001-2004\n",
      "year2001-2005\n",
      "year2001-2006\n",
      "year2004-2005\n",
      "year2004-2006\n",
      "year2004-2007\n",
      "year2004-2008\n",
      "year2004-2009\n",
      "year2007-2008\n",
      "year2007-2009\n",
      "year2007-2010\n",
      "year2007-2011\n",
      "year2007-2012\n",
      "year2010-2011\n",
      "year2010-2012\n",
      "year2010-2013\n",
      "year2010-2014\n",
      "year2010-2015\n",
      "year2013-2014\n",
      "year2013-2015\n",
      "year2013-2016\n"
     ]
    }
   ],
   "source": [
    "result_dict = defaultdict(list)\n",
    "\n",
    "# One Metric per sample: result_dict holds per-sample accuracies for each\n",
    "# 'year{train}-{test}' pair.\n",
    "for key, true in true_label.items():\n",
    "    prediction = prediction_label[key]\n",
    "\n",
    "    print(key)\n",
    "    for sample_true, sample_pred in zip(true, prediction):\n",
    "        if sample_true is None or sample_pred is None:\n",
    "            continue\n",
    "        sample_metric = Metric()\n",
    "        evaluate(sample_true, sample_pred, sample_metric)\n",
    "        result_dict[key].append(sample_metric.accuracy())"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 159,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "dict_keys([1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022])"
      ]
     },
     "execution_count": 159,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "test_data.keys()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 160,
   "metadata": {},
   "outputs": [],
   "source": [
    "summary_dict = defaultdict(list)\n",
    "# Number of test examples per year (kept for optional weighted averaging).\n",
    "n_test = {year: len(examples) for year, examples in test_data.items()}\n",
    "\n",
    "for train_year in [1998, 2001, 2004, 2007, 2010, 2013, 2016]:\n",
    "    recent_scores = []  # per-sample accuracy on the 2017-2022 test years\n",
    "    near_scores = []    # per-sample accuracy on the 5 years after training\n",
    "\n",
    "    for test_year in [2017, 2018, 2019, 2020, 2021, 2022]:\n",
    "        recent_scores.extend(result_dict[f'year{train_year}-{test_year}'])\n",
    "\n",
    "    for offset in range(1, 6):\n",
    "        near_scores.extend(result_dict[f'year{train_year}-{train_year + offset}'])\n",
    "\n",
    "    # Key names are consumed by the plotting cell: '2017' = mean over the\n",
    "    # recent (2017-2022) test years, 'near5' = mean over the next 5 years.\n",
    "    summary_dict['2017'].append(np.mean(recent_scores))\n",
    "    summary_dict['near5'].append(np.mean(near_scores))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 225,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "The PostScript backend does not support transparency; partially transparent artists will be rendered opaque.\n"
     ]
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP4AAADqCAYAAACC0fjcAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8g+/7EAAAACXBIWXMAAA9hAAAPYQGoP6dpAABGS0lEQVR4nO2dd1hU1/b3v9MY6tCkKCDFhhobCtLBTtRoIGrUxIg9ptxEo/dqjJUYTK4t92ePRkXjVV+jibkK9ooxxoiosYCCSBGkztCmr/cPwsSRGWSkw/48z3lgzj5773Vmzvecs9taHCIiMBiMVgW3sQ1gMBgNDxM+g9EKYcJnMFohTPgMRiuECZ/BaIUw4TMYrRAmfAajFcKEz2C0QpjwGYxWSIMLX61WY+rUqQgICEBYWBiePXumlf6Pf/wDvr6+CAwMxP379wEAe/bsQZcuXRAaGorQ0FA8fPiwoc1mMFoUDS78I0eOwMTEBPHx8ZgyZQqio6M1aYmJibh16xauXr2KZcuWYfny5QCAGzduYNOmTTh//jzOnz+Pjh07NrTZDEaLgt/QFcbHx2Po0KEAgLCwMKxatUqT1q5dOxgbG0OhUKC4uBgCgQAAkJCQgJSUFCxduhQjR47EggULdJYtk8kgk8k0n9VqNQoKCmBrawsOh1OPZ8VgND5EhOLiYrRr1w5cbvXP9AYXvkQigUgkAgBYWFiguLhYkyYQCCCTydClSxeIxWIcPXoUADBixAi88847sLOzw5tvvgkvLy/NzeN5oqOjNW8JDEZrJT09Hc7OztUe0+DCF4lEGrEXFxfD0tJSkxYTEwMPDw+cOXMGOTk5GDRoEG7cuIGZM2dqjhsxYgQSExN1Cn/hwoWYO3eu5rNYLEb79u2Rnp6uudkwGC0ViUQCFxcXWFhYvPTYBhe+n58fTp48iVGjRiE2Nhb+/v6aNCsrK1hYWIDL5cLa2hpKpRIymQw9evRAQkICbGxscPbsWcyePVtn2UKhEEKhsMp+kUjEhM9oNdSkWdvgwo+IiEBsbCwCAgIgEAiwf/9+fPbZZ4iMjMTEiRNx8eJFBAQEQKlUYvHixbC0tMSGDRsQFhYGoVCIQYMGYdCgQQ1tNoPRouC0ZEccEokElpaWEIvFrfKJr1KpoFAoGtsMRj0hEAjA4/E0nw253hv8ic9oGEpKSpCRkYEWfF9v9XA4HDg7O8Pc3NzgvEz4LRCVSoWMjAyYmprCzs6ODWW2QIgIubm5yMjIQKdOnbSe/DWBCb8FolAoQESws7ODiYlJY5vDqCfs7Ozw+PFjKBQKg4XP5uq3YNiTvmVTm9+XCZ/RJJg1axZCQ0Ph5uYGT09PhIaG4osvvqhx/s2bN9eq/trmb26wXv0WiFQqRWpqKtzd3WFsbNzY5hjEsmXL4ObmhsjISIPyubm54fHjx69cb23zNwYv/s6GXO/sic9osqSlpSEsLAyhoaF444038OzZM0ilUowaNQqhoaHw9/dHYmIiVq1ahezsbHz66ada+X/88Uf4+vrC399fk6arTH35WzTUghGLxQSAxGJxY5vSoJSXl9Pdu3epvLy8sU0xmKVLl9LOnTuJiGjs2LF0+vRpIiI6fPgwffDBB3Tnzh0aNGgQlZSUUGJiIl28eJGIiFxdXauUNWbMGIqNjSUioq1bt5JcLtdZpr78TZ0Xf2dDrnf2xGc0We7cuYMVK1YgNDQUa9euxdOnT9G9e3dMnDgRY8aMwYIFC6rt4Fq9ejWOHDmCkJAQJCUlgYh0ltkaYcN5jCaLp6cnvvjiC3h5eeHmzZsafw1yuRyxsbFITEzEJ598gvPnz+ucqLR9+3Z89dVXsLW1RXh4OK5cuaKzTACtbqITEz6jybJ69WrMnj0b5eXlUCgU2LJlCzp37oylS5di3759ICJ8/vnnAAAPDw/MnDkT27Zt0+Tv168fwsLCIBKJ4ODggP79++ssU1/+lgzr1W+BNOdefUbNYb36
DAbDIJjwGYxWCBM+g9EKYcJnMFohryz83NzcurSDwWA0IAYJv6ysDB988AHMzMzg5uaG5ORk9OzZE48ePaov+xgMRj1gkPA/+eQTFBQUICEhAUZGRnB3d8fQoUPx/vvv15d9jAai6M5N3P/PV0hc8gnu/+crFN25+cplnT9/Hra2tsjOztbsi4yMxPnz5w0qR9eKOaVSCQcHB01UpQ0bNryyna0Zgybw/PLLL0hJSYGpqSk4HA74fD6io6Nhb29fX/YxGoCiOzfxeN93ADgACNLsLDze9x3cJs6A1Wu9X6lMHo+H999/Hz/99NMr2/X1119X8aj8559/YujQodizZ88rl8sw8IlvZmZWJdZdTk4OrK2t69QoRt2ilstRlvlE75Z14qe/jiStv1knftabRy2XV1vn8OHDoVQqsXfvXq39crkcU6ZMQUhICIKCgnDt2jWIxWJ07doVSUlJuHLlCgIDA/Hdd98hOzsb48eP18qfkJCABw8eIDQ0FGPHjkVOTo5W+oQJE3DmzBkAwIULFzBjxgwUFhYiIiICAwYMwKBBgzSxF9etW4eBAwfC398f7733HoCKN5M33ngDwcHBePDgAQICAhAcHIyRI0dCIpEY+M03XQx64n/00UcICwvD/PnzoVQqcejQIXz99dfsVb+JI83NRtLGrw3OJ89/pjdf5w//BVOn9tXm37ZtG4KCgrTcoW/fvh3Ozs7YuXMnsrKy8MYbb+CPP/7Atm3bMH36dJSWluLHH3+Em5sbVq5cif3792uV2b59eyxevBhvvPEG9u3bhzlz5mDfvn2a9KlTpyImJgaDBg3C7t27MWPGDERHR2PEiBGYNm0aEhIS8Mknn+Do0aMoLS3V3CT69OmDzMxMAMCwYcPw0UcfYePGjRgwYACWL1+OuLg4FBQUtJgZoAYJf86cOXBwcEBMTAxcXFywefNmzJ49G1OnTq0v+xh1gLGdIzp/+C+96Y/3fw95ftVRGqGtPVzHT9Fb5sto164dli1bhpkzZ8LW1hZAxYq7+Ph4XLp0CUBFNCWZTIagoCAQEXr37g03Nze9Zfbv3x9GRkYAgDfffBMrV67USh80aBDmzJmD3Nxc/Pnnn/Dz80NUVBQuXLigaR6Ul5eDx+OBiDBx4kSYm5tDLBZrXJF369YNQMVNJDo6GkOGDIGDgwNWr1790nNuLhgk/E8//RRffvklJk6cWF/2MOoBrpFRtU/ndsPerGjjczgAkeZv27DRL32qv4xJkybh0KFDOH78OCIjI+Hp6YmOHTti7ty5KC4uxurVqyEUCrF371507NgRT548waVLlxAUFAQulwu1Wq0VAHLhwoXo3r07Zs2ahTNnzsDLy0v7XLlcjBkzBh9//DHGjBkDoGKVX2BgICIiIpCRkYEDBw7g1q1bOHv2LM6dO4eCggJ0795ds0Kvsr6ff/4ZgwcPxooVK/Dtt9/iu+++w7Jly2r1fTQZDFn4b2dnV2vnDiqViqZMmUL+/v40bNgwysnJ0Ur/+OOPqX///hQQEED37t0jIqLLly+Tt7c3+fr60ubNm2tcF3PEUfPfqvB2At37diXdXPwPuvftSiq8k/DK9Z87d44mT56s+fz06VOysbGhc+fOkVQqpXfffZdCQkKod+/e9MMPP1Bqaip1796dioqKKCUlhbp160ZFRUU0adIkev3117XKzsnJoSFDhlBISAgNHz6cnj59WqX+x48fk5GREWVnZxMRUV5eHo0ePZpCQkKob9++dOrUKSotLaWQkBDy9vamwMBA8vHxofj4eJo8eTKdO3eOiIiSkpLI39+fBgwYQIMHD6aHDx++8ndSH9TGEYdBwo+MjKSAgABatWoV7dq1i3bv3q3ZasqhQ4c0Xk/2799Pn376qSbt5s2bFBISQkREp06dovHjxxMRkZeXF2VkZJBMJqN+/frRs2fPalQXE37z88BTFzx8+JDCw8Mb24x6pzbCN+hV//HjxxAIBIiLi9Paz+FwNL2iLyM+
Pl4T6TYsLAyrVq3SpLVr1w7GxsZQKBQoLi6GQCCARCKBWq2Gk5MTACAwMBBXrlzB6NGjq5Qtk8kgk8k0n1tSLyyjZhw8eBBffvklYmJiGtuUJo1Bwj937lytK5RIJJqeUQsLC03IbKAiFphMJkOXLl0gFotx9OhRreN15Xme6OhoLF++vNY2Mpov48aNw7hx4xrbjCaPQeP4KpUKUVFR6Ny5M0xNTeHm5oaFCxcaFJhRJBJphFtcXKyJew8AMTEx8PDwwMOHD3Hnzh3MmDGjitBfzPM8CxcuhFgs1mzp6emGnB6D0WowSPiLFy/G//73P2zYsAEJCQnYvHkzzp49q3F/VBP8/Pxw8uRJAEBsbCz8/f01aVZWVrCwsACXy4W1tTWUSqUmNFBGRgbkcjkuXrwIb29vnWULhUKIRCKtjcFg6MCQzgQXFxdNT2klWVlZZG9vX+MylEqlplc/JCSEnj59SnPnzqVbt26RQqGgadOmkb+/P/n4+FBMTAwRVfTq+/j4kJeXF/3nP/+pcV2sc691du61FmrTuWeQzz0HBwc8evRIKyyvRCJB586dtRZkNBWYzz3mc68l02A+90aNGoVJkybh0aNHUCgUePjwISZPnowRI0bU6gQYjc+N5AKs2H0LH6y/hhW7b+FGcsErl3X+/Hk4OjpqVtD17t0bH374YR1aWxV9se9OnTqFbdu2QSqVYty4cQgJCYGvry+uXr0KADh8+DC8vb3Rv39/HD16VCvvnDlzsGvXLk05lecTGBgIIyOjKj758/Ly8PrrryM0NBTBwcFISkoCAGzatAne3t7w8/PT1Hvt2jXNOoC33noL5eXlem2cOnUqysvL6+y7AmDYq75YLKZx48aRQCAgLpdLxsbGNHny5Cb7Ks1e9Wv2qv9HUj7NWH2VZq6+qvX3j6T8V6r/xQk8arWaAgMD6datW69UXk3QFQlHpVLR4MGDSalU0saNG2nx4sVERHT//n3y9fUluVxOnp6eVFxcTGKxmHr06EFyuZwKCgpoxIgR5OHhoYnq8zxLliyh1atXV9k/f/582rFjBxERxcXF0fjx4+np06fUt29fUigUlJaWRv379yciIm9vb3r06BERES1YsIC2bNmi00YiotOnT9OKFSuq1Ndg4/gikQgbNmzAzp07IZFIUFBQgDZt2rSq1+jmiEyhQnaBVG/64YtPALy4Ng84fCkdtiKhzjyONsYQCmoWk72kpETz+imXyzFr1iykpKRArVZjzZo18PHxwU8//YSVK1dCpVJh2LBhiI6Oxv79+/Htt9+Cy+Xi9ddfxxdffIFly5YhJSUFubm5yMjIwM6dO3H69GlN7Lv169dr6j116hRee+018Hg8TJo0SRN1R6lUwsjICPfu3YOnp6em6dqhQwfcvXsXNjY2WLRoEU6cOFHlXFJSUhAXF4crV65USVu4cCFMTEy06rh27RqCgoLA5/PRvn17yGQyiMVi/Pzzz2jbtq3WsWPGjKliIwAMGDAAc+bMwRdffFFnoc8NEv6RI0cwefJkXLhwAX369MF///tfrFy5Ev/9738xZMiQOjGIUfdkF0ixcu8dg/M9K9Sfb9G7r8HVwUxv3ri4OISGhiI7OxtmZmb4/PPP4erqik2bNlVZnffbb79h7ty5uH79OmxsbLBkyRKkpaXhq6++wrVr1yAUChEeHo7ff/8dAGBjY4OYmBjs3bsX27dvx5YtW7BlyxYt0QMVy3J79eoFoGL+B1DhMm7SpEn497//rXeOSK9eveDi4qJT+GvXrsX8+fM1o03PU7k8/eHDh5g3bx4OHTqEhIQEnXU4OzsDAI4ePYozZ85g+fLlMDU1rWIjULF2wNbWFklJSejSpYve79wQDBL+v/71L8TFxaFPnz4AKtpAPj4+mDFjBu7evVsnBjHqHkcbYyx69zW96d/9LxnPimRV9ttbG2PGiI56y6yOsLAw7Nq1CxkZGRg6dCg6deoEQPfqvMzMTDg4OMDGxgYAsGLF
Cly7dg05OTkICwsDAIjFYiQnJwOARszOzs6QSvW/yeTl5SE0NFTzOSkpCWPGjMHKlSsxaNAg3Lp1q8ZzRICKeSynT5/GunXrAFS8yYwcORIAMGbMGHz00Ue4evUqpk6dil27dqF79+549OhRlToqbwTbtm3D999/j7i4OI3oX7SxkrZt26Kg4NX7XV7EIOFnZ2ejf//+Wvv69+/fJHv0GX8jFPCqfTpHBLfHlqPJf/nfgebvW0Eu1earCc7Ozti0aRPefvtt3LlzR+fqPBcXF+Tl5WmewBMnTkR0dDTc3d1x+vRp8Pl8bN26FV5eXkhKStL5uks6Bqfs7OxQVFQEAEhPT8eoUaMQExMDHx8fABWr9u7fv4/i4mIQEe7fvw9PT0+953L79m307NkTAoEAAGBubq7lTuzGjRuYNm0ajh49io4dK26Y3t7eiIqKgkKhwNOnT8HlciESibB161YcOXIEZ86cgZmZmV4bKykqKqpTT1cG9er7+flh0aJFmpl6CoUCS5cuhZ+fX50ZxGh4vDrZ4P1RneBkZwo+jwMnO1PMHtUJfTrZ1En5lb3hX375JWbNmoWEhARNz3eXLl3A5XKxdu1aDBs2DP7+/ujcuTNcXV3x4YcfIiQkBD4+Prhy5Qo8PDz01lEZ++55goODce3aNQBAVFQUSkpK8M9//lPjvcfIyAgrV67EwIEDERoaiqioKI2odZGcnFytDQsWLIBMJsP06dMRGhqKjz/+GG3btsWUKVMQEBCA8PBwrFu3DjKZDPPmzUNeXh5GjBiB0NBQ7NixQ6eNQMVNLScnBx06dDDka6+el3b/Pcfjx4+pX79+JBQKqV27dmRkZEQBAQGUnp5uSDENBuvVb90TeJRKJQ0YMICUSmVjm1IrTpw4QV9++WWV/Q3Wq+/q6orff/8dqampyMnJgZOTE1xcXOruLsRg1CE8Hg///Oc/sWPHjipvA82Jffv2YePGjXVaZo2F/+zZMzx79gyvvfYa3N3dcerUKfzwww8YMWKEpgOGwWhqtIRrs3ISUV1SozZ+fHw8OnTooPFZtnjxYixevBgmJib48MMPtZwdMpoO1HIjoDNQu9+3RnP1BwwYgHfffRfTpk2DUqmEnZ0dtm7dinHjxuH69euYNm0aEhMTX9mI+qK1ztVXqVRITk6Gqakp7Ozs6mzSB6PpQETIzc1FWVkZOnXqBB6PZ9D1XiPhW1lZoaCgAFwuF7/99huCgoJQUFAAc3NzqNVqWFpa6nWO0Zi0VuEDFWPMGRkZ7KnfguFwOHB2dtbMPDTkejeocw+o8MLj5eWlqaykpARCoe5pnYzGw9zcHJ06dTLISQqjeSEQCHTOIKwJNRK+v78/Nm/ejLfffhu7d+/W8qO/fv16BAQEvFLljPqFx+O98oXBaNnU6FX/3r17GD58OJ48eYLAwECcOHECxsbG6NmzJ54+fYqLFy+ia9euDWGvQbTmV31G66POX/W7du2KR48eIT8/H3Z2dpr9ixYtwuDBgzVRUhgMRvOgxm18LperJXoAePvtt+vcIAaDUf8Y3LnHYDQFiu7cRPbZ45Dl5UDYxgGOA4e/ckjv1ohBi3QYjKZA0Z2beLzvO0izs0BKJaTZWXi87zsU3bnZ2KY1G5jwGc2O7LPH//rvOZ9BHM5z+xkvw6BX/cLCQmzduhUPHz6EWq3WSvv+++/r1DAG48FG7RDYpFJBJZVCUSSuejARpNmZVfIAQJcPF9WXic0Wg4Q/YcIE5ObmYvjw4eDzWfcAo/5RK5RQScuhKpeCKicjVXoKeREuF0TEpijXAIPU++uvv+LJkyfVuidiMGoDqdUoy3wC8d1ESHOegZRKgMMBVygE39wMPGNjqGUyyAsKq2ZWqyHPy4PAygrcahxqMAwUfqdOnZCfn8+Ez6hTSKVCSUoSxHdvQXzvFhSSIvBMzcA1EoAnEoErNAKH+3d3FM/EBEY2gEJSDFIqweHzIRBZAFwuFEVFkD3LBd/CHHwLC/b0
14NBwg8JCUFQUBAiIiKqjOkvWbKkRmWo1WpMnz4dDx48gIWFBWJiYjS+xE6dOoWVKyvaaEqlEteuXUNaWhoePnyId999F+7u7gCAL7/8EoGBgYaYzmhiqGRSFCfdhfjeLYjv34FaWg6BlQ2sevSBZddeMHP1QNKWVXrz80xMwPvLlfXzcO3toSwuhrK4BKrycgisrOrxLJovBgm/oKAAQ4cORUlJCUpKSjT7DbmrHjlyBCYmJoiPj8eBAwcQHR2t8Vo6ZMgQjZvupUuXIjw8HG3btsXBgwexZMkSTJs2rdqyZTIZZLK/vcVKJBJDTo9RB1Q3vq4oKYbk/m2I7yai+OF9kFIJ47ZOsPMfAMtuPWHS1rnWT2gOhwOBSASeiQkURWLI8/Lx5PAPaBf2JvimtXMc2pIwKHZeXTB37lyEhIRg9OjREIvFCA0NRUJCgtYxKSkpmDBhAq5cuQIej4fIyEhkZWVBKpXC29sb33zzjc7FJ8uWLcPy5cur7H/VufpskohhVI6v4wV/vdZe/SEvyENpWgoAwMy1Ayy79YJlt54Q2rTRW56uHnpDICKoysqgKpeByxfAaeQYWPXs22Jf/+stdp5KpUJUVBQ6d+4MU1NTuLm5YeHChQYt/Xw+iEFlcIEXeTFogb+/P9avX48LFy6gpKQEO3bs0Fn2woULIRaLNVt6erohp6cFmyRiODrH1wEU3vgNPGNTuIRPxGsLo9Fp5hzYBw6sVvR1AYfDAd/MDF3nLIaZe0ekHdiJlN2bICvMr9d6mwMGveovXrwYZ86cwYYNG+Dq6oqUlBQsW7YMSqVSE/XjZYhEIo3YdQUweDFoAQCMGzcOVn+11UaPHo1jx47pLFsoFNaZb4CKi/j5caO/J4mwp/7fT2MiAimUUMlkUFbTtFIU5yPvt3PI++2cZl9Dja8LRFZwnzgd4nu3kHH0AB6s/xKOg0fAzn8AOK102bJBwt+7dy9+//13ODg4AAC6dOmC3r17o3fv3jUWvp+fH06ePIlRo0YhNjYW/v7+WukvBi0AKjoVDxw4AE9PT5w5cwZeXl6GmP1SdL1SSrOzqh7IJomAiCDLfwZlaSlUUhnUcjmgVgMcTsWmo+XIaSJzPiy79oS5R2c8PfU/ZMX9hMLE63AJnwhTp/aNbVqDY9AvIpPJNFE/KjEzMzOozRQREYHY2FgEBARAIBBg//79+OyzzxAZGYkePXroDFqwZcsWREZGQigUolu3bpg8ebIhZr8SHD6/Ygy5SgIHpFK1qieFXFyEkkcPUPzoAUpSHkAhLgIAcI2MwDczBVcoBNfICGqpVOf4ukBk0cAW64cnNIbzyDGw7tUP6Uf2IWnTN7ALGADHQSPAE1YfFqwlYVDn3owZM5CXl4fVq1ejffv2SEtLw/z582FjY6O33d2Y1LSzQ9cTXFVernuSCIcDDpcLIxtrcP+KZgq0rCe+sqwEJY+SUJyShJJHDyDLewYAMGnrDPMOXWDRoTOyTh7RGluvRFVeXmV8XdewG/Dy76y2nXsvq4dUKjyLP4vs08fAN7eAy+i3IeqiP8ZgU6fefO6tWbMGM2bMQNeuXaFSqSAQCDB+/Hit9nhLQd8kEY7ACPKCAsjy8iGwsgT/r2CHzQF9oxQqmRSljx+i+FGF0MuzMwEiCNvYw9yjM9oOeQPmHp3BNzPXlPX09M8669A3vt4U4fB4cAgeAqvuvZHx836k7N4Mq5594TRiDAQWLdtjk0HCF4lEOHDgAKRSqSaIH1fHXb+loO8iFtq1gaKwCIrCIpBCAX4zcOv14lCbNDsTj/d9B6GdA2T5uYBaDYHICuYdusAuYCDMO3SGkaV1Y5vdIAht7eAx5SMU3vwdmcd+xP11K9Du9XDY9PWD+O6tFjmkWyPhf/DBB9i0aROmTJmitz3fmlbncTgcCKytwBEIoJRIoFYooSwvA9+k6T79qw61VaAoFsN55FiYd+wCoa19ix3jfhkcDgc2fXwg6twNmbGHkX5kH55d
PgtZbjb+vllWDOm6TZzR7MVfI+FXxsdzc3OrT1uaFRwOBwILc3AFAsgLCpC08Wu4T5oFE4d2jWqXrnYxqVSQZufoPF4tlSL/j0vI/+OSZl9z6K9IVtnhitIDhWQKa04Z/Pkp6MTLrXW5fDNzuI55Dza9ffBo96a/9j4/L4GD7LPH6lT4l878gdibhZpzeb23NYIG9a2z8nVRI+EvXLgQANCtWzdN6N7n2b59e91a1YzgGQshtLcDB3wkb16N9uMmw6pbr8Y2C0DFSjdlSQmUJaV6j2kqQ22GkKyyw1FFL1QKMY/McVTRC6OQ+Eri19uJqFLp2Fnx5L+56CNwuByAwwWHy4W5R5eKpqGxCXjGprr/NzGt2ITGmo7RS2f+wJ6bSoDMAU7Fuey5qQTwR72K/6W/en5+Pv78808AQGRkJBwdHbWis0gkEsyZMwfTp0+vNyObOlw+Hx2nf4Ynh/bg8d5tcBg4HI4DX9fZ690QVExVLYdCIgHUavDNzcHh86EoKqpybFMaaqspV5Qe+HtKMFD5Kn5Z2QEduHngcupmFrreIV0uF3xzc4DUIDVVzGMAIC/M/8t3QDlU0nKoZVK9ZXOFxuCZmOAYggCuZcUcCEAzFyLuZiGCBtXJaejkpcI3MTHB3LlzkZ+fD6lUivfee08rXSgUYsGCBfVmYHOBJzSG28TpyDkXh+zT/4M0OxPtx77X4GPDKpkMCrEYpFCCZ2IMvkgE7l9PdQ6XU+OhtqbAEYvRmv+JCFKFGqUyJSSkQ4zgoIDMsU42CAIeBwIeF/y//k56LIaDjTGsLYzANaAPQyCy0Dmka2RlWeV783jv/SrHkUoFpbQMkqJS5OSX4OcLaVCqCDI1BzLiQQo+ZNDhN4DDQQGZYlXMnSpJC96rm+HGlwrf1NQU169fBwC89957iImJqZOKWyIcDgeOA1+HSVtnpB3cheTN/4b7pFkQ2trXe93S3BxkxR6BPC8fHIEARm3agCc00jqmOQ21AYBaTShTqFAmU6JMroKaAB6XAy4HUOt4qPO5HFiaCqBQqaFUEcrlKkhUSqz/8X5FOo8DOytj2FsZw8G6YrO3NkYxCWEOGV68J/BMTPDExhG/qjuiiCuClVoCP+5DdDHRdv2lJiBfIkNukRS5RRV/n4n//iyVVzYZzMHlAAIeFwIeByY8LnglEpRxjKFVORFE6hIA9TdaZFADLyYmBsnJycjOzta87isUCty7dw8fffRRvRjYHLHs2gOdZ89H6p6tSNr4DVzHT4Woc7d6qUtZWoLsM8eRd+0SjCytIbC2Bs/EuNn2zktKFUh8VIiEh4VIyy8DATDicSEyEcDMiAcjPhdlchVyJLIqeW3NjWAm1L6kiQjTRnVCTpEUzwqlyCms+HsjuQD5EtlfM4yDwIcK1pwyWHPKYPXX3xIIEc/pCPAqmhX5PCv8D954LM+EgKNCEZmiiEwgIROovrsJoEK/NhZGsLMyhpuDGby72MLOSgg7S2PsjU0Bl6v9u/AVMiQpTCqmOldOeeZwYGdM0PVeU1cYJPzPP/8ca9asgYWFRcXiDCKIxWIMHDiQCf8FjO0d0emD+XhycBdSdm9Cu2GjYRc0uM4EqVYqkPfrBWSfiwOI0HboKNj5hSJ52zd1Un5DklNQjpuPCnHzYSFSskoADtDRyQLWZkYwE/Ig4Gn3lZgJ+XAQAYWlCihUagh4XFibCaqIHqh4C7P/68kOd+00hVKNPLEMN36IQRGZovCvLUvVFiV4vonG0fp7R90OtpxSWHLK4c7NhxWnDN1HjYa9pRC2lkLwebr7dl4UPQAorZzQuSgTuVIuJFwziNSlsDNWQ2nlVOPv71UwSPjbt2/H1atXIZFI8N1332Hv3r1Yvnw5UlJS6su+Zg3fxBTuk97H01O/ICvuJ5RlZaB9xDtaU30NhYggvpOArBM/Q15UgDbegXAcNBx88+bTSUcEZJMIdy+l4+bDAjwtkELA56CbqxUmD/NADw8rWJgKdLZx
KzET8nUK3RAEfC7a2prgttXfQV8FAOwBtCHC47wynfk44MCyTcVCtaK/tvHuVq9sh9LKCdYAKqZLier1SV+JQd+cUqlEnz59UFBQgN9//x0AsGDBAja+Xw0cLhftho2GSVtnpP+4F8nbcuD+zgwYWRseb7A0/TGyjv+I0rQUiLq8Bo/3ZsPY3rEerH519I2vq4iDdLU1Hqrt8VBlh1IIYXbrGXp1sMKbgS7o5mYJoaDpLHzicjgw4nEgV1XtTHjxDaQ5YrCzzcuXLyMwMBClpaXIysqCkZERysvL68u+FoN1z74wtnNAyp6teLDxG7hPnA5zj041yisvKsDTEz+jMPE6jB3bocOUj2DRqelFJ9Y3vt5OWYg8soAcfIg45ejCy0FH3jMMmP0BeDpef5sK1mZGOvsSrM2avwdfg4S/dOlSjBw5EomJifjkk0/Qt29fCAQCvPXWW/VlX4vCpK0zunz4Lzz+7w48/P4/cBoxBm18g/W2+1XScuRcOInc+HPgmZjAJXwibPr6Ndr8gJehe3wdeEYi+PAfoyM3F204JZoO7KYsesCwvoTmhkFnMHz4cGRmZsLU1BTz589HYGAgioqKEBYWVl/2tTj4ZuboMOUjZMYeQeYvB1H+NAMWHbsg5/xJzUIQh9AwqKRlyD71P6jkUtgHDYZ98OAmtV78+TF2oGLoLU+qu02sAg/Z1j2R/cL+5jD7oy76EpoiNTqjixcv6k0zMzPDpUuXEBwcXGdGtXQ4PB6cR46BSVsnpB/Zh4LrVzRp0uxMpO2v8G1g3ccHbYeMgpFV010lp1CpIS5XoFiqv0uqJbSJWxo1En6lxxu1Wo2MjAzY2trC2dkZ2dnZyMnJQc+ePat4ymW8HNu+fsg5fwLy/Krzy41s7eA6tv49Db0KFbPoVBCXKVAqV4HLASyNBeDzOMgrkVc5viW0iVsaNRJ+amoqAGD27Nlwc3PD/PnzNevwv/32W/z222/1Z2ELQd9CEF2ir9zf1Hz7qdSEG0kFOPXHU2QVSSHgcdDG3AjmxnzNVFgel9Mi28QtDYN+kR9++AGFhYVazjc+/PBDfPHFF3VuWGtB30KQprRqrkyqxOXbuTiTkI3CYjk824vgIBLC1IhXpWOypbaJWxoG/UIeHh7YtWuXVkSbjRs3wtPTs84Nay3oWwjSFFbNPSuS4uyNbMTfyYVSRfDpaovBXo5wsTerdnINo+ljkPC3bNmC8PBwrFy5Eu3atcOTJ0/A5XLxyy+/1Jd9LR59vv0aazENEZCcIcGpP7KR+LAQpsZ8DO7riJBeDrAyf/UZh4ymhUHC9/X1RWpqKq5cuYKcnBw4OjoiICAARrWYgspoGqvmVMRBktoBfyjbI+fAPbS1McY7Q9zh27UNjASsV76lYXBj7ObNm8jKyoJarUZ6ejr2798PAFXW6TOaHrqm0zpzC3FL5YSbSheUwBiu3Hz8I7wLurlZGrR2ndG8MEj4H374Ifbs2YNevXqB/1znE4fDYcJv4uibTsuFChwAXXnZ8OI9gR23FF3chzeytYz6xiDhHzx4EL/++iu6d+/+yhWq1WpMnz4dDx48gIWFBWJiYmBvX+Go4tSpU1i5smIIS6lU4tq1a0hLS4NEIsHUqVNBRBg8eDBWrFjxyvW3VvS5qzKGEpOFV2HKqXngU0bzxyDhm5ubo3372sUZO3LkCExMTBAfH48DBw4gOjpaE5BjyJAhGDJkCICKdQHh4eFo27YtZs6cifXr16Nfv34YMWIEEhMT0atX03Bo2dR4cSptJflSXQ43OSiHECdE2k/45jCVllE7DBL+7NmzERERgffffx92dnZaaTWdshsfH4+hQ4cCAMLCwrBq1aoqx6SkpCAuLg5XrlRMZb179y68vb0BAMOGDcP58+d1Cl8mk0Em+3s1laSa6K2tDT6PA0ULXWLKMByDhL9582YAwLx587T2czicGjvjkEgkmrheFhYWmpDZz7N27VrMnz8fvL8CUz7v1dfCwgJZ
WToi2QKIjo7G8uXLa2RHa8NYwINCVXWiEJtO2zoxSPiVU3drg0gk0oi9uLgYlpaWWukqlQqnT5/Wisf3/OwwXXkqWbhwIebOnav5LJFINMFAWjMqNaFUpoQxnws1gU2nZcDg97zff/8dH374IcLDw5Gfn481a9bAgIC78PPzw8mTJwEAsbGx8Pf310q/ffs2evbsCYHg7yeRp6cnrl+/DiLCiRMnquSpRCgUQiQSaW0MoKC0YuGMg6UxnG1M4G5nBmcbEyb6VoxBwt+7dy/efPNNWFlZ4ezZs1Aqlfj+++/xr3/9q8ZlREREoKysDAEBAdiyZQsWLVqEzz77DLdv3wYAJCcnw8PDQyvPmjVrMGfOHPj4+KBnz57o27d+wwu1JKQKFYqlStiYGTV5xxeMhsOgW35UVBTi4uLQo0cPbNq0CQ4ODjh58iT69u2Lb76pmXdXHo9XJcDmmjVrNP+PHTu2SpguT09PXLp0CQzDICLkFssg5HNhYcye7oy/MeiJX1BQgK5dK3y9Vba77e3todIZY4zR2IjLlVCoCG3MjZqtn31G/WCQ8AcOHIi5c+dCKv07JlhUVBRCQkLq3DBG7VCq1CgslUNkwm9S3msZTQODhL9hwwbcv38fIpEIYrEYlpaWuHjxIv7v//6vvuxjvCJ5JXJwuRzYmLIFVIyqGNTws7Ozw8mTJ5GdnY309HQ4Ojqy4bImSGWsOXsLoc7oLQyGQcKXSqU4cOAA0tPTof4rNHAlS5YsqVPDGK+Gmgh5JXKYCLgwE7JXfIZuDBJ+REQEUlNT4ePjo+V+i3UcNR2KyhRQqgmOls03cCaj/jFI+JcvX0ZmZiYsLBrfLRSjKk/zy1FUpoCVqQBGfDYHn6Efg66Ovn37Ii0trb5sYdQCIsK+M4/B53JgZcrm3zOqx6An/qpVqxAcHIwBAwZUmS//4qQcRsNy7X4+HqRL4GgpZJ5zGC/FYA88Xl5e6N69u2blHKPxKZMqcfB8Gvp2tkFhYdUgjwzGixgk/AcPHkAsFmt17DEan5/iMyBXqDEu1BVbjyQ1tjmMZoBBCg4LC8Pp06fryxbGK/A4uwQXbuZgVIAzrC3YZB1GzTDoia9WqzFixAh069YNNjY2WsNFZ8+erXPjGNWjVhN+OP0YTnamGNjHsbHNYTQjDBL+qFGjMGrUqPqyhWEgFxJzkJZTin9N6MaW3DIMwiDhV0bNZTQ+4lI5jlzOQFAPO3Rox+ZVMAzDoDb+4cOH4e7uDj6fDx6Pp7UxGpb/d/4J+DwOIoJq5/WY0Tox6In/8ccfY8mSJRg6dCjr2W9E7qWJce1+PiKHecDMhDnYYBiOQVeNSqXCtGnTtKLoMBoWhVKNfWceo5OTBfy6t2lscxjNFIMUvGTJEnzwwQf4xz/+UcWRZW0DbTBqxsnrT5ErluH9UZ3YIhzGK2OQ8EtKSvD9999j+/btWvs5HA5zv9UAFKlNcPy3TAz2coRTG9PGNofRjDGooR4dHY1Tp05BqVRCrVZrNib6+ocIOKvsAnMTAUb6OTW2OYxmjkHCt7S0hK+vL+vYawQequ2Qqm6D8QNcYWzERlEYtcOgV/1PP/0U4eHhmDVrVpWZezWNnccwHDnxcFbRBR7cXPTu6NPY5jBaAAYJ/9tvvwUArTBVgGGx8xiGc0XpASkEGMh/AA5nZGObw2gBGPTOnpqaqnMzRPRqtRpTp05FQEAAwsLC8OzZM6307du3w9fXF3379sW2bdsAAJcuXYKrqytCQ0MRGhqKy5cvG2J2syZXbY4bKhf48lNhyZW+PAODUQMMbqxfunQJM2fOxPDhwxEZGYm4uDiD8h85cgQmJiaIj4/HlClTEB0drUlLTk7Grl27cPHiRcTHx+Pp06cAgBs3bmDJkiU4f/48zp8/j8DAQEPNbpYQAacVnrDmlKEfj3k+YtQdBgn/hx9+QHh4ONq0aYPRo0fD0dER77zzjkHed+Lj4zF06FAAFct8z58/r0k7e/YsvLy8
MGHCBISFhWHYsGEAgISEBBw4cADBwcH47LPP9I4iyGQySCQSra05c0fVDllkhcH8B+Bxah6YlMF4GQa18VeuXInY2Fh4e3tr9kVERGDixImYOnVqjcqQSCSayT8WFhaakNkAkJubi8uXL+PSpUt49uwZhg8fjrt378Lf3x+BgYHo2rUr3n//fezYsQMzZ86sUnZ0dDSWL19uyCk1WcpIgIvKjujKfQoXXmFjm8NoYRj0xH/69Cn69Omjtc/Lywt5eXk1LkMkEmnE/mKse1tbW4SEhMDMzAzu7u4QiUTIzc3FuHHj0K1bN3A4HIwePRqJiYk6y164cCHEYrFmS09PN+T0mhSXlB1B4CBEkNzYpjBaIAYJ38fHB6tXr9ba9+9//xv9+vWrcRl+fn44efIkACA2NlYr1r2/vz/OnTsHhUKBvLw8FBYWam4G9+/fBwCcOXMGXl5eOssWCoUQiURaW3MkU22JOyonBPIfwowjb2xzGC0Qg171//Of/2Do0KHYtGkTXFxckJaWBjMzM/zyyy81LiMiIgKxsbEICAiAQCDA/v378dlnnyEyMhK9evXCO++8Az8/PxAR1q1bBx6Phy1btiAyMhJCoRDdunVr0X4BVMTBaYUnHDli9ORlNrY5jBaKQcLv0qULkpOTcenSJeTm5sLZ2Rm+vr4Grdbj8XhVOgPXrFmj+X/+/PmYP3++Vrqfnx+uXr1qiKnNlgSVC/LJHO8YXQNzqsOoLwx61b979y769+8PS0tLjB8/Hj/99BP8/PzY5J1akqyyw25Zf6yXDsQFZSe4cfPgwC1+eUYG4xUxSPgzZszAO++8g969ewOoCLAxfvx4TJ8+vT5saxUkq+xwVNELeWQO1V8/R6raDskqu0a2jNGSMUj4d+7cwbx58zSv9nw+H3PnzkVCQkK9GNcauKL0AEAAKt/rOQAIvyo9Gs8oRovHoDa+u7s7fv75Z4wePVqz7/jx4/DwYBfpyzhiMVrn/jxpqY69HOSRhc48C+rYLkbrxCDhr169Gm+99Rb69OkDJycnZGZm4vbt2/jpp5/qybyWi0yhQmGZQm+6gMeWPjPqD4OEP3jwYCQlJeH48ePIyclBWFgYXn/9dbRpw3y/1RS5Uo2CUjnK5CrweRyIjPmQSJVVjrM2YxFvGfWHwV4zzc3NYW5ujsLCQrz11lu4e/cuE34NkCvVKCyTo1SmAp/LgZ2FEcyFfHA4HJgY8VBYqoBCpYaAx4W1mQBmQubQlFF/GHR13bhxAyNGjICHhwdu3bqF8PBwDBgwAJs2bcKkSZPqy8ZmTXZBOf73ayYyCsvB53LQxtwIFsZ8LScmZkI+EzqjQTE4TPb//d//YcyYMbC2toa7uzvi4uIQGRnJhP8Czwql+N/VTPx2Lw9WZkY6Bc9gNBYGCf/+/fuIiIgAAM0FHBAQgNzc3Lq3rJmSWyTFsatZuHo3FyIzAcYPcEVgD3us2Xe3sU1jMDQYJPzu3bvj4MGDGD9+vGbfsWPH0K1btzo3rLmRL5Hh+NVMxP+ZB3NjPsaGuiKohz2MBKx3ntH0MHiRzuuvv46NGzeitLQUI0eOxPXr13H06NH6sq/JU0xC/KZ0w50diTAV8hAR5IKQXvYQCpgnXEbTxSDhe3l5ITk5GcePH8eoUaPg6OiImJgY2NjY1Jd9TZYSMsJvSnfcVjlBACVGBzljQB8HJnhGs6DGwiciFBQUwNbWVutVXyaTYcGCBVi1alW9GNiYJKvscEXpgUIyhTWnDP78FLTjinFN6YZbKifwoYYfPwV9eOno4RPU2OYyGDWmRg3Qy5cvw9nZGfb29ujdu7fGCealS5fw2muvYc+ePfVqZGOgvXiGhzwyx1FFL2yTBeBPVVv48B9juvAy+vMfw4jDIgkxmhc1Ev6nn36KcePGISEhAb1798bixYuxdetWDB48GEOHDsW9e/fq284GR9/iGWMoMV0YDz9+KoRM8IxmSo1e9R88eIArV67AyMgIGzZsQPv2
7XHq1CmcOHECoaGh9Wxi/aNrMUy+nsUz5RAiVjSiSgpbPMNoTtRI+BwOB0ZGRgAqpuyWl5fj3Llz6NWrV70a15gIeBzIVVVdWrPFM4yWwCtdxcbGxi1a9ABgbWakZz9bPMNo/tToiU9ESE9PBxHp/AwA7du3rx8LGwkzIR8OIrDFM4wWSY2u4tLSUri5uWkJ3dXVVfM/h8PRG92mOcMWzzBaKjW6qtVqdX3bwWAwGhDWU8VgtEKY8BmMVggTPoPRCmlw4avVakydOhUBAQEICwvDs2fPtNK3b98OX19f9O3bF9u2bQNQMYEoICAA/v7+WLJkSUObzGC0OBpc+EeOHIGJiQni4+MxZcoUREdHa9KSk5Oxa9cuXLx4EfHx8Zo1AfPmzcP69esRHx+P69ev642WK5PJIJFItDYGg1GVBhd+fHw8hg4dCgAICwvD+fPnNWlnz56Fl5cXJkyYgLCwMAwbNgxARegub29vcDgcDBs2TCvP80RHR8PS0lKzubi41PfpMBjNkgYXvkQi0YSvtrCwQHHx3zHicnNzcfnyZcTExGDnzp2YMmUKiEhr/sCLeZ5n4cKFEIvFmi09Pb1+T4bBaKY0+OwUkUikEW5xcTEsLS01aba2tggJCYGZmRnc3d0hEomQm5ur5aDyxTzPIxQKIRQK6/cEGIwWQIM/8f38/HDy5EkAQGxsLPz9/TVp/v7+OHfuHBQKBfLy8lBYWAhbW1t4enri+vXrICKcOHFCKw+DwTCcBn/iR0REIDY2FgEBARAIBNi/fz8+++wzREZGolevXnjnnXfg5+cHIsK6devA4/GwZs0azJgxA1KpFIMGDULfvn0b2mwGo0XR4MLn8Xj4/vvvtfatWbNG8//8+fMxf/58rXRPT09cunSpQexjMFoDbAIPg9EKYcJnMFohTPgMRiuECZ/BaIUw4TMYrRAmfAajFdKi/UpVTvV92WIdaXlJretqiDoaqp6aLG5i35lhdTREPZVpz09x1weHanJUMyUjI4Mt1GG0OtLT0+Hs7FztMS1a+Gq1GllZWbCwsNCa728IEokELi4uSE9P1ywuqg9aUj0t6Vwaqp66qIOIUFxcjHbt2oHLrb4V36Jf9blc7kvvfDVFJBLV68XVEutpSefSUPXUtg59C9hehHXuMRitECZ8BqMVwoT/EoRCIZYuXVrv6/xbUj0t6Vwaqp6GOpdKWnTnHoPB0A174jMYrRAmfAajFcKEz2C0QpjwGYxWCBM+gJ9++glTpkwBAOzcuRO9e/dGcHAwfvnlFwDA9evX4e3tjcDAQERFRQEAioqKMGzYMAQHB2P06NEoLy/XW75UKsW4ceMQEhICX19fXL16FYcPH4a3tzf69++Po0ePAgBycnIwaNAgBAUFYebMmVpRiq9du6aJM1BfdeTm5sLZ2RmPHz+ul3pOnTqF0NBQhIaGIjAwEEZGRpqgKa9aTyVz5szBrl27NJ8XLVqE/v37Izg4GMnJyfVSx6effgp/f38EBwcjJSWl1t+Zvnp0RZeqNdTKmTdvHnXp0oUmT55Mubm51LFjR5JIJCSVSqlfv34klUqpb9++dOPGDSIieu+99+jXX3+l9evX09KlS4mIaNGiRbRlyxa9dWzcuJEWL15MRET3798nX19f8vT0pOLiYhKLxdSjRw+Sy+X00Ucf0cGDB4mI6P3336effvqJiIjWrVtHr732GoWEhNRbHSqVisaNG0edOnWi1NTUequnkiVLltDq1atrXU9BQQGNGDGCPDw8aOfOnUREdP36dRo+fDgREV29epXefPPNOq/j0qVL9NZbbxER0blz52jMmDH1ci5JSUkUEBBAMpmMysvLadmyZXrrMYQWPWW3Jvj4+GD48OHYvXs3UlJS4OXlBQsLCwCAh4cH7t69i7y8PPTp0wcA4Ovri19//RU9evTA8ePHAVT4+hcIBHrrmDRpkmatgFKpxP379xEaGgpzc3MAQIcOHXD37l3Ex8fjyy+/BPB3lKHR
o0fD1dUVhw8fxowZM+qtjuXLl+Odd95BTk5Otd9XbesBgJSUFMTFxeHKlSu1rsfGxgaLFi3CiRMnNHmfj9bUv39/vSHXalNHYGAgfH19AVQsirG2tq6Xc3k+ulR+fj5WrVqltx5DaPWv+mPHjtX8KB07dkRiYiLy8/NRWFiIX3/9FWVlZWjXrh2uXr0KtVqNEydOoKysDG3atMGRI0fQrVs3HDt2DKNGjdJbh4WFBczNzZGbm4tJkybh008/1ZqPXRkdSF+UofDw8GpvLLWt48SJEygvL6/2HOrqXABg7dq1mD9/Png8Xq3rcXFxgZ+fn1be5+sG9C9TrU0dAMDn8/H+++/jo48+wtixY+vlXPRFl6otrV74z2NjY4Po6GiMHj0as2bNgo+PD2xtbbF9+3YsXrwYYWFh6NChA2xtbbFixQosW7YMd+/exdatWzF58uRqy05KSsKgQYOwfPlyhIeHawmhMjpQdVGGasKr1rF7925cuXIFoaGhuHnzJsaPHw+xWFwv56JSqXD69GnN07+256OL5+sGUO1KtVeto5ItW7bg4cOHmD17NkpLS+u8Hn3RpWoLE/5zyOVy/Pnnn7h8+TJ27NiBp0+fonPnzoiNjcWhQ4dw4sQJPHnyBKGhobCystL8WE5OTigoKNBbbnp6OkaNGoXt27fjjTfegKenJ+7fv695Mt6/fx+enp7VRhl6GbWpY9++fbh8+TLOnz+P3r17Y//+/XovxNqey+3bt9GzZ8+XvsHUtB5d+Pn54dSpUwCAq1evolu3bnVex7Fjx7Bw4UIAgImJCbhcrt4bTG3q0Rddqra0+jb+8xgZGUEul6Nv374wNjbGypUrweVy0aFDB4SGhsLY2BgTJkyAp6cnoqKiMG3aNKxduxZEhA0bNugtNyoqCiUlJfjnP/8JALCzs8PKlSsxcOBAqFQqREVFQSAQYPHixZg0aRLWrFmDzp07480336yx7Q1RR13Uk5ycDA8PjzqrRxfe3t7o2bOnpg2+c+fOOq9jyJAhOHDgAIKCgqBSqfD111/DxMSkzuvRF12qtrC5+gxGK4S96jMYrRAmfAajFcKEz2C0QpjwGYxWCBM+g9EKYcJnMFohTPhNiNLSUuTl5TW2GS2G6lYZtnaY8F+Rx48fv3KQDn0EBwfjzp07dVpmU4XD4egUZlpaGrp37w4LCwvs3bv3lcs/evSoZqm1oXTv3h2XLl166XHm5uZ48uTJK9XR2LCZe02I/Pz8xjah0bl48SK4XC7EYvFLo8FUR0FBwSsvZvnzzz9rdFxJSd3E9msU6mRxbyskNTWVqvv6vv76a/L09CQzMzNydXWlAwcOEBFRcXExRUREkLW1Nbm6utK8efNIrVbTu+++SxwOh4yNjWn//v1aZS1dupQiIiI0n0tLS8nCwoIyMzOpuLiYZsyYQQ4ODtS+fXv65ptvNMclJSVRWFgYOTo6kpmZGY0aNYokEgkREbm6utL06dPJ2tpaKw9RxfpyV1dXneeqz34iokePHtHQoUPJysqKXnvtNYqLi9OUcfToUerYsSOJRCJaunQpAaiy7v/QoUMkFAqJy+WSmZkZKZVKOnbsGPXs2ZNEIhH5+/vT77//rrGxV69eFBQURLa2tlpl3b17V1NOz549KTU1lRwdHemtt94ia2trunz5Mv32228UHBxMbdq0IZFIRJGRkaRUKjXfzblz54iICACtX7+eHB0dycHBgaKiojT1VJ5DamoqOTk50aJFi8jGxoZcXFxo+/btmuMOHDhAbm5uZGdnR/PmzSM3N7dqfR40BEz4r0h1wj937hy5uLhQRkYGqdVq2rBhAzk6OhIR0TfffENjx44luVxOWVlZ1L59e7pw4QIRaV9wz3P37l0yNTWl0tJSIiI6ePCgxinHjBkz6M033ySxWEypqank6empuXGEhITQl19+SSqVinJycqhr164ahyGurq40ZswYkkqlmpvB8/brE74++xUKBXXt2pWio6NJLpfT
2bNnydramtLS0igzM5PMzMzo2LFjJJVK6YMPPtApfCKinTt3as4tMTGRzMzMKC4ujhQKBX3//ffUpk0bys/Pp3PnzhEAOnz4MInF4mrLqbR/7dq1VFpaSgqFgtzc3CgmJoaIiFJSUqhNmzaaG9WLwp84cSKVlZXR2bNnicfjUXp6uiatUvgAaP78+SSXy2n37t1kYWFB5eXldP/+fbKwsKALFy6QVCql2bNn6z33hoQJ/xWpTvilpaWUmZlJarWa0tPTaceOHZpjt27dSh4eHrR3717Kz88nlUqlyadP+EREPXv2pEOHDhER0dixY2nz5s2kVqvJ2NiYkpKSNMdt3bqVwsLCiIgoLS2NZDIZlZSU0I0bNygwMFDjwcXV1ZX27duns67qhK/P/itXrpCTk5NWORMmTKBVq1bR9u3bKTg4WLO/qKioRsJftGgRTZgwQSvd19eX9uzZQ+fOnSNjY2PN20Z15VTan5WVpUl/+PChxpZff/2VOnfuTLt27dJ8N88L/9q1a5p8Tk5Omhv1i8J/9uwZEREpFAoCQGlpabR8+XJ69913NflLSkqIz+c3uvBZG78e4HA4WLRoEY4ePYr27duja9eumrTp06ejoKAA0dHRmDx5MoYPH44dO3bAzs6u2jLffvtt/Pjjj3j99ddx4sQJbNy4Ebm5uZBKpfD29tYcp1ar4e7uDqCirTp8+HAUFBSgd+/eEIvFWu1eR0dHg89Nn/3p6enIzs6GlZWV5lilUglbW1uoVCo4OTlp9ltaWmodp4/c3Fy4urpq7XN1dUVGRgacnZ1hb29vUAfr8+db6aVHqVSib9++kMlkevsEnv9t+Hy+lp9CXcfx+RWyUqvVyMzM1ArcamZmVifLamsL69WvB9atW4eMjAykp6cjISFBsxwTAO7du4fx48fjzp07ePToEYqLi2vkTuntt9/G8ePHcfz4cfj6+sLOzg62trYQCARISkpCUVERioqKkJqaiuPHj0Mul2Ps2LFYu3YtsrKycPz48SrLYfWJhsvlQqlUaj4/3+moz35HR0d07txZY0dRURHu3buHqKgoODo6avV+l5WVQSKRvPScnZ2dkZaWprUvNTUV9vb21dqvj8rjMzIyMGvWLPz8889IS0vD4cOHa3QjehWcnZ2Rnp6u+VxeXt4kOnGZ8GtJRkaG1lZaWoqioiIYGxuDz+cjLy8PS5YsAQAoFArs27cPs2bNQnFxMezs7CAQCGBjYwOgIn6aPkF06NABnTp1wooVK/D2228DAHg8Ht5++20sWLAApaWlKCwsxFtvvYWvvvoKMpkMUqkU5ubmICL88ssviIuLg0KheOk5eXh4IDs7G5cvX4ZUKsW3336rSdNnv6+vL+RyObZt2waVSoV79+7B29sbp06dwsiRI3H79m0cOnQIcrkcS5Ys0fvUfJ5x48bh6NGjOHHiBJRKJXbu3Il79+5h+PDhL81b3XcpkUjA4XBgYmIClUqF7du349atWzX6bgxlwoQJ+Pnnn3H58mXI5XJ88cUXWjfVRqNRGxrNmMp23Yvb5s2bKSsri4KDg8nc3JxcXFxoxYoVZG1tTYmJiVRSUkLjx48nGxsbsrS0pPfee4/KysqIiCgqKopMTU1p69atOutcvXo1GRkZUWFhoWZfUVERTZkyhRwcHMjGxoYmT56s6QTcsGED2dvbk7W1NYWEhNCsWbMoPDyciKrvTyAi+uqrr6ht27bUtm1b2rx5s6aNX5399+7do8GDB5OVlRU5OTnRqlWrNOWdPn2aPD09ydzcnP7xj39U6Ymv5Pm2ORHRL7/8Qj169CAzMzPq168fXb58mYiq9kO8SGZmJnXo0IHat2+vsz/m888/J2tra7K1taWRI0fSuHHjaM6cOVW+G7zQF6ErTVf5z+eLiYkhZ2dnatOmDS1YsIAEAgFlZmbqtb0hYI44mhEHDx7E3r17q/hhZzRd0tLSUF5ernGtVV5eDnNzcxQXF8PU1LTR7GKv+s2A0tJS
3Lp1C+vXr3+pU09G0+LJkycYMmQIMjIyoFQqER0dDX9//0YVPcCE3yzIzc2Fv78/nJycEBER0djmMAwgKCgIs2fPho+PD2xsbHD16lXs2bOnsc1iPvcYjNYIe+IzGK0QJnwGoxXChM9gtEKY8BmMVggTPoPRCmHCZzBaIUz4DEYrhAmfwWiF/H9nw2uQXtzHOAAAAABJRU5ErkJggg==",
      "text/plain": [
       "<Figure size 255.45x235.8 with 1 Axes>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "# Figure sizes below are given in millimetres; matplotlib's figsize is in\n",
    "# inches, so convert with the exact factor (1 inch = 25.4 mm) instead of a\n",
    "# hand-rounded 0.0393.\n",
    "mm = 1 / 25.4\n",
    "plt.rcParams['font.family'] = 'arial'\n",
    "plt.rcParams['font.size'] = 7\n",
    "\n",
    "# Last-training-year checkpoints; must match the fine-tuned models above.\n",
    "years = [1998, 2001, 2004, 2007, 2010, 2013, 2016]\n",
    "\n",
    "fig, ax = plt.subplots(1, 1, figsize=(65 * mm, 60 * mm))\n",
    "\n",
    "# Bars carry the legend labels; draw them first so the overlaid line+marker\n",
    "# traces (same hues, darker) stay on top.\n",
    "ax.bar(x=years, height=summary_dict['near5'], width=2, color='#D89484',\n",
    "       label='Next 5 years')\n",
    "ax.bar(x=years, height=summary_dict['2017'], width=2, color='#87A1D3',\n",
    "       label='Recent (2017-2022)')\n",
    "ax.plot(years, summary_dict['near5'], '-o', color='#CF796A', markersize=4, linewidth=1)\n",
    "ax.plot(years, summary_dict['2017'], '-o', color='#658CC8', markersize=4, linewidth=1)\n",
    "\n",
    "ax.set_ylim(0.58, 0.85)\n",
    "ax.set_xticks(years)\n",
    "ax.set_xticklabels(years)\n",
    "ax.set_ylabel('Recommendation Score', fontsize=9)\n",
    "ax.set_xlabel('Last year used for training', fontsize=9)\n",
    "\n",
    "# Labels were attached to the bar artists above, so the legend stays in sync\n",
    "# with the plotted data even if the drawing order changes.\n",
    "ax.legend(loc='upper right', fontsize=7, title='Test set')\n",
    "fig.tight_layout()\n",
    "\n",
    "fig.savefig('year.eps', bbox_inches='tight')\n",
    "plt.show()"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "llmminer",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.9.17"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
