import glob

# Root directory that holds the per-model lm-eval output folders
folder_path = "lm-eval-output"

# Recursively collect every results.json beneath the root
search_pattern = f"{folder_path}/**/results.json"
results_json_files = glob.glob(search_pattern, recursive=True)

# Report how many result files were discovered
print(f"Found {len(results_json_files)} results.json files")
{'acc,none': 0.3779116465863454, 'acc_stderr,none': 0.009718712281227459, 'alias': ' - xnli_bg'}, 'xnli_de': {'acc,none': 0.41365461847389556, 'acc_stderr,none': 0.009871502159099368, 'alias': ' - xnli_de'}, 'xnli_el': {'acc,none': 0.3650602409638554, 'acc_stderr,none': 0.009650194822749637, 'alias': ' - xnli_el'}, 'xnli_en': {'acc,none': 0.5261044176706827, 'acc_stderr,none': 0.01000840465166064, 'alias': ' - xnli_en'}, 'xnli_es': {'acc,none': 0.4879518072289157, 'acc_stderr,none': 0.010019162857624487, 'alias': ' - xnli_es'}, 'xnli_fr': {'acc,none': 0.478714859437751, 'acc_stderr,none': 0.010012987604500423, 'alias': ' - xnli_fr'}, 'xnli_hi': {'acc,none': 0.4666666666666667, 'acc_stderr,none': 0.00999977679318763, 'alias': ' - xnli_hi'}, 'xnli_ru': {'acc,none': 0.43253012048192774, 'acc_stderr,none': 0.009930409027139453, 'alias': ' - xnli_ru'}, 'xnli_sw': {'acc,none': 0.3855421686746988, 'acc_stderr,none': 0.009755949341224318, 'alias': ' - xnli_sw'}, 'xnli_th': {'acc,none': 0.3437751004016064, 'acc_stderr,none': 0.009520310502882936, 'alias': ' - xnli_th'}, 'xnli_tr': {'acc,none': 0.3522088353413655, 'acc_stderr,none': 0.009574259292495757, 'alias': ' - xnli_tr'}, 'xnli_ur': {'acc,none': 0.42289156626506025, 'acc_stderr,none': 0.009902179034797438, 'alias': ' - xnli_ur'}, 'xnli_vi': {'acc,none': 0.44497991967871486, 'acc_stderr,none': 0.009961210239024633, 'alias': ' - xnli_vi'}, 'xnli_zh': {'acc,none': 0.3449799196787149, 'acc_stderr,none': 0.009528219800053311, 'alias': ' - xnli_zh'}, 'pawsx': {'acc,none': 0.5078571428571429, 'acc_stderr,none': 0.03988534011535243, 'alias': 'pawsx'}, 'paws_de': {'acc,none': 0.5175, 'acc_stderr,none': 0.011176284251254179, 'alias': ' - paws_de'}, 'paws_en': {'acc,none': 0.4145, 'acc_stderr,none': 0.011018419931591767, 'alias': ' - paws_en'}, 'paws_es': {'acc,none': 0.437, 'acc_stderr,none': 0.011094009127418984, 'alias': ' - paws_es'}, 'paws_fr': {'acc,none': 0.5435, 'acc_stderr,none': 0.011140733053371404, 'alias': ' - 
paws_fr'}, 'paws_ja': {'acc,none': 0.5575, 'acc_stderr,none': 0.01110894141174761, 'alias': ' - paws_ja'}, 'paws_ko': {'acc,none': 0.552, 'acc_stderr,none': 0.011122493197456285, 'alias': ' - paws_ko'}, 'paws_zh': {'acc,none': 0.533, 'acc_stderr,none': 0.01115875256825067, 'alias': ' - paws_zh'}, 'lambada_multilingual': {'perplexity,none': 131.45396740665825, 'perplexity_stderr,none': 95.28024178884175, 'acc,none': 0.38490199883562976, 'acc_stderr,none': 0.07608898792977997, 'alias': 'lambada_multilingual'}, 'lambada_openai_mt_de': {'perplexity,none': 370.91952810475857, 'perplexity_stderr,none': 24.98299339282566, 'acc,none': 0.23015718998641568, 'acc_stderr,none': 0.0058644241714399855, 'alias': ' - lambada_openai_mt_de'}, 'lambada_openai_mt_en': {'perplexity,none': 6.583236525584539, 'perplexity_stderr,none': 0.17481189179976453, 'acc,none': 0.5717058024451775, 'acc_stderr,none': 0.0068939712541951454, 'alias': ' - lambada_openai_mt_en'}, 'lambada_openai_mt_es': {'perplexity,none': 51.02874715706533, 'perplexity_stderr,none': 2.6341920857292744, 'acc,none': 0.36638851154667185, 'acc_stderr,none': 0.0067126579546010565, 'alias': ' - lambada_openai_mt_es'}, 'lambada_openai_mt_fr': {'perplexity,none': 29.56217917543056, 'perplexity_stderr,none': 1.5411073949753211, 'acc,none': 0.4513875412381137, 'acc_stderr,none': 0.0069329758883686235, 'alias': ' - lambada_openai_mt_fr'}, 'lambada_openai_mt_it': {'perplexity,none': 199.1761460704524, 'perplexity_stderr,none': 13.648756866456297, 'acc,none': 0.30487094896176986, 'acc_stderr,none': 0.006413613926848421, 'alias': ' - lambada_openai_mt_it'}, 'xwinograd': {'acc,none': 0.7442121825129242, 'acc_stderr,none': 0.06414679137553342, 'alias': 'xwinograd'}, 'xwinograd_en': {'acc,none': 0.8219354838709677, 'acc_stderr,none': 0.007935777723887321, 'alias': ' - xwinograd_en'}, 'xwinograd_fr': {'acc,none': 0.6987951807228916, 'acc_stderr,none': 0.0506639425494172, 'alias': ' - xwinograd_fr'}, 'xwinograd_jp': {'acc,none': 
0.6037539103232534, 'acc_stderr,none': 0.015802642616557255, 'alias': ' - xwinograd_jp'}, 'xwinograd_pt': {'acc,none': 0.7680608365019012, 'acc_stderr,none': 0.026075593860304693, 'alias': ' - xwinograd_pt'}, 'xwinograd_ru': {'acc,none': 0.5714285714285714, 'acc_stderr,none': 0.02792722339076032, 'alias': ' - xwinograd_ru'}, 'xwinograd_zh': {'acc,none': 0.7559523809523809, 'acc_stderr,none': 0.01915139944664688, 'alias': ' - xwinograd_zh'}, 'xstorycloze': {'acc,none': 0.5927441188857469, 'acc_stderr,none': 0.05262352730974911, 'alias': 'xstorycloze'}, 'xstorycloze_ar': {'acc,none': 0.5883520847121112, 'acc_stderr,none': 0.01266464832921408, 'alias': ' - xstorycloze_ar'}, 'xstorycloze_en': {'acc,none': 0.7081403044341495, 'acc_stderr,none': 0.01169925603764938, 'alias': ' - xstorycloze_en'}, 'xstorycloze_es': {'acc,none': 0.6598279285241562, 'acc_stderr,none': 0.012192034998028832, 'alias': ' - xstorycloze_es'}, 'xstorycloze_eu': {'acc,none': 0.57114493712773, 'acc_stderr,none': 0.012736202713147777, 'alias': ' - xstorycloze_eu'}, 'xstorycloze_hi': {'acc,none': 0.6048974189278623, 'acc_stderr,none': 0.012580772976133262, 'alias': ' - xstorycloze_hi'}, 'xstorycloze_id': {'acc,none': 0.6419589675711449, 'acc_stderr,none': 0.012337624883487575, 'alias': ' - xstorycloze_id'}, 'xstorycloze_my': {'acc,none': 0.48378557246856385, 'acc_stderr,none': 0.012860357805055867, 'alias': ' - xstorycloze_my'}, 'xstorycloze_ru': {'acc,none': 0.5268034414295168, 'acc_stderr,none': 0.012848623899505765, 'alias': ' - xstorycloze_ru'}, 'xstorycloze_sw': {'acc,none': 0.5413633355393779, 'acc_stderr,none': 0.012823020340169822, 'alias': ' - xstorycloze_sw'}, 'xstorycloze_te': {'acc,none': 0.5744540039708802, 'acc_stderr,none': 0.012723670419166326, 'alias': ' - xstorycloze_te'}, 'xstorycloze_zh': {'acc,none': 0.6194573130377233, 'acc_stderr,none': 0.012494500786685344, 'alias': ' - xstorycloze_zh'}}, 'groups': {'xcopa': {'acc,none': 0.5709090909090908, 'acc_stderr,none': 
import json

# Global result map if it's not set (kept across cell re-runs)
if 'global_result_map' not in globals():
    global_result_map = {}

#
# Function to process a single results.json file
#
def process_results_json(file_path):
    """Parse one lm-eval results.json file and merge it into global_result_map.

    The file's ``config.model_args`` is a comma-separated ``key=value`` list whose
    first entry is expected to be ``pretrained=<model name>``; the remaining
    entries identify the run configuration.

    Returns the (updated) per-model entry of the shape:
    ``{'name': <model>, 'config': {<confStr>: {confStr, confObj, results, groups}}}``
    """
    with open(file_path) as f:
        data = json.load(f)

    # Model args, presplit by ','
    model_args = data['config']['model_args'].split(',')

    # Extract the pretrained value; maxsplit=1 keeps any '=' inside the value
    modelname = model_args[0].split('=', 1)[1]

    # Remaining args identify the configuration; sort for a stable map key
    confArgsArr = sorted(model_args[1:])
    confStr = ','.join(confArgsArr)

    # Convert the option array of key=value strings to a dictionary.
    # maxsplit=1 so values that themselves contain '=' are preserved intact.
    confObj = {}
    for opt in confArgsArr:
        k, v = opt.split('=', 1)
        confObj[k] = v

    # Reuse the existing model entry if present, else start a new one
    modelObj = global_result_map.get(modelname, {'name': modelname, 'config': {}})

    # Same for the per-configuration result set
    confSet = modelObj['config'].get(confStr, {
        'confStr': confStr,
        'confObj': confObj,
        'results': {},
        'groups': {},
    })

    # Merge per-task results and group aggregates (later files win on collision)
    for test, result in data.get('results', {}).items():
        confSet['results'][test] = result
    for test, result in data.get('groups', {}).items():
        confSet['groups'][test] = result

    # Update the global result map object
    modelObj['config'][confStr] = confSet
    global_result_map[modelname] = modelObj
    return modelObj

# Let's test the function with the first results.json file
# (guarded so this cell is also safe to run when no files were found)
if 'results_json_files' in globals() and results_json_files:
    first_result = process_results_json(results_json_files[0])
    print(f"Processed example: ", first_result)
import os

# Let's reset and reprocess all the results.json files from a clean slate
global_result_map = {}

# Process all the results.json files
for file in results_json_files:
    process_results_json(file)

# Show high-level list of models found
print(f"Found {len(global_result_map)} models")
print(f"Models: \n{list(global_result_map.keys())}")

# Ensure the output directory exists before writing
# (otherwise open() raises FileNotFoundError on a fresh checkout)
os.makedirs('summary', exist_ok=True)

# Save the compiled result map (keys sorted + tab indent for stable diffs)
with open('summary/compiled-lm-eval-results.json', 'w') as f:
    json.dump(global_result_map, f, sort_keys=True, indent='\t')

# Echo that it's been saved to json
print(f"Saved to compiled-lm-eval-results.json")
\n", " | model | \n", "avg_acc | \n", "avg_acc_stderr | \n", "xcopa (acc) | \n", "xcopa (acc_stderr) | \n", "
---|---|---|---|---|---|
0 | \n", "bigscience/bloom-7b1 | \n", "0.570909 | \n", "0.061359 | \n", "0.570909 | \n", "0.061359 | \n", "
1 | \n", "togethercomputer/RedPajama-INCITE-7B-Base | \n", "0.525455 | \n", "0.036407 | \n", "0.525455 | \n", "0.036407 | \n", "
2 | \n", "mistralai/Mistral-7B-v0.1 | \n", "0.558727 | \n", "0.055164 | \n", "0.558727 | \n", "0.055164 | \n", "
3 | \n", "mosaicml/mpt-7b-instruct | \n", "0.537091 | \n", "0.041919 | \n", "0.537091 | \n", "0.041919 | \n", "
4 | \n", "mosaicml/mpt-7b | \n", "0.536000 | \n", "0.042339 | \n", "0.536000 | \n", "0.042339 | \n", "
5 | \n", "mosaicml/mpt-7b-chat | \n", "0.538000 | \n", "0.047059 | \n", "0.538000 | \n", "0.047059 | \n", "
6 | \n", "bigscience/bloomz-7b1-mt | \n", "0.546000 | \n", "0.038321 | \n", "0.546000 | \n", "0.038321 | \n", "
7 | \n", "bigscience/bloomz-7b1 | \n", "0.547818 | \n", "0.038920 | \n", "0.547818 | \n", "0.038920 | \n", "
8 | \n", "EleutherAI/pythia-2.8b | \n", "0.537455 | \n", "0.026941 | \n", "0.537455 | \n", "0.026941 | \n", "
9 | \n", "EleutherAI/pythia-1.4b | \n", "0.526545 | \n", "0.027441 | \n", "0.526545 | \n", "0.027441 | \n", "
10 | \n", "EleutherAI/gpt-j-6b | \n", "0.544182 | \n", "0.034404 | \n", "0.544182 | \n", "0.034404 | \n", "
11 | \n", "EleutherAI/pythia-6.9b | \n", "0.540545 | \n", "0.029689 | \n", "0.540545 | \n", "0.029689 | \n", "
12 | \n", "microsoft/phi-1_5 | \n", "0.521636 | \n", "0.026198 | \n", "0.521636 | \n", "0.026198 | \n", "
13 | \n", "microsoft/phi-2 | \n", "0.512182 | \n", "0.029742 | \n", "0.512182 | \n", "0.029742 | \n", "
14 | \n", "microsoft/phi-1 | \n", "0.517636 | \n", "0.029612 | \n", "0.517636 | \n", "0.029612 | \n", "
15 | \n", "allenai/OLMo-7B | \n", "0.537818 | \n", "0.034147 | \n", "0.537818 | \n", "0.034147 | \n", "
16 | \n", "TinyLlama/TinyLlama-1.1B-intermediate-step-143... | \n", "0.529273 | \n", "0.029316 | \n", "0.529273 | \n", "0.029316 | \n", "
17 | \n", "TinyLlama/TinyLlama-1.1B-Chat-v1.0 | \n", "0.528909 | \n", "0.031702 | \n", "0.528909 | \n", "0.031702 | \n", "
18 | \n", "RWKV/rwkv-5-world-1b5 | \n", "0.578909 | \n", "0.045103 | \n", "0.578909 | \n", "0.045103 | \n", "
19 | \n", "RWKV/rwkv-5-world-3b | \n", "0.590182 | \n", "0.056241 | \n", "0.590182 | \n", "0.056241 | \n", "
20 | \n", "RWKV/rwkv-4-world-3b | \n", "0.575455 | \n", "0.040977 | \n", "0.575455 | \n", "0.040977 | \n", "
21 | \n", "RWKV/rwkv-4-world-1b5 | \n", "0.554000 | \n", "0.039406 | \n", "0.554000 | \n", "0.039406 | \n", "
22 | \n", "RWKV/rwkv-4-world-7b | \n", "0.601455 | \n", "0.053116 | \n", "0.601455 | \n", "0.053116 | \n", "
23 | \n", "RWKV/HF_v5-Eagle-7B | \n", "0.621818 | \n", "0.068986 | \n", "0.621818 | \n", "0.068986 | \n", "
24 | \n", "togethercomputer/RedPajama-INCITE-7B-Instruct | \n", "0.528545 | \n", "0.036470 | \n", "0.528545 | \n", "0.036470 | \n", "
25 | \n", "togethercomputer/RedPajama-INCITE-7B-Chat | \n", "0.535455 | \n", "0.038723 | \n", "0.535455 | \n", "0.038723 | \n", "
26 | \n", "facebook/opt-2.7b | \n", "0.521818 | \n", "0.029821 | \n", "0.521818 | \n", "0.029821 | \n", "
27 | \n", "facebook/opt-6.7b | \n", "0.522909 | \n", "0.027216 | \n", "0.522909 | \n", "0.027216 | \n", "
28 | \n", "facebook/opt-1.3b | \n", "0.521818 | \n", "0.029112 | \n", "0.521818 | \n", "0.029112 | \n", "
29 | \n", "tiiuae/falcon-7b-instruct | \n", "0.536727 | \n", "0.053430 | \n", "0.536727 | \n", "0.053430 | \n", "
30 | \n", "tiiuae/falcon-rw-1b | \n", "0.522545 | \n", "0.029446 | \n", "0.522545 | \n", "0.029446 | \n", "
31 | \n", "tiiuae/falcon-rw-7b | \n", "0.535818 | \n", "0.033185 | \n", "0.535818 | \n", "0.033185 | \n", "
32 | \n", "tiiuae/falcon-7b | \n", "0.559636 | \n", "0.071650 | \n", "0.559636 | \n", "0.071650 | \n", "
33 | \n", "huggyllama/llama-7b | \n", "0.541818 | \n", "0.040718 | \n", "0.541818 | \n", "0.040718 | \n", "
34 | \n", "meta-llama/Llama-2-7b-chat-hf | \n", "0.000000 | \n", "0.000000 | \n", "NaN | \n", "NaN | \n", "
35 | \n", "meta-llama/Llama-2-7b-hf | \n", "0.566727 | \n", "0.052515 | \n", "0.566727 | \n", "0.052515 | \n", "