{ "cells": [ { "cell_type": "code", "execution_count": 67, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Defaulting to user installation because normal site-packages is not writeable\n", "Requirement already satisfied: pandas in /Users/picocreator/Library/Python/3.9/lib/python/site-packages (2.2.0)\n", "Requirement already satisfied: numpy<2,>=1.22.4 in /Users/picocreator/Library/Python/3.9/lib/python/site-packages (from pandas) (1.26.1)\n", "Requirement already satisfied: pytz>=2020.1 in /Users/picocreator/Library/Python/3.9/lib/python/site-packages (from pandas) (2024.1)\n", "Requirement already satisfied: python-dateutil>=2.8.2 in /Users/picocreator/Library/Python/3.9/lib/python/site-packages (from pandas) (2.8.2)\n", "Requirement already satisfied: tzdata>=2022.7 in /Users/picocreator/Library/Python/3.9/lib/python/site-packages (from pandas) (2024.1)\n", "Requirement already satisfied: six>=1.5 in /Library/Developer/CommandLineTools/Library/Frameworks/Python3.framework/Versions/3.9/lib/python3.9/site-packages (from python-dateutil>=2.8.2->pandas) (1.15.0)\n", "\u001b[33mWARNING: You are using pip version 21.2.4; however, version 24.0 is available.\n", "You should consider upgrading via the '/Library/Developer/CommandLineTools/usr/bin/python3 -m pip install --upgrade pip' command.\u001b[0m\n" ] } ], "source": [ "!pip3 install pandas" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "### Get the filelist\n", "\n", "For the full results.json" ] }, { "cell_type": "code", "execution_count": 68, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Found 2489 results.json files\n" ] } ], "source": [ "import glob\n", "\n", "# Specify the path to the folder containing the results.json files\n", "folder_path = \"lm-eval-output\"\n", "\n", "# Use glob to find all the results.json files\n", "results_json_files = glob.glob(f\"{folder_path}/**/results.json\", recursive=True)\n", "\n", "# Show total number of results.json files found\n", "print(f\"Found {len(results_json_files)} results.json files\")\n" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "### Process all the results.json\n", "\n", "One file at a time" ] }, { "cell_type": "code", "execution_count": 69, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Processed example: {'name': 'mistralai/Mistral-7B-v0.1', 'config': {'dtype=bfloat16,trust_remote_code=True': {'confStr': 'dtype=bfloat16,trust_remote_code=True', 'confObj': {'dtype': 'bfloat16', 'trust_remote_code': 'True'}, 'results': {'xcopa': {'acc,none': 0.5587272727272727, 'acc_stderr,none': 0.0551636604460852, 'alias': 'xcopa'}, 'xcopa_et': {'acc,none': 0.466, 'acc_stderr,none': 0.02233126442325838, 'alias': ' - xcopa_et'}, 'xcopa_ht': {'acc,none': 0.512, 'acc_stderr,none': 0.02237662679792717, 'alias': ' - xcopa_ht'}, 'xcopa_id': {'acc,none': 0.582, 'acc_stderr,none': 0.022080014812228137, 'alias': ' - xcopa_id'}, 'xcopa_it': {'acc,none': 0.66, 'acc_stderr,none': 0.021206117013673066, 'alias': ' - xcopa_it'}, 'xcopa_qu': {'acc,none': 0.482, 'acc_stderr,none': 0.02236856511738799, 'alias': ' - xcopa_qu'}, 'xcopa_sw': {'acc,none': 0.518, 'acc_stderr,none': 0.02236856511738799, 'alias': ' - xcopa_sw'}, 'xcopa_ta': {'acc,none': 0.542, 'acc_stderr,none': 0.02230396677426995, 'alias': ' - xcopa_ta'}, 'xcopa_th': {'acc,none': 0.564, 'acc_stderr,none': 0.0221989546414768, 'alias': ' - xcopa_th'}, 'xcopa_tr': {'acc,none': 0.568, 'acc_stderr,none': 0.02217510926561316, 'alias': ' - xcopa_tr'}, 'xcopa_vi': {'acc,none': 0.59, 'acc_stderr,none': 0.022017482578127672, 'alias': ' - xcopa_vi'}, 'xcopa_zh': {'acc,none': 0.662, 'acc_stderr,none': 0.021175665695209407, 'alias': ' - xcopa_zh'}, 'xnli': {'acc,none': 0.43175368139223563, 'acc_stderr,none': 0.0565098070106032, 'alias': 'xnli'}, 'xnli_ar': {'acc,none': 0.334136546184739, 'acc_stderr,none': 0.009454577602463621, 'alias': ' - xnli_ar'}, 'xnli_bg': {'acc,none': 0.4534136546184739, 'acc_stderr,none': 0.009978476483838962, 'alias': ' - xnli_bg'}, 'xnli_de': {'acc,none': 0.5012048192771085, 'acc_stderr,none': 0.01002204377131557, 'alias': ' - xnli_de'}, 'xnli_el': {'acc,none': 0.41365461847389556, 'acc_stderr,none': 0.009871502159099366, 'alias': ' - xnli_el'}, 'xnli_en': {'acc,none': 0.5690763052208835, 'acc_stderr,none': 0.009925970741520641, 'alias': ' - xnli_en'}, 'xnli_es': {'acc,none': 0.4562248995983936, 'acc_stderr,none': 0.009983589197693925, 'alias': ' - xnli_es'}, 'xnli_fr': {'acc,none': 0.5100401606425703, 'acc_stderr,none': 0.010020052116889137, 'alias': ' - xnli_fr'}, 'xnli_hi': {'acc,none': 0.42650602409638555, 'acc_stderr,none': 0.009913215943570534, 'alias': ' - xnli_hi'}, 'xnli_ru': {'acc,none': 0.4967871485943775, 'acc_stderr,none': 0.010021865961119557, 'alias': ' - xnli_ru'}, 'xnli_sw': {'acc,none': 0.363855421686747, 'acc_stderr,none': 0.009643393577626719, 'alias': ' - xnli_sw'}, 'xnli_th': {'acc,none': 0.38835341365461845, 'acc_stderr,none': 0.009769028875673285, 'alias': ' - xnli_th'}, 'xnli_tr': {'acc,none': 0.43654618473895584, 'acc_stderr,none': 0.009941039791133128, 'alias': ' - xnli_tr'}, 'xnli_ur': {'acc,none': 0.3381526104417671, 'acc_stderr,none': 0.009482500057981031, 'alias': ' - xnli_ur'}, 'xnli_vi': {'acc,none': 0.41244979919678715, 'acc_stderr,none': 0.009867237678555586, 'alias': ' - xnli_vi'}, 'xnli_zh': {'acc,none': 0.3759036144578313, 'acc_stderr,none': 0.00970848885066604, 'alias': ' - xnli_zh'}, 'pawsx': {'acc,none': 0.41585714285714287, 'acc_stderr,none': 0.05538778178867068, 'alias': 'pawsx'}, 'paws_de': {'acc,none': 0.385, 'acc_stderr,none': 0.010883323176386978, 'alias': ' - paws_de'}, 'paws_en': {'acc,none': 0.3125, 'acc_stderr,none': 0.010367044555050548, 'alias': ' - paws_en'}, 'paws_es': {'acc,none': 0.356, 'acc_stderr,none': 0.010709311120344539, 'alias': ' - paws_es'}, 'paws_fr': {'acc,none': 0.4885, 'acc_stderr,none': 0.011180177690296085, 'alias': ' - paws_fr'}, 'paws_ja': {'acc,none': 0.534, 'acc_stderr,none': 0.011157250652425779, 'alias': ' - paws_ja'}, 'paws_ko': {'acc,none': 0.4175, 'acc_stderr,none': 0.011029855114729358, 'alias': ' - paws_ko'}, 'paws_zh': {'acc,none': 0.4175, 'acc_stderr,none': 0.011029855114729354, 'alias': ' - paws_zh'}, 'lambada_multilingual': {'perplexity,none': 27.047409162154935, 'perplexity_stderr,none': 8.199911438395738, 'acc,none': 0.5190374539103435, 'acc_stderr,none': 0.07089117907004505, 'alias': 'lambada_multilingual'}, 'lambada_openai_mt_de': {'perplexity,none': 43.294453054791916, 'perplexity_stderr,none': 2.4066806886162686, 'acc,none': 0.39996118765767513, 'acc_stderr,none': 0.006825125929166165, 'alias': ' - lambada_openai_mt_de'}, 'lambada_openai_mt_en': {'perplexity,none': 3.1814104914677763, 'perplexity_stderr,none': 0.05822157255540461, 'acc,none': 0.7554822433533864, 'acc_stderr,none': 0.005987967089937308, 'alias': ' - lambada_openai_mt_en'}, 'lambada_openai_mt_es': {'perplexity,none': 36.26423960927208, 'perplexity_stderr,none': 1.790606090078102, 'acc,none': 0.42790607413157383, 'acc_stderr,none': 0.00689318551693077, 'alias': ' - lambada_openai_mt_es'}, 'lambada_openai_mt_fr': {'perplexity,none': 22.218390608610928, 'perplexity_stderr,none': 1.1061897900321798, 'acc,none': 0.5214438191344848, 'acc_stderr,none': 0.006959568274744848, 'alias': ' - lambada_openai_mt_fr'}, 'lambada_openai_mt_it': {'perplexity,none': 30.278552046631987, 'perplexity_stderr,none': 1.6707259318257452, 'acc,none': 0.49039394527459734, 'acc_stderr,none': 0.006964691949428186, 'alias': ' - lambada_openai_mt_it'}, 'xwinograd': {'acc,none': 0.8141155315801304, 'acc_stderr,none': 0.047153752482205775, 'alias': 'xwinograd'}, 'xwinograd_en': {'acc,none': 0.8868817204301075, 'acc_stderr,none': 0.0065702392696682255, 'alias': ' - xwinograd_en'}, 'xwinograd_fr': {'acc,none': 0.7469879518072289, 'acc_stderr,none': 0.048008758304372776, 'alias': ' - xwinograd_fr'}, 'xwinograd_jp': {'acc,none': 0.721584984358707, 'acc_stderr,none': 0.014481292182837467, 'alias': ' - xwinograd_jp'}, 'xwinograd_pt': {'acc,none': 0.7642585551330798, 'acc_stderr,none': 0.026223308206222536, 'alias': ' - xwinograd_pt'}, 'xwinograd_ru': {'acc,none': 0.6888888888888889, 'acc_stderr,none': 0.02612567541895451, 'alias': ' - xwinograd_ru'}, 'xwinograd_zh': {'acc,none': 0.7698412698412699, 'acc_stderr,none': 0.018768533005904867, 'alias': ' - xwinograd_zh'}, 'xstorycloze': {'acc,none': 0.5916611515552614, 'acc_stderr,none': 0.07711658992261772, 'alias': 'xstorycloze'}, 'xstorycloze_ar': {'acc,none': 0.5294506949040371, 'acc_stderr,none': 0.012844785490016997, 'alias': ' - xstorycloze_ar'}, 'xstorycloze_en': {'acc,none': 0.786896095301125, 'acc_stderr,none': 0.010538187590034574, 'alias': ' - xstorycloze_en'}, 'xstorycloze_es': {'acc,none': 0.6909331568497684, 'acc_stderr,none': 0.011892023305070085, 'alias': ' - xstorycloze_es'}, 'xstorycloze_eu': {'acc,none': 0.5109199205823958, 'acc_stderr,none': 0.012864056278255043, 'alias': ' - xstorycloze_eu'}, 'xstorycloze_hi': {'acc,none': 0.5539377895433488, 'acc_stderr,none': 0.012792037953589649, 'alias': ' - xstorycloze_hi'}, 'xstorycloze_id': {'acc,none': 0.5936465916611515, 'acc_stderr,none': 0.012639429420389871, 'alias': ' - xstorycloze_id'}, 'xstorycloze_my': {'acc,none': 0.4884182660489742, 'acc_stderr,none': 0.012863672949335892, 'alias': ' - xstorycloze_my'}, 'xstorycloze_ru': {'acc,none': 0.6651224354731966, 'acc_stderr,none': 0.012145219027833156, 'alias': ' - xstorycloze_ru'}, 'xstorycloze_sw': {'acc,none': 0.5129053606882858, 'acc_stderr,none': 0.012862838605728476, 'alias': ' - xstorycloze_sw'}, 'xstorycloze_te': {'acc,none': 0.5413633355393779, 'acc_stderr,none': 0.012823020340169815, 'alias': ' - xstorycloze_te'}, 'xstorycloze_zh': {'acc,none': 0.6346790205162144, 'acc_stderr,none': 0.012391557728373984, 'alias': ' - xstorycloze_zh'}}, 'groups': {'xcopa': {'acc,none': 0.5587272727272727, 'acc_stderr,none': 0.0551636604460852, 'alias': 'xcopa'}, 'xnli': {'acc,none': 0.43175368139223563, 'acc_stderr,none': 0.0565098070106032, 'alias': 'xnli'}, 'pawsx': {'acc,none': 0.41585714285714287, 'acc_stderr,none': 0.05538778178867068, 'alias': 'pawsx'}, 'lambada_multilingual': {'perplexity,none': 27.047409162154935, 'perplexity_stderr,none': 8.199911438395738, 'acc,none': 0.5190374539103435, 'acc_stderr,none': 0.07089117907004505, 'alias': 'lambada_multilingual'}, 'xwinograd': {'acc,none': 0.8141155315801304, 'acc_stderr,none': 0.047153752482205775, 'alias': 'xwinograd'}, 'xstorycloze': {'acc,none': 0.5916611515552614, 'acc_stderr,none': 0.07711658992261772, 'alias': 'xstorycloze'}}}}}\n" ] } ], "source": [ "import json\n", "\n", "# Global result map if it's not set\n", "if 'global_result_map' not in globals():\n", " global_result_map = {}\n", "\n", "#\n", "# Function to process the results.json file\n", "#\n", "def process_results_json(file_path):\n", " with open(file_path) as f:\n", " data = json.load(f)\n", "\n", " # Model args, presplit by ','\n", " model_args = data['config']['model_args'].split(',')\n", "\n", " # Extract the pretrained value from config.model_args\n", " modelname = model_args[0].split('=')[1]\n", "\n", " # Opt array\n", " confArgsArr = model_args[1:]\n", "\n", " # Sort the opt array\n", " confArgsArr.sort()\n", " # Convert it to a string\n", " confStr = ','.join(confArgsArr)\n", "\n", " # Convert the option array of key=value strings to a dictionary\n", " confObj = { }\n", " for o in confArgsArr:\n", " k, v = o.split('=')\n", " confObj[k] = v\n", " \n", " # Create a dictionary to store the results, or use the existing one if it exists\n", " if modelname in global_result_map:\n", " modelObj = global_result_map[modelname]\n", " else:\n", " modelObj = {\n", " 'name': modelname,\n", " 'config': { }\n", " }\n", " \n", " # Get the opt object for the model\n", " if confStr in modelObj['config']:\n", " confSet = modelObj['config'][confStr]\n", " else:\n", " confSet = {\n", " 'confStr': confStr,\n", " 'confObj': confObj,\n", " 'results': {},\n", " 'groups': {}\n", " }\n", "\n", " # Iterate over the results and extract the result object for each test/group\n", " if 'results' in data:\n", " for test, result in data['results'].items():\n", " confSet['results'][test] = result\n", " if 'groups' in data:\n", " for test, result in data['groups'].items():\n", " confSet['groups'][test] = result\n", " \n", " # Update the global result map object\n", " modelObj['config'][confStr] = confSet\n", " global_result_map[modelname] = modelObj\n", " return modelObj\n", "\n", "# Lets test the function with the first results.json file\n", "first_result = process_results_json(results_json_files[0])\n", "print(f\"Processed example: \", first_result)\n" ] }, { "cell_type": "code", "execution_count": 70, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Found 38 models\n", "Models: \n", "['mistralai/Mistral-7B-v0.1', 'mosaicml/mpt-7b-instruct', 'mosaicml/mpt-7b', 'mosaicml/mpt-7b-chat', 'bigscience/bloom-7b1', 'bigscience/bloomz-7b1-mt', 'bigscience/bloomz-7b1', 'EleutherAI/pythia-2.8b', 'EleutherAI/pythia-1.4b', 'EleutherAI/gpt-j-6b', 'EleutherAI/pythia-6.9b', 'google/gemma-2b', 'google/gemma-7b', 'microsoft/phi-1_5', 'microsoft/phi-2', 'microsoft/phi-1', 'allenai/OLMo-7B', 'TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T', 'TinyLlama/TinyLlama-1.1B-Chat-v1.0', 'RWKV/rwkv-5-world-1b5', 'RWKV/rwkv-5-world-3b', 'RWKV/rwkv-4-world-3b', 'RWKV/rwkv-4-world-1b5', 'RWKV/rwkv-4-world-7b', 'RWKV/HF_v5-Eagle-7B', 'togethercomputer/RedPajama-INCITE-7B-Base', 'togethercomputer/RedPajama-INCITE-7B-Instruct', 'togethercomputer/RedPajama-INCITE-7B-Chat', 'facebook/opt-2.7b', 'facebook/opt-6.7b', 'facebook/opt-1.3b', 'tiiuae/falcon-7b-instruct', 'tiiuae/falcon-rw-1b', 'tiiuae/falcon-rw-7b', 'tiiuae/falcon-7b', 'huggyllama/llama-7b', 'meta-llama/Llama-2-7b-chat-hf', 'meta-llama/Llama-2-7b-hf']\n", "Saved to compiled-lm-eval-results.json\n" ] } ], "source": [ "# Lets reset and reprocess all the results.json files\n", "global_result_map = {}\n", "\n", "# Process all the results.json files\n", "for file in results_json_files:\n", " process_results_json(file)\n", "\n", "# Show high level list of models\n", "print(f\"Found {len(global_result_map)} models\")\n", "print(f\"Models: \\n{list(global_result_map.keys())}\")\n", "\n", "# Save the result map to a file\n", "with open('summary/compiled-lm-eval-results.json', 'w') as f:\n", " json.dump(global_result_map, f, sort_keys=True, indent='\\t')\n", "\n", "# Echo that its been saved to json\n", "print(f\"Saved to compiled-lm-eval-results.json\")" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "### Convert the results into CSV table formats" ] }, { "cell_type": "code", "execution_count": 71, "metadata": {}, "outputs": [ { "data": { "text/html": [ "
\n", " | model | \n", "avg_acc | \n", "avg_acc_stderr | \n", "xcopa (acc) | \n", "xcopa (acc_stderr) | \n", "
---|---|---|---|---|---|
0 | \n", "mistralai/Mistral-7B-v0.1 | \n", "0.558727 | \n", "0.055164 | \n", "0.558727 | \n", "0.055164 | \n", "
1 | \n", "mosaicml/mpt-7b-instruct | \n", "0.537091 | \n", "0.041919 | \n", "0.537091 | \n", "0.041919 | \n", "
2 | \n", "mosaicml/mpt-7b | \n", "0.536000 | \n", "0.042339 | \n", "0.536000 | \n", "0.042339 | \n", "
3 | \n", "mosaicml/mpt-7b-chat | \n", "0.538000 | \n", "0.047059 | \n", "0.538000 | \n", "0.047059 | \n", "
4 | \n", "bigscience/bloom-7b1 | \n", "0.570909 | \n", "0.061359 | \n", "0.570909 | \n", "0.061359 | \n", "
5 | \n", "bigscience/bloomz-7b1-mt | \n", "0.546000 | \n", "0.038321 | \n", "0.546000 | \n", "0.038321 | \n", "
6 | \n", "bigscience/bloomz-7b1 | \n", "0.547818 | \n", "0.038920 | \n", "0.547818 | \n", "0.038920 | \n", "
7 | \n", "EleutherAI/pythia-2.8b | \n", "0.537455 | \n", "0.026941 | \n", "0.537455 | \n", "0.026941 | \n", "
8 | \n", "EleutherAI/pythia-1.4b | \n", "0.526545 | \n", "0.027441 | \n", "0.526545 | \n", "0.027441 | \n", "
9 | \n", "EleutherAI/gpt-j-6b | \n", "0.544182 | \n", "0.034404 | \n", "0.544182 | \n", "0.034404 | \n", "
10 | \n", "EleutherAI/pythia-6.9b | \n", "0.540545 | \n", "0.029689 | \n", "0.540545 | \n", "0.029689 | \n", "
11 | \n", "google/gemma-2b | \n", "0.520545 | \n", "0.025332 | \n", "0.520545 | \n", "0.025332 | \n", "
12 | \n", "google/gemma-7b | \n", "0.517636 | \n", "0.027330 | \n", "0.517636 | \n", "0.027330 | \n", "
13 | \n", "microsoft/phi-1_5 | \n", "0.521636 | \n", "0.026198 | \n", "0.521636 | \n", "0.026198 | \n", "
14 | \n", "microsoft/phi-2 | \n", "0.512182 | \n", "0.029742 | \n", "0.512182 | \n", "0.029742 | \n", "
15 | \n", "microsoft/phi-1 | \n", "0.517636 | \n", "0.029612 | \n", "0.517636 | \n", "0.029612 | \n", "
16 | \n", "allenai/OLMo-7B | \n", "0.537818 | \n", "0.034147 | \n", "0.537818 | \n", "0.034147 | \n", "
17 | \n", "TinyLlama/TinyLlama-1.1B-intermediate-step-143... | \n", "0.529273 | \n", "0.029316 | \n", "0.529273 | \n", "0.029316 | \n", "
18 | \n", "TinyLlama/TinyLlama-1.1B-Chat-v1.0 | \n", "0.528909 | \n", "0.031702 | \n", "0.528909 | \n", "0.031702 | \n", "
19 | \n", "RWKV/rwkv-5-world-1b5 | \n", "0.578909 | \n", "0.045103 | \n", "0.578909 | \n", "0.045103 | \n", "
20 | \n", "RWKV/rwkv-5-world-3b | \n", "0.590182 | \n", "0.056241 | \n", "0.590182 | \n", "0.056241 | \n", "
21 | \n", "RWKV/rwkv-4-world-3b | \n", "0.575455 | \n", "0.040977 | \n", "0.575455 | \n", "0.040977 | \n", "
22 | \n", "RWKV/rwkv-4-world-1b5 | \n", "0.554000 | \n", "0.039406 | \n", "0.554000 | \n", "0.039406 | \n", "
23 | \n", "RWKV/rwkv-4-world-7b | \n", "0.601455 | \n", "0.053116 | \n", "0.601455 | \n", "0.053116 | \n", "
24 | \n", "RWKV/HF_v5-Eagle-7B | \n", "0.621818 | \n", "0.068986 | \n", "0.621818 | \n", "0.068986 | \n", "
25 | \n", "togethercomputer/RedPajama-INCITE-7B-Base | \n", "0.525455 | \n", "0.036407 | \n", "0.525455 | \n", "0.036407 | \n", "
26 | \n", "togethercomputer/RedPajama-INCITE-7B-Instruct | \n", "0.528545 | \n", "0.036470 | \n", "0.528545 | \n", "0.036470 | \n", "
27 | \n", "togethercomputer/RedPajama-INCITE-7B-Chat | \n", "0.535455 | \n", "0.038723 | \n", "0.535455 | \n", "0.038723 | \n", "
28 | \n", "facebook/opt-2.7b | \n", "0.521818 | \n", "0.029821 | \n", "0.521818 | \n", "0.029821 | \n", "
29 | \n", "facebook/opt-6.7b | \n", "0.522909 | \n", "0.027216 | \n", "0.522909 | \n", "0.027216 | \n", "
30 | \n", "facebook/opt-1.3b | \n", "0.521818 | \n", "0.029112 | \n", "0.521818 | \n", "0.029112 | \n", "
31 | \n", "tiiuae/falcon-7b-instruct | \n", "0.536727 | \n", "0.053430 | \n", "0.536727 | \n", "0.053430 | \n", "
32 | \n", "tiiuae/falcon-rw-1b | \n", "0.522545 | \n", "0.029446 | \n", "0.522545 | \n", "0.029446 | \n", "
33 | \n", "tiiuae/falcon-rw-7b | \n", "0.535818 | \n", "0.033185 | \n", "0.535818 | \n", "0.033185 | \n", "
34 | \n", "tiiuae/falcon-7b | \n", "0.559636 | \n", "0.071650 | \n", "0.559636 | \n", "0.071650 | \n", "
35 | \n", "huggyllama/llama-7b | \n", "0.541818 | \n", "0.040718 | \n", "0.541818 | \n", "0.040718 | \n", "
36 | \n", "meta-llama/Llama-2-7b-chat-hf | \n", "0.000000 | \n", "0.000000 | \n", "NaN | \n", "NaN | \n", "
37 | \n", "meta-llama/Llama-2-7b-hf | \n", "0.566727 | \n", "0.052515 | \n", "0.566727 | \n", "0.052515 | \n", "