haoyang committed
Commit a24bc73 • 1 Parent(s): 221d26c

update data
01-ai/Yi-34B-Chat/{results_2024-01-13T16-41-04.json → results_2024-01-19T15-58-52.json} RENAMED
@@ -1,7 +1,9 @@
 {
   "config": {
     "model_name": "01-ai/Yi-34B-Chat",
-    "model_type": "open-source"
+    "model_type": "open-source",
+    "model_dtype": "torch.bfloat16",
+    "num_params": 34.389
   },
   "results": {
     "SAS": {
Claude-2/{results_2024-01-13T16-41-04.json → results_2024-01-19T15-58-52.json} RENAMED
@@ -1,7 +1,9 @@
 {
   "config": {
     "model_name": "Claude-2",
-    "model_type": "close-source"
+    "model_type": "close-source",
+    "model_dtype": "?",
+    "num_params": 0
   },
   "results": {
     "SAS": {
Claude-Instant/{results_2024-01-13T16-41-04.json → results_2024-01-19T15-58-52.json} RENAMED
@@ -1,7 +1,9 @@
 {
   "config": {
     "model_name": "Claude-Instant",
-    "model_type": "close-source"
+    "model_type": "close-source",
+    "model_dtype": "?",
+    "num_params": 0
   },
   "results": {
     "SAS": {
GPT-3.5-Turbo/{results_2024-01-13T16-41-04.json → results_2024-01-19T15-58-52.json} RENAMED
@@ -1,7 +1,9 @@
 {
   "config": {
     "model_name": "GPT-3.5-Turbo",
-    "model_type": "close-source"
+    "model_type": "close-source",
+    "model_dtype": "?",
+    "num_params": 0
   },
   "results": {
     "SAS": {
GPT-4-Turbo/{results_2024-01-13T16-41-04.json → results_2024-01-19T15-58-52.json} RENAMED
@@ -1,7 +1,9 @@
 {
   "config": {
     "model_name": "GPT-4-Turbo",
-    "model_type": "close-source"
+    "model_type": "close-source",
+    "model_dtype": "?",
+    "num_params": 0
   },
   "results": {
     "SAS": {
PaLM-2/{results_2024-01-13T16-41-04.json → results_2024-01-19T15-58-52.json} RENAMED
@@ -1,7 +1,9 @@
 {
   "config": {
     "model_name": "PaLM-2",
-    "model_type": "close-source"
+    "model_type": "close-source",
+    "model_dtype": "?",
+    "num_params": 0
   },
   "results": {
     "SAS": {
Qwen/Qwen-14B-Chat/{results_2024-01-13T16-41-04.json → results_2024-01-19T15-58-52.json} RENAMED
@@ -1,7 +1,9 @@
 {
   "config": {
     "model_name": "Qwen/Qwen-14B-Chat",
-    "model_type": "open-source"
+    "model_type": "open-source",
+    "model_dtype": "torch.bfloat16",
+    "num_params": 14.167
   },
   "results": {
     "SAS": {
export.ipynb CHANGED
@@ -28,6 +28,28 @@
    "cell_type": "code",
    "execution_count": 3,
    "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "array(['Claude 2', 'Claude Instant', 'GPT 3.5 Turbo', 'GPT 4 Turbo',\n",
+       "       'MPT-30b', 'Mistral-7b', 'PaLM 2', 'Phi-1.5', 'Phi-2', 'Qwen-14b',\n",
+       "       'Vicuna-13b', 'Yi-34b'], dtype=object)"
+      ]
+     },
+     "execution_count": 3,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "new_df.model.unique()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "metadata": {},
    "outputs": [],
    "source": [
     "open_models = {\n",
@@ -38,12 +60,22 @@
     "    \"MPT-30b\": \"mosaicml/mpt-30b-instruct\",\n",
     "    \"Phi-2\": \"microsoft/phi-2\",\n",
     "    \"Qwen-14b\": \"Qwen/Qwen-14B-Chat\"\n",
+    "}\n",
+    "\n",
+    "model_params = {\n",
+    "    'Yi-34b': ('torch.bfloat16', 34.389),\n",
+    "    'Mistral-7b': ('torch.bfloat16', 7.242),\n",
+    "    'Vicuna-13b': ('torch.float16', 13.0),\n",
+    "    'Phi-1.5': ('torch.float16', 1.3),\n",
+    "    'MPT-30b': ('torch.bfloat16', 30.0),\n",
+    "    'Phi-2': ('torch.float16', 2.78),\n",
+    "    'Qwen-14b': ('torch.bfloat16', 14.167),\n",
     "}"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 4,
+   "execution_count": 5,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -58,7 +90,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 5,
+   "execution_count": 6,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -69,7 +101,12 @@
     "    model_df = new_df[new_df[\"model\"] == model]\n",
     "    model_result = result_export(model_df, model)\n",
     "    model_result = {\n",
-    "        \"config\": {\"model_name\": model_dir, \"model_type\": \"open-source\" if model in open_models else \"close-source\"},\n",
+    "        \"config\": {\n",
+    "            \"model_name\": model_dir, \n",
+    "            \"model_type\": \"open-source\" if model in open_models else \"close-source\",\n",
+    "            \"model_dtype\": model_params[model][0] if model in model_params else \"?\",\n",
+    "            \"num_params\": model_params[model][1] if model in model_params else 0,\n",
+    "        },\n",
     "        \"results\": model_result\n",
     "    }\n",
     "    with open(f\"{model_dir}/results_{time_now}.json\", \"w\") as f:\n",
lmsys/vicuna-13b-v1.3/{results_2024-01-13T16-41-04.json → results_2024-01-19T15-58-52.json} RENAMED
@@ -1,7 +1,9 @@
 {
   "config": {
     "model_name": "lmsys/vicuna-13b-v1.3",
-    "model_type": "open-source"
+    "model_type": "open-source",
+    "model_dtype": "torch.float16",
+    "num_params": 13.0
   },
   "results": {
     "SAS": {
microsoft/phi-1_5/{results_2024-01-13T16-41-04.json → results_2024-01-19T15-58-52.json} RENAMED
@@ -1,7 +1,9 @@
 {
   "config": {
     "model_name": "microsoft/phi-1_5",
-    "model_type": "open-source"
+    "model_type": "open-source",
+    "model_dtype": "torch.float16",
+    "num_params": 1.3
   },
   "results": {
     "SAS": {
microsoft/phi-2/{results_2024-01-13T16-41-04.json → results_2024-01-19T15-58-52.json} RENAMED
@@ -1,7 +1,9 @@
 {
   "config": {
     "model_name": "microsoft/phi-2",
-    "model_type": "open-source"
+    "model_type": "open-source",
+    "model_dtype": "torch.float16",
+    "num_params": 2.78
   },
   "results": {
     "SAS": {
mistralai/Mistral-7B-Instruct-v0.1/{results_2024-01-13T16-41-04.json → results_2024-01-19T15-58-52.json} RENAMED
@@ -1,7 +1,9 @@
 {
   "config": {
     "model_name": "mistralai/Mistral-7B-Instruct-v0.1",
-    "model_type": "open-source"
+    "model_type": "open-source",
+    "model_dtype": "torch.bfloat16",
+    "num_params": 7.242
   },
   "results": {
     "SAS": {
mosaicml/mpt-30b-instruct/{results_2024-01-13T16-41-04.json → results_2024-01-19T15-58-52.json} RENAMED
@@ -1,7 +1,9 @@
 {
   "config": {
     "model_name": "mosaicml/mpt-30b-instruct",
-    "model_type": "open-source"
+    "model_type": "open-source",
+    "model_dtype": "torch.bfloat16",
+    "num_params": 30.0
   },
   "results": {
     "SAS": {