osanseviero (HF staff) committed on
Commit 91faa94
1 parent: 1f363ff

Fix finetunes

Files changed (26)
  1. Aspik101/vicuna-13b-v1.5-PL-lora_unload_eval_request_False_float16_Original.json +1 -1
  2. Austism/chronos-hermes-13b-v2_eval_request_False_float16_Original.json +1 -1
  3. CalderaAI/13B-Legerdemain-L2_eval_request_False_float16_Original.json +1 -1
  4. Gryphe/MythoLogic-L2-13b_eval_request_False_float16_Original.json +1 -1
  5. MrNJK/gpt2-xl-sft_eval_request_False_float16_Original.json +1 -1
  6. Open-Orca/OpenOrcaxOpenChat-Preview2-13B_eval_request_False_float16_Original.json +1 -1
  7. The-Face-Of-Goonery/Chronos-Beluga-v2-13bfp16_eval_request_False_float16_Original.json +1 -1
  8. ToolBench/ToolLLaMA-7b-LoRA_eval_request_False_float16_Adapter.json +1 -1
  9. bhenrym14/airophin-13b-pntk-16k-fp16_eval_request_False_float16_Adapter.json +1 -1
  10. bhenrym14/airophin-13b-pntk-16k-fp16_eval_request_False_float16_Original.json +1 -1
  11. chansung/gpt4-alpaca-lora-13b-decapoda-1024_eval_request_False_float16_Adapter.json +1 -1
  12. circulus/Llama-2-13b-orca-v1_eval_request_False_bfloat16_Original.json +1 -1
  13. circulus/Llama-2-13b-orca-v1_eval_request_False_float16_Original.json +1 -1
  14. conceptofmind/LLongMA-2-13b-16k_eval_request_False_bfloat16_Original.json +1 -1
  15. elinas/chronos-13b-v2_eval_request_False_float16_Original.json +1 -1
  16. golaxy/goims_eval_request_False_float16_Original.json +1 -1
  17. jarradh/llama2_70b_chat_uncensored_eval_request_False_float16_Original.json +1 -1
  18. jlevin/guanaco-unchained-llama-2-7b_eval_request_False_4bit_Adapter.json +1 -1
  19. jondurbin/airoboros-l2-70b-gpt4-2.0_eval_request_False_4bit_Original.json +1 -1
  20. jordiclive/gpt4all-alpaca-oa-codealpaca-lora-13b_eval_request_False_float16_Adapter.json +1 -1
  21. lmsys/vicuna-13b-v1.5-16k_eval_request_False_8bit_Original.json +1 -1
  22. pleisto/tianpeng-lora-30B_eval_request_False_float16_Adapter.json +1 -1
  23. psmathur/model_51_eval_request_False_float16_Original.json +1 -1
  24. psmathur/orca_mini_13b_eval_request_False_float16_Original.json +1 -1
  25. reeducator/vicuna-13b-free_eval_request_False_4bit_Original.json +1 -1
  26. togethercomputer/LLaMA-2-7B-32K_eval_request_False_4bit_Original.json +1 -1
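All 26 diffs below make the same one-field change: the "model_type" value in each eval-request JSON file is normalized from "finetuned" to "fine-tuned". A minimal sketch of how such a bulk edit could be scripted is shown here; the script is not part of this commit, and the "eval-queue" directory name is an assumption standing in for a local checkout of the requests dataset. Note that a few entries in this commit also drop the "🔶 : " prefix, which this sketch does not attempt to reproduce.

```python
import json
from pathlib import Path

# Assumed location of a local checkout of the eval-request JSON files.
REQUESTS_DIR = Path("eval-queue")

for path in sorted(REQUESTS_DIR.glob("**/*_eval_request_*.json")):
    data = json.loads(path.read_text())
    model_type = data.get("model_type", "")
    if "finetuned" in model_type:
        # Replace the substring while keeping any prefix such as "🔶 : ".
        data["model_type"] = model_type.replace("finetuned", "fine-tuned")
        # json.dumps with default settings matches the single-line,
        # ASCII-escaped formatting used by these request files.
        path.write_text(json.dumps(data))
```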
Aspik101/vicuna-13b-v1.5-PL-lora_unload_eval_request_False_float16_Original.json CHANGED
@@ -1 +1 @@
- {"model": "Aspik101/vicuna-13b-v1.5-PL-lora_unload", "base_model": "", "revision": "main", "private": false, "precision": "float16", "weight_type": "Original", "status": "PENDING_NEW_EVAL", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "finetuned", "job_id": "263038"}
+ {"model": "Aspik101/vicuna-13b-v1.5-PL-lora_unload", "base_model": "", "revision": "main", "private": false, "precision": "float16", "weight_type": "Original", "status": "PENDING_NEW_EVAL", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "fine-tuned", "job_id": "263038"}
Austism/chronos-hermes-13b-v2_eval_request_False_float16_Original.json CHANGED
@@ -1 +1 @@
- {"model": "Austism/chronos-hermes-13b-v2", "base_model": "", "revision": "main", "private": false, "precision": "float16", "weight_type": "Original", "status": "FINISHED", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "finetuned", "job_id": "471937"}
+ {"model": "Austism/chronos-hermes-13b-v2", "base_model": "", "revision": "main", "private": false, "precision": "float16", "weight_type": "Original", "status": "FINISHED", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "fine-tuned", "job_id": "471937"}
CalderaAI/13B-Legerdemain-L2_eval_request_False_float16_Original.json CHANGED
@@ -1 +1 @@
- {"model": "CalderaAI/13B-Legerdemain-L2", "base_model": "", "revision": "main", "private": false, "precision": "float16", "weight_type": "Original", "status": "PENDING_NEW_EVAL", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "\ud83d\udd36 : finetuned", "job_id": "289181"}
+ {"model": "CalderaAI/13B-Legerdemain-L2", "base_model": "", "revision": "main", "private": false, "precision": "float16", "weight_type": "Original", "status": "PENDING_NEW_EVAL", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "\ud83d\udd36 : fine-tuned", "job_id": "289181"}
Gryphe/MythoLogic-L2-13b_eval_request_False_float16_Original.json CHANGED
@@ -1 +1 @@
- {"model": "Gryphe/MythoLogic-L2-13b", "base_model": "", "revision": "main", "private": false, "precision": "float16", "weight_type": "Original", "status": "FINISHED", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "\ud83d\udd36 : finetuned", "job_id": "472522"}
+ {"model": "Gryphe/MythoLogic-L2-13b", "base_model": "", "revision": "main", "private": false, "precision": "float16", "weight_type": "Original", "status": "FINISHED", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "\ud83d\udd36 : fine-tuned", "job_id": "472522"}
MrNJK/gpt2-xl-sft_eval_request_False_float16_Original.json CHANGED
@@ -1 +1 @@
- {"model": "MrNJK/gpt2-xl-sft", "base_model": "gpt2-xl", "revision": "main", "private": false, "precision": "float16", "weight_type": "Original", "status": "FINISHED", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "finetuned", "job_id": "461621"}
+ {"model": "MrNJK/gpt2-xl-sft", "base_model": "gpt2-xl", "revision": "main", "private": false, "precision": "float16", "weight_type": "Original", "status": "FINISHED", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "fine-tuned", "job_id": "461621"}
Open-Orca/OpenOrcaxOpenChat-Preview2-13B_eval_request_False_float16_Original.json CHANGED
@@ -1 +1 @@
- {"model": "Open-Orca/OpenOrcaxOpenChat-Preview2-13B", "base_model": "", "revision": "main", "private": false, "precision": "float16", "weight_type": "Original", "status": "PENDING_NEW_EVAL", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "\ud83d\udd36 : finetuned", "job_id": "279143"}
+ {"model": "Open-Orca/OpenOrcaxOpenChat-Preview2-13B", "base_model": "", "revision": "main", "private": false, "precision": "float16", "weight_type": "Original", "status": "PENDING_NEW_EVAL", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "fine-tuned", "job_id": "279143"}
The-Face-Of-Goonery/Chronos-Beluga-v2-13bfp16_eval_request_False_float16_Original.json CHANGED
@@ -1 +1 @@
- {"model": "The-Face-Of-Goonery/Chronos-Beluga-v2-13bfp16", "base_model": "", "revision": "main", "private": false, "precision": "float16", "weight_type": "Original", "status": "FINISHED", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "\ud83d\udd36 : finetuned", "job_id": "463418"}
+ {"model": "The-Face-Of-Goonery/Chronos-Beluga-v2-13bfp16", "base_model": "", "revision": "main", "private": false, "precision": "float16", "weight_type": "Original", "status": "FINISHED", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "fine-tuned", "job_id": "463418"}
ToolBench/ToolLLaMA-7b-LoRA_eval_request_False_float16_Adapter.json CHANGED
@@ -1 +1 @@
- {"model": "ToolBench/ToolLLaMA-7b-LoRA", "base_model": "TheBloke/Llama-2-7B-fp16", "revision": "main", "private": false, "precision": "float16", "weight_type": "Adapter", "status": "FINISHED", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "finetuned", "job_id": "462022"}
+ {"model": "ToolBench/ToolLLaMA-7b-LoRA", "base_model": "TheBloke/Llama-2-7B-fp16", "revision": "main", "private": false, "precision": "float16", "weight_type": "Adapter", "status": "FINISHED", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "fine-tuned", "job_id": "462022"}
bhenrym14/airophin-13b-pntk-16k-fp16_eval_request_False_float16_Adapter.json CHANGED
@@ -1 +1 @@
- {"model": "bhenrym14/airophin-13b-pntk-16k-fp16", "base_model": "huggyllama/llama-30b", "revision": "main", "private": false, "precision": "float16", "weight_type": "Adapter", "status": "FINISHED", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "\ud83d\udd36 : finetuned", "job_id": "461912"}
+ {"model": "bhenrym14/airophin-13b-pntk-16k-fp16", "base_model": "huggyllama/llama-30b", "revision": "main", "private": false, "precision": "float16", "weight_type": "Adapter", "status": "FINISHED", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "\ud83d\udd36 : fine-tuned", "job_id": "461912"}
bhenrym14/airophin-13b-pntk-16k-fp16_eval_request_False_float16_Original.json CHANGED
@@ -1 +1 @@
- {"model": "bhenrym14/airophin-13b-pntk-16k-fp16", "base_model": "huggyllama/llama-13b", "revision": "main", "private": false, "precision": "float16", "weight_type": "Original", "status": "PENDING_NEW_EVAL", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "\ud83d\udd36 : finetuned", "job_id": "275123"}
+ {"model": "bhenrym14/airophin-13b-pntk-16k-fp16", "base_model": "huggyllama/llama-13b", "revision": "main", "private": false, "precision": "float16", "weight_type": "Original", "status": "PENDING_NEW_EVAL", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "\ud83d\udd36 : fine-tuned", "job_id": "275123"}
chansung/gpt4-alpaca-lora-13b-decapoda-1024_eval_request_False_float16_Adapter.json CHANGED
@@ -1 +1 @@
- {"model": "chansung/gpt4-alpaca-lora-13b-decapoda-1024", "base_model": "huggyllama/llama-13b", "revision": "main", "private": false, "precision": "float16", "weight_type": "Adapter", "status": "FINISHED", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "\ud83d\udd36 : finetuned", "job_id": "461532"}
+ {"model": "chansung/gpt4-alpaca-lora-13b-decapoda-1024", "base_model": "huggyllama/llama-13b", "revision": "main", "private": false, "precision": "float16", "weight_type": "Adapter", "status": "FINISHED", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "\ud83d\udd36 : fine-tuned", "job_id": "461532"}
circulus/Llama-2-13b-orca-v1_eval_request_False_bfloat16_Original.json CHANGED
@@ -1 +1 @@
- {"model": "circulus/Llama-2-13b-orca-v1", "base_model": "", "revision": "main", "private": false, "precision": "bfloat16", "weight_type": "Original", "status": "FINISHED", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "finetuned", "job_id": "461484"}
+ {"model": "circulus/Llama-2-13b-orca-v1", "base_model": "", "revision": "main", "private": false, "precision": "bfloat16", "weight_type": "Original", "status": "FINISHED", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "fine-tuned", "job_id": "461484"}
circulus/Llama-2-13b-orca-v1_eval_request_False_float16_Original.json CHANGED
@@ -1 +1 @@
- {"model": "circulus/Llama-2-13b-orca-v1", "base_model": "", "revision": "main", "private": false, "precision": "float16", "weight_type": "Original", "status": "FINISHED", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "finetuned", "job_id": "461311"}
+ {"model": "circulus/Llama-2-13b-orca-v1", "base_model": "", "revision": "main", "private": false, "precision": "float16", "weight_type": "Original", "status": "FINISHED", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "fine-tuned", "job_id": "461311"}
conceptofmind/LLongMA-2-13b-16k_eval_request_False_bfloat16_Original.json CHANGED
@@ -1 +1 @@
- {"model": "conceptofmind/LLongMA-2-13b-16k", "base_model": "", "revision": "main", "private": false, "precision": "bfloat16", "weight_type": "Original", "status": "FINISHED", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "finetuned", "job_id": "472541"}
+ {"model": "conceptofmind/LLongMA-2-13b-16k", "base_model": "", "revision": "main", "private": false, "precision": "bfloat16", "weight_type": "Original", "status": "FINISHED", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "fine-tuned", "job_id": "472541"}
elinas/chronos-13b-v2_eval_request_False_float16_Original.json CHANGED
@@ -1 +1 @@
- {"model": "elinas/chronos-13b-v2", "base_model": "", "revision": "main", "private": false, "precision": "float16", "weight_type": "Original", "status": "FINISHED", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "finetuned", "job_id": "472501"}
+ {"model": "elinas/chronos-13b-v2", "base_model": "", "revision": "main", "private": false, "precision": "float16", "weight_type": "Original", "status": "FINISHED", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "fine-tuned", "job_id": "472501"}
golaxy/goims_eval_request_False_float16_Original.json CHANGED
@@ -1 +1 @@
- {"model": "golaxy/goims", "base_model": "", "revision": "main", "private": false, "precision": "float16", "weight_type": "Original", "status": "PENDING_NEW_EVAL", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "\ud83d\udd36 : finetuned", "job_id": "285274"}
+ {"model": "golaxy/goims", "base_model": "", "revision": "main", "private": false, "precision": "float16", "weight_type": "Original", "status": "PENDING_NEW_EVAL", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "\ud83d\udd36 : fine-tuned", "job_id": "285274"}
jarradh/llama2_70b_chat_uncensored_eval_request_False_float16_Original.json CHANGED
@@ -1 +1 @@
- {"model": "jarradh/llama2_70b_chat_uncensored", "base_model": "", "revision": "main", "private": false, "precision": "float16", "weight_type": "Original", "status": "PENDING_NEW_EVAL", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "\ud83d\udd36 : finetuned", "job_id": "283161"}
+ {"model": "jarradh/llama2_70b_chat_uncensored", "base_model": "", "revision": "main", "private": false, "precision": "float16", "weight_type": "Original", "status": "PENDING_NEW_EVAL", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "\ud83d\udd36 : fine-tuned", "job_id": "283161"}
jlevin/guanaco-unchained-llama-2-7b_eval_request_False_4bit_Adapter.json CHANGED
@@ -1 +1 @@
- {"model": "jlevin/guanaco-unchained-llama-2-7b", "base_model": "TheBloke/Llama-2-7B-fp16", "revision": "main", "private": false, "precision": "4bit", "weight_type": "Adapter", "status": "FINISHED", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "\ud83d\udd36 : finetuned", "job_id": "461674"}
+ {"model": "jlevin/guanaco-unchained-llama-2-7b", "base_model": "TheBloke/Llama-2-7B-fp16", "revision": "main", "private": false, "precision": "4bit", "weight_type": "Adapter", "status": "FINISHED", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "fine-tuned", "job_id": "461674"}
jondurbin/airoboros-l2-70b-gpt4-2.0_eval_request_False_4bit_Original.json CHANGED
@@ -1 +1 @@
- {"model": "jondurbin/airoboros-l2-70b-gpt4-2.0", "base_model": "", "revision": "main", "private": false, "precision": "4bit", "weight_type": "Original", "status": "PENDING_NEW_EVAL", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "finetuned", "job_id": "360968"}
+ {"model": "jondurbin/airoboros-l2-70b-gpt4-2.0", "base_model": "", "revision": "main", "private": false, "precision": "4bit", "weight_type": "Original", "status": "PENDING_NEW_EVAL", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "fine-tuned", "job_id": "360968"}
jordiclive/gpt4all-alpaca-oa-codealpaca-lora-13b_eval_request_False_float16_Adapter.json CHANGED
@@ -1 +1 @@
- {"model": "jordiclive/gpt4all-alpaca-oa-codealpaca-lora-13b", "base_model": "huggyllama/llama-13b", "revision": "main", "private": false, "precision": "float16", "weight_type": "Adapter", "status": "FINISHED", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "\ud83d\udd36 : finetuned", "job_id": "471268"}
+ {"model": "jordiclive/gpt4all-alpaca-oa-codealpaca-lora-13b", "base_model": "huggyllama/llama-13b", "revision": "main", "private": false, "precision": "float16", "weight_type": "Adapter", "status": "FINISHED", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "fine-tuned", "job_id": "471268"}
lmsys/vicuna-13b-v1.5-16k_eval_request_False_8bit_Original.json CHANGED
@@ -1 +1 @@
- {"model": "lmsys/vicuna-13b-v1.5-16k", "base_model": "", "revision": "main", "private": false, "precision": "8bit", "weight_type": "Original", "status": "FINISHED", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "finetuned", "job_id": "461001"}
+ {"model": "lmsys/vicuna-13b-v1.5-16k", "base_model": "", "revision": "main", "private": false, "precision": "8bit", "weight_type": "Original", "status": "FINISHED", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "fine-tuned", "job_id": "461001"}
pleisto/tianpeng-lora-30B_eval_request_False_float16_Adapter.json CHANGED
@@ -1 +1 @@
- {"model": "pleisto/tianpeng-lora-30B", "base_model": "huggyllama/llama-30b", "revision": "main", "private": false, "precision": "float16", "weight_type": "Adapter", "status": "FAILED", "submitted_time": "2023-08-29T14:05:17Z", "model_type": "\ud83d\udd36 : finetuned", "job_id": "396926"}
+ {"model": "pleisto/tianpeng-lora-30B", "base_model": "huggyllama/llama-30b", "revision": "main", "private": false, "precision": "float16", "weight_type": "Adapter", "status": "FAILED", "submitted_time": "2023-08-29T14:05:17Z", "model_type": "fine-tuned", "job_id": "396926"}
psmathur/model_51_eval_request_False_float16_Original.json CHANGED
@@ -1 +1 @@
- {"model": "psmathur/model_51", "base_model": "", "revision": "main", "private": false, "precision": "float16", "weight_type": "Original", "status": "PENDING_NEW_EVAL", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "\ud83d\udd36 : finetuned", "job_id": "271112"}
+ {"model": "psmathur/model_51", "base_model": "", "revision": "main", "private": false, "precision": "float16", "weight_type": "Original", "status": "PENDING_NEW_EVAL", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "fine-tuned", "job_id": "271112"}
psmathur/orca_mini_13b_eval_request_False_float16_Original.json CHANGED
@@ -1 +1 @@
- {"model": "psmathur/orca_mini_13b", "base_model": "", "revision": "main", "private": false, "precision": "float16", "weight_type": "Original", "status": "PENDING_NEW_EVAL", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "finetuned", "job_id": "267067"}
+ {"model": "psmathur/orca_mini_13b", "base_model": "", "revision": "main", "private": false, "precision": "float16", "weight_type": "Original", "status": "PENDING_NEW_EVAL", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "fine-tuned", "job_id": "267067"}
reeducator/vicuna-13b-free_eval_request_False_4bit_Original.json CHANGED
@@ -1 +1 @@
- {"model": "reeducator/vicuna-13b-free", "base_model": "", "revision": "main", "private": false, "precision": "4bit", "weight_type": "Original", "status": "FAILED", "submitted_time": "2023-08-25T13:05:42Z", "model_type": "\ud83d\udd36 : finetuned", "job_id": "391859"}
+ {"model": "reeducator/vicuna-13b-free", "base_model": "", "revision": "main", "private": false, "precision": "4bit", "weight_type": "Original", "status": "FAILED", "submitted_time": "2023-08-25T13:05:42Z", "model_type": "fine-tuned", "job_id": "391859"}
togethercomputer/LLaMA-2-7B-32K_eval_request_False_4bit_Original.json CHANGED
@@ -1 +1 @@
- {"model": "togethercomputer/LLaMA-2-7B-32K", "base_model": "", "revision": "main", "private": false, "precision": "4bit", "weight_type": "Original", "status": "PENDING_NEW_EVAL", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "finetuned", "job_id": "267073"}
+ {"model": "togethercomputer/LLaMA-2-7B-32K", "base_model": "", "revision": "main", "private": false, "precision": "4bit", "weight_type": "Original", "status": "PENDING_NEW_EVAL", "submitted_time": "2023-09-09T10:52:17Z", "model_type": "fine-tuned", "job_id": "267073"}