from huggingface_hub import HfApi, Repository
import gradio as gr
import json


def change_tab(query_param):
    # Parse the URL query params (passed in as a stringified dict)
    # and select the "Plot" tab when "?tab=plot" is present.
    query_param = query_param.replace("'", '"')
    query_param = json.loads(query_param)

    if (
        isinstance(query_param, dict)
        and "tab" in query_param
        and query_param["tab"] == "plot"
    ):
        return gr.Tabs.update(selected=1)
    else:
        return gr.Tabs.update(selected=0)


def restart_space(LLM_PERF_LEADERBOARD_REPO, OPTIMUM_TOKEN):
    # Restart the leaderboard Space so it picks up freshly pushed benchmark results.
    HfApi().restart_space(repo_id=LLM_PERF_LEADERBOARD_REPO, token=OPTIMUM_TOKEN)


def load_dataset_repo(LLM_PERF_DATASET_REPO, OPTIMUM_TOKEN):
    # Clone the benchmark dataset repo locally (and pull the latest commits)
    # when a Hub token is available; otherwise return None.
    llm_perf_dataset_repo = None
    if OPTIMUM_TOKEN:
        print("Loading LLM-Perf-Dataset from Hub...")
        llm_perf_dataset_repo = Repository(
            local_dir="./llm-perf-dataset",
            clone_from=LLM_PERF_DATASET_REPO,
            token=OPTIMUM_TOKEN,
            repo_type="dataset",
        )
        llm_perf_dataset_repo.git_pull()

    return llm_perf_dataset_repo


LLM_MODEL_TYPES = {
    # branded ?
    "gpt_bigcode": "GPT-BigCode 🌸",
    "RefinedWebModel": "Falcon 🦅",
    "RefinedWeb": "Falcon 🦅",
    "baichuan": "Baichuan 🌊",
    "bloom": "Bloom 🌸",
    "llama": "LLaMA 🦙",
    # unbranded ? suggest something
    "stablelm_alpha": "StableLM-Alpha",
    "gpt_neox": "GPT-NeoX",
    "gpt_neo": "GPT-Neo",
    "codegen": "CodeGen",
    "chatglm": "ChatGLM",
    "gpt2": "GPT-2",
    "gptj": "GPT-J",
    "xglm": "XGLM",
    "rwkv": "RWKV",
    "bart": "BART",
    "opt": "OPT",
    "mpt": "MPT",
}


def model_hyperlink(link, model_name):
    # Render a model name as a dotted-underline hyperlink for the leaderboard table.
    return f'<a target="_blank" href="{link}" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">{model_name}</a>'


def process_model_name(model_name):
    # Link the model name to its page on the Hugging Face Hub.
    link = f"https://huggingface.co/{model_name}"
    return model_hyperlink(link, model_name)


def process_model_type(model_type):
    # Map a raw model type to its display name, falling back to the raw value.
    if model_type in LLM_MODEL_TYPES:
        return LLM_MODEL_TYPES[model_type]
    else:
        return model_type
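

# Minimal usage sketch: one way these helpers could be wired into a Gradio 3.x
# Blocks app (where `gr.Tabs.update(selected=...)` is valid). The component layout,
# the JS snippet, and the tab titles below are illustrative assumptions, not taken
# from the original Space code.
if __name__ == "__main__":
    # JS callback that forwards the page's URL query params to `change_tab`.
    get_window_url_params = """
        function(url_params) {
            const params = new URLSearchParams(window.location.search);
            return Object.fromEntries(params);
        }
    """

    with gr.Blocks() as demo:
        with gr.Tabs(elem_id="tabs") as tabs:
            with gr.TabItem("Leaderboard", id=0):
                gr.HTML(process_model_name("bigscience/bloom"))
            with gr.TabItem("Plot", id=1):
                gr.Markdown("Plots would go here.")

        # Hidden textbox that receives the query params returned by the JS callback.
        url_params = gr.Textbox(visible=False)
        demo.load(change_tab, inputs=[url_params], outputs=[tabs], _js=get_window_url_params)

    demo.launch()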