Clémentine committed on
Commit 301c384
1 Parent(s): e3a8804

Updated model info to get the number of parameters in almost all cases, even without safetensors

app.py CHANGED
@@ -255,7 +255,7 @@ def filter_models(
     if show_deleted:
         filtered_df = df
     else:  # Show only still on the hub models
-        filtered_df = df[df[AutoEvalColumn.still_on_hub.name] is True]
+        filtered_df = df[df[AutoEvalColumn.still_on_hub.name] == True]
 
     type_emoji = [t[0] for t in type_query]
     filtered_df = filtered_df[df[AutoEvalColumn.model_type_symbol.name].isin(type_emoji)]
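
The `is True` filter removed here never worked as an elementwise mask: `df[col] is True` compares the Series object itself to the `True` singleton and always yields a plain `False`, so the old expression reduced to `df[False]` instead of filtering rows. A minimal sketch (with a hypothetical column name) of the difference:

import pandas as pd

df = pd.DataFrame({"still_on_hub": [True, False, True]})

# Identity check on the Series object: always a plain Python False,
# so df[df["still_on_hub"] is True] becomes df[False] (a KeyError, not a row filter).
bad_mask = df["still_on_hub"] is True
print(bad_mask)  # False

# Elementwise comparison returns a boolean Series that selects the intended rows.
good_mask = df["still_on_hub"] == True
print(df[good_mask])  # rows 0 and 2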
requirements.txt CHANGED
@@ -1,3 +1,4 @@
+accelerate==0.23.0
 aiofiles==23.1.0
 aiohttp==3.8.4
 aiosignal==1.3.1
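
The new `accelerate` pin is what provides `init_empty_weights`, which the metadata change below relies on. A quick optional sanity check (hypothetical snippet, not part of the commit):

import accelerate
from accelerate import init_empty_weights  # used by get_model_size below

print(accelerate.__version__)  # expected to be 0.23.0 per requirements.txt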
src/display_models/get_model_metadata.py CHANGED
@@ -8,6 +8,8 @@ from typing import List
 import huggingface_hub
 from huggingface_hub import HfApi
 from tqdm import tqdm
+from transformers import AutoModel, AutoConfig
+from accelerate import init_empty_weights
 
 from src.display_models.model_metadata_flags import DO_NOT_SUBMIT_MODELS, FLAGGED_MODELS
 from src.display_models.model_metadata_type import MODEL_TYPE_METADATA, ModelType, model_type_from_str
@@ -69,11 +71,17 @@ def get_model_size(model_name, model_info):
         return round(model_info.safetensors["total"] / 1e9, 3)
     except AttributeError:
         try:
-            size_match = re.search(size_pattern, model_name.lower())
-            size = size_match.group(0)
-            return round(float(size[:-1]) if size[-1] == "b" else float(size[:-1]) / 1e3, 3)
-        except AttributeError:
-            return 0
+            config = AutoConfig.from_pretrained(model_name, trust_remote_code=False)
+            with init_empty_weights():
+                model = AutoModel.from_config(config, trust_remote_code=False)
+            return round(sum(p.numel() for p in model.parameters() if p.requires_grad) / 1e9, 3)
+        except (EnvironmentError, ValueError):  # model config not found, likely private
+            try:
+                size_match = re.search(size_pattern, model_name.lower())
+                size = size_match.group(0)
+                return round(float(size[:-1]) if size[-1] == "b" else float(size[:-1]) / 1e3, 3)
+            except AttributeError:
+                return 0
 
 
 def get_model_type(leaderboard_data: List[dict]):
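
For reference, the new fallback can be exercised on its own: fetch only the config, instantiate the architecture inside `init_empty_weights()` so no weights are downloaded, then count trainable parameters. A minimal standalone sketch (the repo id below is only an example, not taken from the commit):

from accelerate import init_empty_weights
from transformers import AutoConfig, AutoModel

model_name = "gpt2"  # example repo id; any public Hub model works

# Fetch only the config, then build the architecture with empty (meta) tensors,
# so the parameter count is available without downloading any weights.
config = AutoConfig.from_pretrained(model_name, trust_remote_code=False)
with init_empty_weights():
    model = AutoModel.from_config(config, trust_remote_code=False)

size_b = round(sum(p.numel() for p in model.parameters() if p.requires_grad) / 1e9, 3)
print(f"{model_name}: ~{size_b}B parameters")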