yinanhe committed
Commit 014f3a8 (1 parent: 2ae3b27)

[update] overall score

Files changed (1):
  1. app.py (+23, -8)
app.py CHANGED
@@ -64,11 +64,26 @@ def add_new_eval(
     submission_repo.push_to_hub()
     return 0
 
-def get_final_score(df):
-    # score calculation formula
+def get_normalized_df(df):
+    # final_score = df.drop('name', axis=1).sum(axis=1)
+    # df.insert(1, 'Overall Score', final_score)
+    normalize_df = df.copy()
+    for column in normalize_df.columns[1:]:
+        min_val = NORMALIZE_DIC[column]['Min']
+        max_val = NORMALIZE_DIC[column]['Max']
+        normalize_df[column] = (normalize_df[column] - min_val) / (max_val - min_val)
+    return normalize_df
+
+def calculate_selected_score(df, selected_columns):
+    selected_score = df[selected_columns].sum(axis=1)
+    return selected_score
+
+def get_final_score(df, selected_columns):
+    normalize_df = get_normalized_df(df)
     final_score = df.drop('name', axis=1).sum(axis=1)
-    # put the total score column in the second column
-    df.insert(1, 'Final Score', final_score)
+    df.insert(1, 'Overall Score', final_score)
+    selected_score = calculate_selected_score(normalize_df, selected_columns)
+    df.insert(1, 'Selected Score', selected_score)
     return df
 
 def get_baseline_df():
@@ -76,8 +91,8 @@ def get_baseline_df():
     submission_repo.git_pull()
     df = pd.read_csv(CSV_DIR)
     df = get_final_score(df)
-    # calculate the final score
-    df = df.sort_values(by="Final Score", ascending=False)
+    # calculate the Overall Score
+    df = df.sort_values(by="Overall Score", ascending=False)
     present_columns = MODEL_INFO + checkbox_group.value
     df = df[present_columns]
     return df
@@ -87,7 +102,7 @@ def get_all_df():
     submission_repo.git_pull()
     df = pd.read_csv(CSV_DIR)
     df = get_final_score(df)
-    df = df.sort_values(by="Final Score", ascending=False)
+    df = df.sort_values(by="Selected Score", ascending=False)
     return df
 
 def on_filter_model_size_method_change(selected_columns):
@@ -138,7 +153,7 @@ with block:
     # selection for column part:
    checkbox_group = gr.CheckboxGroup(
         choices=TASK_INFO,
-        value=AVG_INFO,
+        value=DEFAULT_INFO,
         label="Evaluation Dimension",
         interactive=True,
     )
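
Taken together, the commit splits scoring in two: an Overall Score (raw per-task sums) and a Selected Score (per-task values min-max normalized against NORMALIZE_DIC bounds, then summed over the checked dimensions). Below is a minimal sketch of how the new helpers compose, using a toy leaderboard; the task names and NORMALIZE_DIC entries here are hypothetical, the real ones are defined elsewhere in app.py.

import pandas as pd

# Hypothetical per-task bounds; the real NORMALIZE_DIC lives elsewhere in app.py.
NORMALIZE_DIC = {
    'temporal_quality': {'Min': 0.0, 'Max': 1.0},
    'text_alignment':   {'Min': 0.2, 'Max': 0.9},
}

def get_normalized_df(df):
    # Min-max normalize every task column (everything except 'name').
    normalize_df = df.copy()
    for column in normalize_df.columns[1:]:
        min_val = NORMALIZE_DIC[column]['Min']
        max_val = NORMALIZE_DIC[column]['Max']
        normalize_df[column] = (normalize_df[column] - min_val) / (max_val - min_val)
    return normalize_df

# Toy leaderboard with two models and hypothetical task columns.
df = pd.DataFrame({
    'name': ['model_a', 'model_b'],
    'temporal_quality': [0.80, 0.60],
    'text_alignment':   [0.55, 0.85],
})

selected = ['temporal_quality', 'text_alignment']              # stands in for checkbox_group.value
overall = df.drop('name', axis=1).sum(axis=1)                  # Overall Score: raw sums
selected_score = get_normalized_df(df)[selected].sum(axis=1)   # Selected Score: normalized sums
print(overall.tolist(), selected_score.tolist())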