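"""Gradio app for the MLSB 2024 challenge leaderboards.

Shows the PINDER and PLINDER leaderboards and lets participants submit a model:
a submission is written to a JSON request file and uploaded to the eval queue
dataset repo on the Hugging Face Hub.
"""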
import json
import os
from pathlib import Path

import gradio as gr
import pandas as pd
from gradio_leaderboard import Leaderboard

from envs import API, EVAL_REQUESTS_PATH, TOKEN, QUEUE_REPO
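# envs is a local module; API is expected to be an authenticated huggingface_hub.HfApi
# client, EVAL_REQUESTS_PATH a local "eval-queue" working directory, and QUEUE_REPO
# the id of the dataset repo that collects submission requests.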


def submit(model_name, model_id, challenge, submission_id, paper_link, architecture, license):
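    """Validate the submission form and upload an eval request JSON file to the queue repo."""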
    
    # Basic form validation: all required fields must be filled in.
    if model_name == "" or model_id == "" or challenge == "" or architecture == "" or license == "":
        raise gr.Error("Please fill in all the fields.")
    if submission_id == "" and paper_link == "":
        raise gr.Error("Provide either a link to a paper describing the method or a submission ID for the MLSB workshop.")
    if "/" not in model_id:
        raise gr.Error("The model ID must have the form username/space, e.g. mlsb/alphafold3.")
    try:
        user_name = model_id.split("/")[0]
        model_path = model_id.split("/")[1]

        eval_entry = {
            "model_name": model_name,
            "model_id": model_id,
            "challenge": challenge,
            "submission_id": submission_id,
            "paper_link": paper_link,
            "architecture": architecture,
            "license": license,
        }

        # Write the request file locally before uploading it to the eval queue repo.
        out_dir = f"{EVAL_REQUESTS_PATH}/{user_name}"
        os.makedirs(out_dir, exist_ok=True)
        out_path = f"{out_dir}/{user_name}_{model_path}.json"

        with open(out_path, "w") as f:
            json.dump(eval_entry, f)

        print("Uploading eval file")
        API.upload_file(
            path_or_fileobj=out_path,
            # Keep only the part of the path below the local eval-queue directory.
            path_in_repo=out_path.split("eval-queue/")[1],
            repo_id=QUEUE_REPO,
            repo_type="dataset",
            commit_message=f"Add {model_name} to eval queue",
        )
        gr.Info("Successfully submitted", duration=10)
        # Remove the local copy once the upload has succeeded.
        os.remove(out_path)
    except Exception as e:
        raise gr.Error(f"Error submitting the model: {e}")





abs_path = Path(__file__).parent

# Load the leaderboard tables (any pandas-compatible data source works here).
pinder_df = pd.read_json(str(abs_path / "leaderboard_pinder.json"))
plinder_df = pd.read_json(str(abs_path / "leaderboard_plinder.json"))
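# Both files are expected to contain at least the columns referenced in the Leaderboard
# components below, including the hidden "model_name_for_query" column used for search.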

with gr.Blocks() as demo:
    gr.Markdown("""
    # MLSB 2024 Challenges

    Please find more details about the challenge on [mlsb.io/#challenge](https://www.mlsb.io/#challenge).

    This competition is run together with VantAI, NVIDIA, Hugging Face, and the University of Basel.
    """)


    with gr.Tab("🎖️ PINDER Leaderboard"):
        gr.Markdown("""## PINDER Leaderboard
                Evaluating protein-protein interaction prediction
                """)
        Leaderboard(
            value=pinder_df,
            select_columns=["Arch", "Model", "L_rms", "I_rms",
                            "F_nat", "DOCKQ", "CAPRI"],
            search_columns=["model_name_for_query"],
            hide_columns=["model_name_for_query"],
            filter_columns=["Arch"],
        )
    with gr.Tab("🥇 PLINDER Leaderboard"):
        gr.Markdown("""## PLINDER Leaderboard
                Evaluating protein-ligand prediction
                """)
        Leaderboard(
            value=plinder_df,
            select_columns=["Arch", "Model", "Mean lDDT-PLI", "Median RMSD",
                            "Success Rate (% lDDT-PLI >= 0.7)"],
            search_columns=["model_name_for_query"],
            hide_columns=["model_name_for_query"],
            filter_columns=["Arch"],
        )
    with gr.Tab("✉️ Submit"):
        gr.Markdown("""## Submit your model
                Submit your model to one of the leaderboards
                """)
        model_name = gr.Textbox(label="Model name")
        model_id = gr.Textbox(label="username/space, e.g. mlsb/alphafold3")
        challenge = gr.Radio(choices=["PINDER", "PLINDER"], label="Challenge")
        gr.Markdown("Either give a submission ID if you submitted to the MLSB workshop, or provide a link to the preprint/paper describing the method.")
        with gr.Row():
            submission_id = gr.Textbox(label="Submission ID on CMT")
            paper_link = gr.Textbox(label="Preprint or paper link")
        architecture = gr.Dropdown(choices=["GNN", "CNN", "Physics-based", "Other"], label="Model architecture")
        license = gr.Dropdown(choices=["mit", "apache-2.0", "gplv2", "gplv3", "lgpl", "mozilla", "bsd", "other"], label="License")
        submit_btn = gr.Button("Submit")

        submit_btn.click(
            submit,
            inputs=[model_name, model_id, challenge, submission_id, paper_link, architecture, license],
            outputs=[],
        )

if __name__ == "__main__":
    demo.launch()