import gradio as gr
from gradio_leaderboard import Leaderboard
from pathlib import Path
import pandas as pd
import os
import json
from envs import API, EVAL_REQUESTS_PATH, TOKEN, QUEUE_REPO
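# A submission is written locally as a JSON request file and then uploaded to the
# evaluation queue dataset (QUEUE_REPO) via API, presumably an HfApi-style client
# configured in envs.py alongside EVAL_REQUESTS_PATH and TOKEN.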
def submit(model_name, model_id, challenge, submission_id, architecture, license):
    # Reject incomplete submissions; gr.Error must be raised to surface in the UI.
    if model_name == "" or model_id == "" or challenge == "" or submission_id == "" or architecture == "" or license == "":
        raise gr.Error("Please fill in all the fields")
    try:
        # Split "username/space" into its parts; fall back to the raw id
        # so model_path is always defined.
        user_name = ""
        model_path = model_id
        if "/" in model_id:
            user_name = model_id.split("/")[0]
            model_path = model_id.split("/")[1]
        eval_entry = {
            "model_name": model_name,
            "model_id": model_id,
            "challenge": challenge,
            "submission_id": submission_id,
            "architecture": architecture,
            "license": license,
        }
        # Write the request locally, then push it to the evaluation queue dataset.
        OUT_DIR = f"{EVAL_REQUESTS_PATH}/{user_name}"
        os.makedirs(OUT_DIR, exist_ok=True)
        out_path = f"{OUT_DIR}/{user_name}_{model_path}.json"
        with open(out_path, "w") as f:
            f.write(json.dumps(eval_entry))
        print("Uploading eval file")
        API.upload_file(
            path_or_fileobj=out_path,
            # EVAL_REQUESTS_PATH is expected to contain "eval-queue/"; everything
            # after it becomes the path inside the queue repo.
            path_in_repo=out_path.split("eval-queue/")[1],
            repo_id=QUEUE_REPO,
            repo_type="dataset",
            commit_message=f"Add {model_name} to eval queue",
        )
        gr.Info("Successfully submitted", duration=10)
        # Remove the local request file once it has been uploaded
        os.remove(out_path)
    except Exception:
        raise gr.Error("Error submitting the model")
abs_path = Path(__file__).parent
# Any pandas-compatible data
df = pd.read_json(str(abs_path / "leaderboard_data.json"))
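# The same DataFrame backs both leaderboard tabs below; only the tab titles and
# descriptions differ.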
with gr.Blocks() as demo:
    gr.Markdown("""
    # MLSB 2024 Challenges
    """)
    with gr.Tab("🎖️ PINDER Leaderboard"):
        gr.Markdown("""## PINDER Leaderboard
        Evaluating Protein-Protein interaction prediction
        """)
        Leaderboard(
            value=df,
            select_columns=["Arch", "Model", "L_rms", "I_rms",
                            "F_nat", "DOCKQ", "CAPRI"],
            search_columns=["model_name_for_query"],
            hide_columns=["model_name_for_query"],
            filter_columns=["Arch"],
        )
with gr.Tab("🥇 PLINDER Leaderboard"):
gr.Markdown("""## PLINDER Leaderboard
Evaluating Protein-Ligand prediction
""")
Leaderboard(
value=df,
select_columns=["Arch", "Model", "L_rms", "I_rms",
"F_nat", "DOCKQ", "CAPRI"],
search_columns=["model_name_for_query"],
hide_columns=["model_name_for_query",],
filter_columns=["Arch"],
)
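    # The submission form below mirrors the arguments of submit() defined above.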
with gr.Tab("✉️ Submit"):
gr.Markdown("""## Submit your model
Submit your model to the leaderboard
""")
model_name = gr.Textbox(label="Model name")
model_id = gr.Textbox(label="username/space e.g mlsb/alphafold3")
challenge = gr.Radio(choices=["PINDER", "PLINDER"],label="Challenge")
submission_id = gr.Textbox(label="Submission ID on CMT")
architecture = gr.Dropdown(choices=["GNN", "CNN", "Physics-based", "Other"],label="Model architecture")
license = gr.Dropdown(choices=["mit", "apache-2.0", "gplv2", "gplv3", "lgpl", "mozilla", "bsd", "other"],label="License")
submit_btn = gr.Button("Submit")
submit_btn.click(submit, inputs=[model_name, model_id, challenge, submission_id, architecture, license], outputs=[])
if __name__ == "__main__":
    demo.launch()