Lakoc commited on
Commit
23931c3
β€’
1 Parent(s): cda9ae2

v0.0.2 added model details, proper ranking and modal for confirmation

Browse files
Files changed (5) hide show
  1. .gitattributes +20 -0
  2. app.py +54 -13
  3. content.py +15 -1
  4. model_compare.py +0 -34
  5. server.py +32 -13
.gitattributes CHANGED
@@ -8,6 +8,7 @@
8
  *.h5 filter=lfs diff=lfs merge=lfs -text
9
  *.joblib filter=lfs diff=lfs merge=lfs -text
10
  *.lfs.* filter=lfs diff=lfs merge=lfs -text
 
11
  *.mlmodel filter=lfs diff=lfs merge=lfs -text
12
  *.model filter=lfs diff=lfs merge=lfs -text
13
  *.msgpack filter=lfs diff=lfs merge=lfs -text
@@ -33,3 +34,22 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
8
  *.h5 filter=lfs diff=lfs merge=lfs -text
9
  *.joblib filter=lfs diff=lfs merge=lfs -text
10
  *.lfs.* filter=lfs diff=lfs merge=lfs -text
11
+ *.lz4 filter=lfs diff=lfs merge=lfs -text
12
  *.mlmodel filter=lfs diff=lfs merge=lfs -text
13
  *.model filter=lfs diff=lfs merge=lfs -text
14
  *.msgpack filter=lfs diff=lfs merge=lfs -text
 
34
  *.zip filter=lfs diff=lfs merge=lfs -text
35
  *.zst filter=lfs diff=lfs merge=lfs -text
36
  *tfevents* filter=lfs diff=lfs merge=lfs -text
37
+ # Audio files - uncompressed
38
+ *.pcm filter=lfs diff=lfs merge=lfs -text
39
+ *.sam filter=lfs diff=lfs merge=lfs -text
40
+ *.raw filter=lfs diff=lfs merge=lfs -text
41
+ # Audio files - compressed
42
+ *.aac filter=lfs diff=lfs merge=lfs -text
43
+ *.flac filter=lfs diff=lfs merge=lfs -text
44
+ *.mp3 filter=lfs diff=lfs merge=lfs -text
45
+ *.ogg filter=lfs diff=lfs merge=lfs -text
46
+ *.wav filter=lfs diff=lfs merge=lfs -text
47
+ # Image files - uncompressed
48
+ *.bmp filter=lfs diff=lfs merge=lfs -text
49
+ *.gif filter=lfs diff=lfs merge=lfs -text
50
+ *.png filter=lfs diff=lfs merge=lfs -text
51
+ *.tiff filter=lfs diff=lfs merge=lfs -text
52
+ # Image files - compressed
53
+ *.jpg filter=lfs diff=lfs merge=lfs -text
54
+ *.jpeg filter=lfs diff=lfs merge=lfs -text
55
+ *.webp filter=lfs diff=lfs merge=lfs -text
app.py CHANGED
@@ -3,8 +3,10 @@ import os
3
  import gradio as gr
4
  import pandas as pd
5
  from gradio.themes.utils.sizes import text_md
6
-
7
- from content import (HEADER_MARKDOWN, LEADERBOARD_TAB_TITLE_MARKDOWN, SUBMISSION_TAB_TITLE_MARKDOWN)
 
 
8
  from server import LeaderboardServer
9
 
10
  leaderboard_server = LeaderboardServer()
@@ -48,14 +50,28 @@ def submit_results():
48
  return gr.update(value='Pre-submit model', visible=True, interactive=True), gr.update(
49
  visible=False), gr.update(visible=False), gr.update(visible=False), gr.update(
50
  visible=False), gr.update(visible=False), gr.DataFrame(
51
- value=leaderboard_server.get_leaderboard(), visible=True)
52
 
53
 
54
- def erase_presubmit():
55
  leaderboard_server.pre_submit = None
56
  return gr.update(value='Pre-submit model', visible=True, interactive=True), gr.update(
57
  visible=False), gr.update(visible=False), gr.update(visible=False), gr.update(
58
- visible=False), gr.update(visible=False)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
59
 
60
 
61
  with (gr.Blocks(theme=gr.themes.Soft(text_size=text_md), css="footer {visibility: hidden}") as main):
@@ -71,6 +87,19 @@ with (gr.Blocks(theme=gr.themes.Soft(text_size=text_md), css="footer {visibility
71
  results_table = gr.DataFrame(leaderboard_server.get_leaderboard(), interactive=False, label=None,
72
  visible=True)
73
 
 
 
 
 
 
 
 
 
 
 
 
 
 
74
  with gr.Tab('Submission'):
75
  with gr.Column():
76
  gr.Markdown(SUBMISSION_TAB_TITLE_MARKDOWN)
@@ -86,16 +115,12 @@ with (gr.Blocks(theme=gr.themes.Soft(text_size=text_md), css="footer {visibility
86
  pre_submission_btn = gr.Button(value='Pre-submit model', interactive=True)
87
 
88
  submit_prompt = gr.Markdown(
89
- """
90
- Do you really want to submit a model? This action is irreversible.
91
- """,
92
  visible=False
93
  )
94
 
95
  pre_submit_info = gr.Markdown(
96
- """
97
- This is how will ranking look like after your submission:
98
- """,
99
  visible=False
100
  )
101
 
@@ -104,6 +129,11 @@ with (gr.Blocks(theme=gr.themes.Soft(text_size=text_md), css="footer {visibility
104
  submission_btn_yes = gr.Button(value='Submit model', interactive=False, visible=False)
105
  submission_btn_no = gr.Button(value='Reverse process', interactive=False, visible=False)
106
 
 
 
 
 
 
107
  pre_submission_btn.click(
108
  fn=on_submit_pressed,
109
  outputs=[pre_submission_btn]
@@ -116,12 +146,23 @@ with (gr.Blocks(theme=gr.themes.Soft(text_size=text_md), css="footer {visibility
116
  )
117
 
118
  submission_btn_yes.click(
 
 
 
 
 
119
  fn=submit_results,
120
  outputs=[pre_submission_btn, submission_btn_yes, submission_btn_no, submit_prompt, pre_submit_info,
121
- pre_submit_table, results_table]
 
 
 
 
 
122
  )
 
123
  submission_btn_no.click(
124
- fn=erase_presubmit,
125
  outputs=[pre_submission_btn, submission_btn_yes, submission_btn_no, submit_prompt, pre_submit_info,
126
  pre_submit_table]
127
  )
 
3
  import gradio as gr
4
  import pandas as pd
5
  from gradio.themes.utils.sizes import text_md
6
+ from gradio_modal import Modal
7
+ from content import (HEADER_MARKDOWN, LEADERBOARD_TAB_TITLE_MARKDOWN, SUBMISSION_TAB_TITLE_MARKDOWN,
8
+ MODAL_SUBMIT_MARKDOWN,
9
+ SUBMISSION_DETAILS_MARKDOWN, RANKING_AFTER_SUBMISSION_MARKDOWN, MORE_DETAILS_MARKDOWN)
10
  from server import LeaderboardServer
11
 
12
  leaderboard_server = LeaderboardServer()
 
50
  return gr.update(value='Pre-submit model', visible=True, interactive=True), gr.update(
51
  visible=False), gr.update(visible=False), gr.update(visible=False), gr.update(
52
  visible=False), gr.update(visible=False), gr.DataFrame(
53
+ value=leaderboard_server.get_leaderboard(), visible=True), gr.update(visible=False)
54
 
55
 
56
+ def erase_pre_submit():
57
  leaderboard_server.pre_submit = None
58
  return gr.update(value='Pre-submit model', visible=True, interactive=True), gr.update(
59
  visible=False), gr.update(visible=False), gr.update(visible=False), gr.update(
60
+ visible=False), gr.update(visible=False), gr.update(visible=False)
61
+
62
+
63
+ def fetch_model_detail(submission_id):
64
+ metadata = leaderboard_server.get_model_detail(submission_id)
65
+ return gr.update(value=metadata['description'], visible=True), gr.update(value=metadata['link_to_model'],
66
+ visible=True)
67
+
68
+
69
+ def show_modal():
70
+ return gr.update(visible=True)
71
+
72
+
73
+ def hide_modal():
74
+ return gr.update(visible=False)
75
 
76
 
77
  with (gr.Blocks(theme=gr.themes.Soft(text_size=text_md), css="footer {visibility: hidden}") as main):
 
87
  results_table = gr.DataFrame(leaderboard_server.get_leaderboard(), interactive=False, label=None,
88
  visible=True)
89
 
90
+ gr.Markdown(MORE_DETAILS_MARKDOWN)
91
+ detail_dropdown = gr.Dropdown(choices=leaderboard_server.submission_ids, label="Select model",
92
+ interactive=True)
93
+
94
+ with gr.Row():
95
+ model_description = gr.Text(value='', label='Model description', visible=False, interactive=False)
96
+ model_url = gr.Text(value='', label='Model url', visible=False, interactive=False)
97
+
98
+ detail_dropdown.change(
99
+ fn=fetch_model_detail,
100
+ inputs=[detail_dropdown],
101
+ outputs=[model_description, model_url])
102
+
103
  with gr.Tab('Submission'):
104
  with gr.Column():
105
  gr.Markdown(SUBMISSION_TAB_TITLE_MARKDOWN)
 
115
  pre_submission_btn = gr.Button(value='Pre-submit model', interactive=True)
116
 
117
  submit_prompt = gr.Markdown(
118
+ SUBMISSION_DETAILS_MARKDOWN,
 
 
119
  visible=False
120
  )
121
 
122
  pre_submit_info = gr.Markdown(
123
+ RANKING_AFTER_SUBMISSION_MARKDOWN,
 
 
124
  visible=False
125
  )
126
 
 
129
  submission_btn_yes = gr.Button(value='Submit model', interactive=False, visible=False)
130
  submission_btn_no = gr.Button(value='Reverse process', interactive=False, visible=False)
131
 
132
+ with Modal(visible=False) as modal_submit:
133
+ gr.Markdown(MODAL_SUBMIT_MARKDOWN)
134
+ modal_submit_yes = gr.Button("Yes", interactive=True)
135
+ modal_submit_no = gr.Button("No", interactive=True)
136
+
137
  pre_submission_btn.click(
138
  fn=on_submit_pressed,
139
  outputs=[pre_submission_btn]
 
146
  )
147
 
148
  submission_btn_yes.click(
149
+ fn=show_modal,
150
+ outputs=[modal_submit]
151
+ )
152
+
153
+ modal_submit_yes.click(
154
  fn=submit_results,
155
  outputs=[pre_submission_btn, submission_btn_yes, submission_btn_no, submit_prompt, pre_submit_info,
156
+ pre_submit_table, results_table, modal_submit]
157
+ )
158
+
159
+ modal_submit_no.click(
160
+ fn=hide_modal,
161
+ outputs=[modal_submit]
162
  )
163
+
164
  submission_btn_no.click(
165
+ fn=erase_pre_submit,
166
  outputs=[pre_submission_btn, submission_btn_yes, submission_btn_no, submit_prompt, pre_submit_info,
167
  pre_submit_table]
168
  )
content.py CHANGED
@@ -9,7 +9,7 @@ HEADER_MARKDOWN = """
9
 
10
  LEADERBOARD_TAB_TITLE_MARKDOWN = """
11
  ## Leaderboard
12
- The leaderboard below shows the current ranking of the models...
13
 
14
  """
15
 
@@ -26,3 +26,17 @@ SUBMISSION_TAB_TITLE_MARKDOWN = """
26
  This will run a comparison of your model with the existing leaderboard models.
27
  After the tournament is complete, you will be able to submit your model to the leaderboard.
28
  """
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
9
 
10
  LEADERBOARD_TAB_TITLE_MARKDOWN = """
11
  ## Leaderboard
12
+ The leaderboard below shows the number of significant wins for each model, followed by specific metrics...
13
 
14
  """
15
 
 
26
  This will run a comparison of your model with the existing leaderboard models.
27
  After the tournament is complete, you will be able to submit your model to the leaderboard.
28
  """
29
+
30
+ RANKING_AFTER_SUBMISSION_MARKDOWN = """
31
+ This is how the ranking will look after your submission:
32
+ """
33
+ SUBMISSION_DETAILS_MARKDOWN = """
34
+ Do you really want to submit a model? This action is irreversible.
35
+ """
36
+ MORE_DETAILS_MARKDOWN = """
37
+ ## Model details:
38
+ """
39
+
40
+ MODAL_SUBMIT_MARKDOWN = """
41
+ Are you sure you want to submit your model?
42
+ """
model_compare.py DELETED
@@ -1,34 +0,0 @@
1
- from functools import cmp_to_key
2
-
3
-
4
- class ModelCompare:
5
-
6
- def __init__(self, tasks, ranks: dict = None):
7
- self.current_task = None
8
- self.ranks = ranks
9
- self.tasks = tasks
10
-
11
- def compare_models(self, model_a, model_b):
12
- if not self.ranks:
13
- raise Exception("Missing model rankings")
14
-
15
- res = self.ranks[model_a][model_b][self.current_task]
16
- if res:
17
- return 1
18
- elif not res:
19
- return -1
20
- else:
21
- return -1
22
-
23
- def get_tasks_ranks(self, ranks: dict) -> dict:
24
- """Order models based on the significance improvement"""
25
-
26
- self.ranks = ranks
27
-
28
- tasks_ranks = {}
29
-
30
- models = ranks.keys()
31
- for task in self.tasks:
32
- self.current_task = task
33
- tasks_ranks[task] = sorted(models, key=cmp_to_key(self.compare_models))
34
- return tasks_ranks
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
server.py CHANGED
@@ -7,8 +7,9 @@ import gradio as gr
7
  import pandas as pd
8
  from huggingface_hub import HfApi, snapshot_download
9
 
10
- from compare_significance import check_significance
11
- from model_compare import ModelCompare
 
12
 
13
  api = HfApi()
14
 
@@ -27,7 +28,6 @@ class LeaderboardServer:
27
  self.submisssion_id_to_file = {} # Map submission ids to file paths
28
  self.tasks_metadata = json.load(open(TASKS_METADATA_PATH))['tasks']
29
  self.submission_ids = set()
30
- self.comparer = ModelCompare(self.tasks_metadata.keys())
31
  self.fetch_existing_models()
32
  self.tournament_results = self.load_tournament_results()
33
  self.pre_submit = None
@@ -59,14 +59,13 @@ class LeaderboardServer:
59
  self.submisssion_id_to_file[submission_id] = submission
60
 
61
  def get_leaderboard(self, tournament_results=None):
62
- rank_based_on = tournament_results if tournament_results else self.tournament_results
63
 
64
- if len(rank_based_on) == 0:
65
  return pd.DataFrame(columns=['No submissions yet'])
66
  else:
67
- ranks = self.comparer.get_tasks_ranks(rank_based_on)
68
- results = []
69
- for submission in rank_based_on.keys():
70
  path = self.submisssion_id_to_file.get(submission)
71
  if path is None:
72
  if self.pre_submit and submission == self.pre_submit[1]:
@@ -79,14 +78,27 @@ class LeaderboardServer:
79
  raise gr.Error(f"Submission [{submission}] not found")
80
  submission_id = data["metadata"]["team_name"] + "_" + data["metadata"]["submission_id"]
81
 
82
- local_results = {task: list(task_ranks).index(submission_id) + 1 for task, task_ranks in ranks.items()}
 
 
 
 
 
 
 
 
 
 
83
  local_results["submission_id"] = submission_id
 
84
  if self.pre_submit and submission == self.pre_submit[1]:
85
- results.insert(0, local_results)
86
  else:
87
- results.append(local_results)
88
- dataframe = pd.DataFrame.from_records(results)
89
- df_order = ["submission_id"] + [col for col in dataframe.columns if col != "submission_id"]
 
 
90
  dataframe = dataframe[df_order]
91
  dataframe = dataframe.rename(columns={key: value["name"] for key, value in self.tasks_metadata.items()})
92
  return dataframe
@@ -142,3 +154,10 @@ class LeaderboardServer:
142
  repo_type=self.repo_type,
143
  token=HF_TOKEN,
144
  )
 
 
 
 
 
 
 
 
7
  import pandas as pd
8
  from huggingface_hub import HfApi, snapshot_download
9
 
10
+ from compare_significance import check_significance, SUPPORTED_METRICS
11
+
12
+ VISIBLE_METRICS = SUPPORTED_METRICS + ["macro_f1"]
13
 
14
  api = HfApi()
15
 
 
28
  self.submisssion_id_to_file = {} # Map submission ids to file paths
29
  self.tasks_metadata = json.load(open(TASKS_METADATA_PATH))['tasks']
30
  self.submission_ids = set()
 
31
  self.fetch_existing_models()
32
  self.tournament_results = self.load_tournament_results()
33
  self.pre_submit = None
 
59
  self.submisssion_id_to_file[submission_id] = submission
60
 
61
  def get_leaderboard(self, tournament_results=None):
62
+ results = tournament_results if tournament_results else self.tournament_results
63
 
64
+ if len(results) == 0:
65
  return pd.DataFrame(columns=['No submissions yet'])
66
  else:
67
+ processed_results = []
68
+ for submission in results.keys():
 
69
  path = self.submisssion_id_to_file.get(submission)
70
  if path is None:
71
  if self.pre_submit and submission == self.pre_submit[1]:
 
78
  raise gr.Error(f"Submission [{submission}] not found")
79
  submission_id = data["metadata"]["team_name"] + "_" + data["metadata"]["submission_id"]
80
 
81
+ local_results = {}
82
+ for task in self.tasks_metadata.keys():
83
+ local_results[task] = 0
84
+ for model in results[submission].keys():
85
+ if results[submission][model][task]:
86
+ local_results[task] += 1
87
+ for metric in VISIBLE_METRICS:
88
+ metric_value = data['results'][task].get(metric)
89
+ if metric_value is not None:
90
+ local_results[task + "_" + metric] = metric_value
91
+
92
  local_results["submission_id"] = submission_id
93
+
94
  if self.pre_submit and submission == self.pre_submit[1]:
95
+ processed_results.insert(0, local_results)
96
  else:
97
+ processed_results.append(local_results)
98
+ dataframe = pd.DataFrame.from_records(processed_results)
99
+ df_order = (["submission_id"] + list(self.tasks_metadata.keys()) +
100
+ [col for col in dataframe.columns if
101
+ col != "submission_id" and col not in self.tasks_metadata.keys()])
102
  dataframe = dataframe[df_order]
103
  dataframe = dataframe.rename(columns={key: value["name"] for key, value in self.tasks_metadata.items()})
104
  return dataframe
 
154
  repo_type=self.repo_type,
155
  token=HF_TOKEN,
156
  )
157
+
158
+ def get_model_detail(self, submission_id):
159
+ path = self.submisssion_id_to_file.get(submission_id)
160
+ if path is None:
161
+ raise gr.Error(f"Submission [{submission_id}] not found")
162
+ data = json.load(open(path))
163
+ return data["metadata"]