remove bare except + format
- app.py +10 -19
- dashboard_utils/bubbles.py +12 -17
- dashboard_utils/main_metrics.py +4 -2
- dashboard_utils/time_tracker.py +11 -7
- streamlit_observable/__init__.py +12 -17
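
The substantive change is the removal of the bare try/except blocks around the wandb run summaries in dashboard_utils/bubbles.py; the rest is formatting and import sorting. A minimal sketch of the before/after pattern (the run_summary dict below is a made-up stand-in for a wandb summary row):

run_summary = {"_timestamp": 1639000000.0, "_step": 42}  # hypothetical summary row

# Before: a bare except swallowed every error, including real bugs.
try:
    timestamp = run_summary["_timestamp"]
except Exception:
    pass

# After: only touch the summary when the expected keys are actually present.
if "_timestamp" in run_summary and "_step" in run_summary:
    timestamp = run_summary["_timestamp"]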
app.py
CHANGED
@@ -1,11 +1,11 @@
-
+import altair as alt
+import pandas as pd
 import streamlit as st
 import wandb
-import pandas as pd
-import altair as alt
-from streamlit_observable import observable
 
 from dashboard_utils.bubbles import get_new_bubble_data
+from dashboard_utils.main_metrics import get_main_metrics
+from streamlit_observable import observable
 
 wandb.login(anonymous="must")
 
@@ -13,30 +13,21 @@ st.title("Training transformers together dashboard")
 st.caption("Training Loss")
 
 steps, losses, alive_peers = get_main_metrics()
-source = pd.DataFrame({
-    "steps": steps, "loss":losses, "alive participants":alive_peers
-})
+source = pd.DataFrame({"steps": steps, "loss": losses, "alive participants": alive_peers})
 
-chart_loss = alt.Chart(source).mark_line().encode(
-    x='steps',
-    y='loss'
-)
-st.altair_chart(chart_loss, use_container_width=True)
+chart_loss = alt.Chart(source).mark_line().encode(x="steps", y="loss")
+st.altair_chart(chart_loss, use_container_width=True)
 
 st.caption("Number of alive participants over time")
-chart_alive_peer = alt.Chart(source).mark_line().encode(
-    x='steps',
-    y='alive participants'
-)
-st.altair_chart(chart_alive_peer, use_container_width=True)
+chart_alive_peer = alt.Chart(source).mark_line().encode(x="steps", y="alive participants")
+st.altair_chart(chart_alive_peer, use_container_width=True)
 
 st.header("Collaborative training participants")
 serialized_data, profiles = get_new_bubble_data()
-with st.spinner(
+with st.spinner("Wait for it..."):
     observers = observable(
         "Participants",
         notebook="d/9ae236a507f54046",  # "@huggingface/participants-bubbles-chart",
         targets=["c_noaws"],
         redefine={"serializedData": serialized_data, "profileSimple": profiles},
     )
-
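The Altair calls in app.py collapse to one-liners, with the x encoding spelled out explicitly. A stand-alone sketch of the same shorthand on dummy data (the numbers are made up; column names containing spaces, such as "alive participants", are accepted as encoding shorthands):

import altair as alt
import pandas as pd

# Dummy values standing in for what get_main_metrics() returns.
source = pd.DataFrame({"steps": [0, 1, 2], "loss": [3.2, 2.8, 2.5], "alive participants": [10, 12, 11]})

chart_loss = alt.Chart(source).mark_line().encode(x="steps", y="loss")
chart_alive_peer = alt.Chart(source).mark_line().encode(x="steps", y="alive participants")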
dashboard_utils/bubbles.py
CHANGED
@@ -1,15 +1,16 @@
 import datetime
+from concurrent.futures import as_completed
 from urllib import parse
 
-from concurrent.futures import as_completed
-from requests_futures.sessions import FuturesSession
-import requests
 import wandb
-from dashboard_utils.time_tracker import simple_time_tracker, _log
+from requests_futures.sessions import FuturesSession
+
+from dashboard_utils.time_tracker import _log, simple_time_tracker
 
 URL_QUICKSEARCH = "https://huggingface.co/api/quicksearch?"
 WANDB_REPO = "learning-at-home/Worker_logs"
 
+
 @simple_time_tracker(_log)
 def get_new_bubble_data():
     serialized_data_points, latest_timestamp = get_serialized_data_points()
@@ -18,12 +19,12 @@ def get_new_bubble_data():
 
     return serialized_data, profiles
 
+
 @simple_time_tracker(_log)
 def get_profiles(serialized_data_points):
     profiles = []
-    anonymous_taken = False
     with FuturesSession() as session:
-        futures=[]
+        futures = []
         for username in serialized_data_points.keys():
             future = session.get(URL_QUICKSEARCH + parse.urlencode({"type": "user", "q": username}))
             future.username = username
@@ -35,7 +36,7 @@ def get_profiles(serialized_data_points):
             avatarUrl = None
             if response["users"]:
                 for user_candidate in response["users"]:
-                    if user_candidate[
+                    if user_candidate["user"] == username:
                         avatarUrl = response["users"][0]["avatarUrl"]
                         break
             if not avatarUrl:
@@ -49,6 +50,7 @@ def get_profiles(serialized_data_points):
         )
     return profiles
 
+
 @simple_time_tracker(_log)
 def get_serialized_data_points():
 
@@ -62,7 +64,7 @@ def get_serialized_data_points():
         run_name = run.name
 
         if run_name in serialized_data_points:
-            try:
+            if "_timestamp" in run_summary and "_step" in run_summary:
                 timestamp = run_summary["_timestamp"]
                 serialized_data_points[run_name]["Runs"].append(
                     {
@@ -75,12 +77,8 @@ def get_serialized_data_points():
                 )
                 if not latest_timestamp or timestamp > latest_timestamp:
                     latest_timestamp = timestamp
-            except Exception as e:
-                pass
-                # print(e)
-                # print([key for key in list(run_summary.keys()) if "gradients" not in key])
         else:
-            try:
+            if "_timestamp" in run_summary and "_step" in run_summary:
                 timestamp = run_summary["_timestamp"]
                 serialized_data_points[run_name] = {
                     "profileId": run_name,
@@ -96,13 +94,10 @@ def get_serialized_data_points():
                 }
                 if not latest_timestamp or timestamp > latest_timestamp:
                     latest_timestamp = timestamp
-            except Exception as e:
-                pass
-                # print(e)
-                # print([key for key in list(run_summary.keys()) if "gradients" not in key])
     latest_timestamp = datetime.datetime.utcfromtimestamp(latest_timestamp)
     return serialized_data_points, latest_timestamp
 
+
 @simple_time_tracker(_log)
 def get_serialized_data(serialized_data_points, latest_timestamp):
     serialized_data_points_v2 = []
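get_profiles issues the quicksearch requests concurrently through requests-futures and tags each future with the username it was fired for. A minimal sketch of that pattern, using only the imports the file now declares (the usernames are placeholders for serialized_data_points.keys()):

from concurrent.futures import as_completed
from urllib import parse

from requests_futures.sessions import FuturesSession

URL_QUICKSEARCH = "https://huggingface.co/api/quicksearch?"
usernames = ["user-a", "user-b"]  # placeholder usernames

with FuturesSession() as session:
    futures = []
    for username in usernames:
        future = session.get(URL_QUICKSEARCH + parse.urlencode({"type": "user", "q": username}))
        future.username = username  # futures are plain objects, so an attribute can carry context
        futures.append(future)

    for future in as_completed(futures):
        response = future.result().json()
        print(future.username, bool(response.get("users")))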
dashboard_utils/main_metrics.py
CHANGED
@@ -1,8 +1,10 @@
-from dashboard_utils.time_tracker import simple_time_tracker, _log
 import wandb
 
+from dashboard_utils.time_tracker import _log, simple_time_tracker
+
 WANDB_REPO = "learning-at-home/Main_metrics"
 
+
 @simple_time_tracker(_log)
 def get_main_metrics():
     api = wandb.Api()
@@ -18,4 +20,4 @@ def get_main_metrics():
         losses.append(row["loss"])
         alive_peers.append(row["alive peers"])
 
-    return steps, losses, alive_peers
+    return steps, losses, alive_peers
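For context, get_main_metrics reads per-step rows such as row["loss"] and row["alive peers"] out of the wandb project; the hunk only shows the tail of that loop. A rough sketch of that kind of read, assuming wandb's public Api (which run is selected, and whether the real code uses scan_history, is not visible in the diff):

import wandb

WANDB_REPO = "learning-at-home/Main_metrics"

api = wandb.Api()
run = api.runs(WANDB_REPO)[0]  # assumption: a single metrics run is enough for the sketch

steps, losses, alive_peers = [], [], []
for row in run.scan_history(keys=["_step", "loss", "alive peers"]):
    steps.append(row["_step"])
    losses.append(row["loss"])
    alive_peers.append(row["alive peers"])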
dashboard_utils/time_tracker.py
CHANGED
@@ -1,5 +1,6 @@
-from time import time
 from functools import wraps
+from time import time
+
 
 def simple_time_tracker(log_fun):
     def _simple_time_tracker(fn):
@@ -13,16 +14,19 @@ def simple_time_tracker(log_fun):
             elapsed_time = time() - start_time
 
             # log the result
-            log_fun(
-                {"function_name": fn.__name__,
-                "total_time": elapsed_time,
-                }
-            )
+            log_fun(
+                {
+                    "function_name": fn.__name__,
+                    "total_time": elapsed_time,
+                }
+            )
+
             return result
 
         return wrapped_fn
+
     return _simple_time_tracker
 
 
 def _log(message):
-    print(
+    print("[SimpleTimeTracker] {function_name} {total_time:.3f}".format(**message))
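With the reformat, the decorator body and its default logger read end to end; a small usage sketch (slow_add is a made-up function, and the printed timing will vary):

from time import sleep

from dashboard_utils.time_tracker import _log, simple_time_tracker


@simple_time_tracker(_log)
def slow_add(a, b):
    sleep(0.1)
    return a + b


slow_add(1, 2)  # prints something like: [SimpleTimeTracker] slow_add 0.100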
streamlit_observable/__init__.py
CHANGED
@@ -1,4 +1,5 @@
 import os
+
 import streamlit.components.v1 as components
 
 _RELEASE = True
@@ -22,18 +23,18 @@ def observable(key, notebook, targets=None, redefine={}, observe=[], hide=[]):
     key: str
         A unique string used to avoid constant re-renders to the iframe.
     notebook: str
-        The observablehq.com notebook id to embed. Ex. "@d3/bar-chart"
+        The observablehq.com notebook id to embed. Ex. "@d3/bar-chart"
         or "d/1f434ef3b0569a00"
     targets: list or None
         An optional list of strings that are the name of the cells to embed.
-        By default, the entire notebook, including unnamed cells, will be embeded.
+        By default, the entire notebook, including unnamed cells, will be embeded.
     observe: list or None
         An optional list of strings that are the name of cells to observe.
-        Whenever these cells change value or become fulfilled, the value will
+        Whenever these cells change value or become fulfilled, the value will
         be passed back into Streamlit as part of the return value.
     redefine: dict or None
         An optional dict containing the cells you wish to redefine and the values
-        you wish to redefine them as. The keys are the cell names you want to
+        you wish to redefine them as. The keys are the cell names you want to
         redefine, the values are what they will be redefined as. Keep in mind,
         there is a serialization process from Streamlit Python -> frontend JavaScript.
     hide: list or None
@@ -48,15 +49,9 @@ def observable(key, notebook, targets=None, redefine={}, observe=[], hide=[]):
 
     """
    component_value = _component_func(
-        notebook=notebook,
-        targets=targets,
-        observe=observe,
-        redefine=redefine,
-        hide=hide,
-        key=key,
-        name=key
+        notebook=notebook, targets=targets, observe=observe, redefine=redefine, hide=hide, key=key, name=key
    )
-
+
     if component_value is None:
         return {}
 
@@ -65,12 +60,12 @@ def observable(key, notebook, targets=None, redefine={}, observe=[], hide=[]):
 
 # if not _RELEASE:
 #     import streamlit as st
-#     observers = observable("World Tour!",
-#         notebook="@d3/world-tour",
-#         targets=["canvas"],
+#     observers = observable("World Tour!",
+#         notebook="@d3/world-tour",
+#         targets=["canvas"],
 #         observe=["name"]
 #     )
-
+
 #     name = observers.get("name")
-
+
 #     st.write(f"Current country: ** *{name}* **")
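
The observe parameter documented in the docstring is the path for getting cell values back into Python; the commented-out block at the bottom of the file demonstrates it, and a runnable version would look roughly like this (notebook id and cell names taken from that example):

import streamlit as st

from streamlit_observable import observable

# Embed the "canvas" cell of @d3/world-tour and watch its "name" cell;
# observed cells come back in the dict returned by observable().
observers = observable(
    "World Tour",
    notebook="@d3/world-tour",
    targets=["canvas"],
    observe=["name"],
)
name = observers.get("name")
st.write(f"Current country: {name}")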