justheuristic committed on
Commit
9e99ac6
1 Parent(s): bd1d1aa

update meta-info

Browse files
Files changed (3) hide show
  1. app.py +3 -6
  2. st_helpers.py +1 -1
  3. static/meta.html +3 -3
app.py CHANGED
@@ -27,18 +27,15 @@ So, can individual researchers and small labs still train state-of-the-art? Yes
27
  All it takes is for a bunch of us to come together. In fact, we're doing it right now and <b>you're invited to join!</b>
28
  """, vspace_before=12)
29
 
30
- content_text("<br>")
31
 
32
  draw_current_progress()
33
 
34
  content_text(f"""
35
  The model we're training is called DALLE: a transformer "language model" that generates images from text description.
36
  We're training this model on <a href=https://laion.ai/laion-400-open-dataset/>LAION</a> - the world's largest openly available
37
- image-text-pair dataset with 400 million samples.
38
- <b>TODO</b> You see a short description of training dataset, model architecture and training configuration.
39
- In includes all necessary citations and, most importantly, a down-to-earth explanation of what exactly is dalle.
40
- It properly refers the communities that provided data, the source codebase and provides necessary links.
41
- """)
42
 
43
  content_title("How do I join?")
44
 
 
27
  All it takes is for a bunch of us to come together. In fact, we're doing it right now and <b>you're invited to join!</b>
28
  """, vspace_before=12)
29
 
30
+ st.markdown("<br>", unsafe_allow_html=True)
31
 
32
  draw_current_progress()
33
 
34
  content_text(f"""
35
  The model we're training is called DALLE: a transformer "language model" that generates images from text description.
36
  We're training this model on <a href=https://laion.ai/laion-400-open-dataset/>LAION</a> - the world's largest openly available
37
+ image-text-pair dataset with 400 million samples. Our model is based on
38
+ <a href=https://github.com/lucidrains/DALLE-pytorch>dalle-pytorch</a> with additional features for memory efficiency.""")
 
 
 
39
 
40
  content_title("How do I join?")
41
 
st_helpers.py CHANGED
@@ -37,5 +37,5 @@ CITATIONS = {}
37
 
38
 
39
  def cite(tag):
40
- CITATIONS[tag] = len(CITATIONS) + 1
41
  return f"&nbsp;[{CITATIONS[tag]}]"
 
37
 
38
 
39
  def cite(tag):
40
+ CITATIONS.setdefault(tag, len(CITATIONS) + 1)
41
  return f"&nbsp;[{CITATIONS[tag]}]"
static/meta.html CHANGED
@@ -1,5 +1,5 @@
1
  <meta http-equiv="Content-Type" content="text/html; charset=utf-8"/>
2
- <title>learning@home</title>
3
  <meta name="description" content="A NeurIPS'21 demonstration that explains how to train large models together with multiple collaborators.">
4
  <link rel="mask-icon" href="https://learning-at-home.github.io/logo_small.png">
5
  <link rel="alternate icon" class="js-site-favicon" type="image/png" href="https://learning-at-home.github.io/logo.png">
@@ -7,7 +7,7 @@
7
  <meta property="og:url" content="https://training-transformers-together.github.io">
8
  <meta property="og:site_name" content="learning@home">
9
  <meta property="og:title" content="Train vast neural networks together">
10
- <meta property="og:description" content="A library to train large neural networks across the internet. Imagine training one huge transformer on thousands of computers from universities, companies, and volunteers.">
11
  <meta property="og:image" content="https://learning-at-home.github.io/logo_small.png">
12
  <meta property="og:image:type" content="image/png">
13
  <meta property="og:image:width" content="96">
@@ -16,6 +16,6 @@
16
  <meta property="twitter:creator" content="Yandex, Huggingface, Hivemind team & contributors">
17
  <meta property="twitter:card" content="summary_large_image">
18
  <meta property="twitter:title" content="learning@home">
19
- <meta property="twitter:description" content="Hivemind is a library to train large neural networks across the internet. Imagine training one huge transformer on thousands of computers from universities, companies, and volunteers.">
20
  <meta property="twitter:image:src" content="https://learning-at-home.github.io/logo_horizontal.png">
21
  <meta name="viewport" content="width=device-width, initial-scale=1">
 
1
  <meta http-equiv="Content-Type" content="text/html; charset=utf-8"/>
2
+ <title>Training Transformers Together</title>
3
  <meta name="description" content="A NeurIPS'21 demonstration that explains how to train large models together with multiple collaborators.">
4
  <link rel="mask-icon" href="https://learning-at-home.github.io/logo_small.png">
5
  <link rel="alternate icon" class="js-site-favicon" type="image/png" href="https://learning-at-home.github.io/logo.png">
 
7
  <meta property="og:url" content="https://training-transformers-together.github.io">
8
  <meta property="og:site_name" content="learning@home">
9
  <meta property="og:title" content="Train vast neural networks together">
10
+ <meta property="og:description" content="A NeurIPS'21 demonstration that explains how to train large models together with multiple collaborators.">
11
  <meta property="og:image" content="https://learning-at-home.github.io/logo_small.png">
12
  <meta property="og:image:type" content="image/png">
13
  <meta property="og:image:width" content="96">
 
16
  <meta property="twitter:creator" content="Yandex, Huggingface, Hivemind team & contributors">
17
  <meta property="twitter:card" content="summary_large_image">
18
  <meta property="twitter:title" content="learning@home">
19
+ <meta property="twitter:description" content="A NeurIPS'21 demonstration that explains how to train large models together with multiple collaborators.">
20
  <meta property="twitter:image:src" content="https://learning-at-home.github.io/logo_horizontal.png">
21
  <meta name="viewport" content="width=device-width, initial-scale=1">