victormiller committed
Commit: 9f87a47 • 1 parent: 3fb75d0
Update curated.py

curated.py CHANGED (+42 -0)
@@ -13,6 +13,47 @@ copyright_disclaimer = P("We respect the copyright of the data sources and have
 
 local_dedup_text = P("Each curated data source has been prepared using its specific rules and has been locally deduped using min-hash near deduplication. Details about the dataset are shown below in the table:")
 
+
+treemap_data = {
+    'Source': ['ArXiv', 'PubMed Central', 'PubMed Abstract', 'S2ORC Full Text', 'S2ORC Abstract', 'PhilPapers', 'Wikipedia', 'StackExchange', 'EuroParl', 'Ubuntu IRC', 'Freelaw', 'PG19', 'USPTO', 'HackerNews', 'DM Maths'],
+    'Category': ['Papers', 'Papers', 'Papers', 'Papers', 'Papers', 'Papers', 'Internet', 'Conversational', 'Legal/Formal', 'Conversational', 'Legal/Formal', 'Books', 'Legal/Formal', 'Conversational', 'Reasoning'],
+    'Count': [100, 200, 150, 120, 80, 90, 300, 250, 180, 150, 150, 250, 180, 120, 90],
+    'Details': [
+        'A repository of scientific papers in various disciplines, including computer science, physics, mathematics, and more.',
+        'A database of biomedical and life sciences research articles.',
+        'Abstracts of biomedical literature from various sources.',
+        'Full-text articles from the Semantic Scholar Open Research Corpus.',
+        'Abstracts of articles from the Semantic Scholar Open Research Corpus.',
+        'Papers from the PhilPapers database, a comprehensive index and bibliography of philosophy research.',
+        'A collaborative online encyclopedia that covers a wide range of topics.',
+        'A network of question-and-answer websites on various subjects, including programming, science, mathematics, and more.',
+        'A collection of multilingual parallel corpora of parliamentary debates from the European Parliament.',
+        'Chat logs from the Ubuntu Internet Relay Chat (IRC) channels.',
+        'Legal documents and court cases from various jurisdictions.',
+        'A collection of books from Project Gutenberg, a digital library of public domain works.',
+        'Patent documents from the United States Patent and Trademark Office.',
+        'User-generated news and discussion platform focused on technology and startups.',
+        'Deep Mind Maths dataset with generated questions.'
+    ]
+}
+# Calculate percentage for each data source
+total_count = sum(treemap_data['Count'])
+treemap_data['Percentage'] = [count / total_count * 100 for count in treemap_data['Count']]
+
+# Create treemap
+fig = px.treemap(treemap_data, path=['Category', 'Source'], values='Count', hover_data=['Details', 'Percentage'], hover_name='Source')
+
+# Set the size of the chart
+
+
+# Display treemap
+treemap_chart = fig.update_layout(width=800, height=600)
+
+
+
+
+
+
 data_pipeline_table = pd.DataFrame(
     {
         "Data Source": [
@@ -431,6 +472,7 @@ def curated(request):
         H2("Curated Sources: Overview"),
         overview_text,
         copyright_disclaimer,
+        treemap_chart,
         table_desc,
         H2("Curated Sources: Data Gathering and Filtering"),
         H3("Data Acquisition"),
|