Tasks: Text Generation
Modalities: Text
Sub-tasks: language-modeling
Languages: English
Size: 100K - 1M
License:

from bs4 import BeautifulSoup as bs
import os
import wikipediaapi
import sys
import re
import pypandoc
import json
from pathlib import Path
from fetch_books_and_formal import _download_with_progress_bar
from fetch_mathoverflow import batch_loader
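
# Assumptions about the two local helpers imported above (they are not defined
# in this file): `_download_with_progress_bar` appears to return the raw
# response bytes (the caller decodes them as UTF-8 below), and `batch_loader`
# appears to split a list into fixed-size chunks (it is called with size 500).
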
def page_titles_of_category(cat_page):
    """
    Recursively collect the titles of all main-namespace pages reachable from
    a category page, descending into subcategories.
    """
    titles = []
    for member in cat_page.categorymembers.values():
        if member.ns == wikipediaapi.Namespace.MAIN:
            titles.append(member.title)
        elif member.ns == wikipediaapi.Namespace.CATEGORY:
            titles += page_titles_of_category(member)
    return titles
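
# Example (assumed) usage of `page_titles_of_category`, not part of the
# original script: build a wikipedia-api client and walk one maths category.
# Older versions of wikipedia-api accept just a language code; newer versions
# also require a `user_agent` string.
#
#   wiki = wikipediaapi.Wikipedia("en")
#   titles = page_titles_of_category(wiki.page("Category:Mathematical_proofs"))
#   print(len(titles))
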
def wikipedia():
    """
    This doesn't work; don't run it.
    """
    # NOTE: the body below references a `wiki` client that is never defined,
    # which is presumably why this function is disabled. A client would be
    # created with something like `wiki = wikipediaapi.Wikipedia("en")`.
    init_categories = [
        #"Category:Mathematical_theorems",
        "Category:Mathematical_proofs",
        #"Category:Mathematical_examples",
        #"Category:Mathematical_problems",
        #"Category:Mathematical_terminology",
    ]

    title_set = set()
    for cat_name in init_categories:
        print(cat_name + "...")
        title_set = title_set.union(page_titles_of_category(wiki.page(cat_name)))

PROOFWIKI_URL = (
    "https://zenodo.org/record/4902289/files/naturalproofs_proofwiki.json?download=1"
)
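
# Assumed shape of the NaturalProofs ProofWiki dump, inferred from the field
# accesses in `proofwiki` below (the actual file may contain more keys):
#
#   {
#     "dataset": {
#       "theorems": [
#         {"label": ..., "categories": [...], "contents": [...],
#          "proofs": [{"contents": [...]}, ...]},
#         ...
#       ],
#       "definitions": [
#         {"label": ..., "contents": [...]},
#         ...
#       ]
#     }
#   }
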
def proofwiki(testing=False):
    save_dir = "wiki/proofwiki"
    Path(save_dir).mkdir(parents=True, exist_ok=True)

    if testing:
        with open("naturalproofs/proofwiki.json") as f:
            struct = json.load(f)
    else:
        print("DOWNLOADING PROOFWIKI")
        resp = _download_with_progress_bar(PROOFWIKI_URL)
        struct = json.loads(resp.decode("utf-8"))
        print("DONE DOWNLOADING PROOFWIKI")

    # Write the theorems (with their proofs) in shards of 500, one LaTeX-style
    # record per theorem, separated by an <|endoftext|> token.
    batches = batch_loader(struct["dataset"]["theorems"], 500)
    for i, batch in enumerate(batches):
        thms_list = []
        for thm in batch:
            if thm["contents"]:
                thm_string = "\\section{" + thm["label"] + "}\n"
                thm_string += (
                    "Tags: " + ", ".join(thm["categories"]).replace("/", ": ") + "\n\n"
                )
                thm_string += (
                    "\\begin{theorem}\n"
                    + "\n".join(thm["contents"])
                    + "\n\\end{theorem}\n\n"
                )
                for proof in thm["proofs"]:
                    thm_string += (
                        "\\begin{proof}\n"
                        + "\n".join(proof["contents"])
                        + "\n\\end{proof}\n\n"
                    )
                thms_list.append(thm_string.strip())

        with open(os.path.join(save_dir, f"shard_{i}.txt"), "w") as f:
            f.write("<|endoftext|>\n".join(thms_list))

    # Definitions all go into a single file, in the same <|endoftext|>-separated
    # format as the theorem shards.
    defn_strings = []
    for defn in struct["dataset"]["definitions"]:
        if defn["contents"]:
            defn_strings.append((
                "\\begin{definition}["
                + defn["label"]
                + "]\n"
                + "\n".join(defn["contents"])
                + "\n\\end{definition}").strip()
            )

    with open(os.path.join(save_dir, "defs.txt"), "w") as f:
        f.write("<|endoftext|>\n".join(defn_strings))


if __name__ == "__main__":
    #wikipedia()
    proofwiki()
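
# A minimal sketch (not part of the original script) of how the output could be
# read back: each shard written above is a plain-text file whose records are
# separated by the literal string "<|endoftext|>\n".
#
#   with open("wiki/proofwiki/shard_0.txt") as f:
#       records = f.read().split("<|endoftext|>\n")
#   print(records[0][:200])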