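"""Download the files listed in posts.json, bucket them by post id (id % 1000),
pack each completed bucket into a flat tar archive, and upload the archives to
the nyanko7/yandere2023 dataset on the Hugging Face Hub."""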
import concurrent.futures
import json
import os
import tarfile
import time
from pathlib import Path

from curl_cffi import requests
from huggingface_hub import HfApi
from tqdm import tqdm

# Use the hf_transfer backend for faster Hugging Face uploads (requires the hf_transfer package).
os.environ["HF_HUB_ENABLE_HF_TRANSFER"] = "1"


def main():
    # Bucket posts by id % 1000; each bucket is downloaded, archived, and uploaded as one tar.
    groups = {}
    yandere_data = json.loads(Path("posts.json").read_text())
    for item in tqdm(yandere_data, desc="Processing yandere", ascii=True):
        file_id = item["id"]
        cutoff = str(int(file_id) % 1000).zfill(4)
        groups.setdefault(cutoff, []).append((file_id, cutoff, item["file_url"]))

    with concurrent.futures.ThreadPoolExecutor(max_workers=4) as tar_executor:
        for key, group in groups.items():
            # Buckets to skip in this run.
            if key in [
                '0862', '0863', '0865', '0867', '0869',
                '0868', '0870', '0871', '0873',
                '0874', '0876', '0877', '0878', '0879',
                '0880', '0881', '0882', '0883', '0884', '0885',
                '0886', '0887', '0889', '0890', '0891',
                '0892', '0893', '0894', '0896', '0897', '0898',
                '0899', '0900', '0901', '0902', '0903',
            ]:
                continue

            keybar = tqdm(desc=f"Downloading files in key={key}", total=len(group),
                          position=1, ascii=True, leave=False)
            os.makedirs(f"yandere/{key}/", exist_ok=True)

            # Keep retrying the whole bucket until every non-empty URL has a file on disk.
            ok = False
            while not ok:
                with concurrent.futures.ThreadPoolExecutor(max_workers=12) as executor:
                    for file_id, cutoff, file_url in group:
                        executor.submit(download, file_id, cutoff, file_url, keybar)
                # Leaving the context manager waits for all submitted downloads to finish.
                ok = True
                for file_id, cutoff, file_url in group:
                    suffix = Path(file_url).suffix
                    if file_url != "" and not Path(f"yandere/{cutoff}/{file_id}{suffix}").is_file():
                        ok = False

            # Archive and upload the finished bucket in the background.
            tar_executor.submit(archive_and_upload, Path(f"yandere/{key}"), key)
            print(f"Finished download group {key}")
            keybar.close()


def rm_tree(pth: Path):
    """Recursively delete a directory and everything inside it."""
    for child in pth.iterdir():
        if child.is_file():
            child.unlink()
        else:
            rm_tree(child)
    pth.rmdir()


def archive_and_upload(dirname, name):
    tar_name = Path("yandere-tars") / f"data-{name}.tar"

    if not os.path.isdir(dirname):
        print(f"The directory {dirname} does not exist.")
        return

    # Make sure the output directory for the tars exists.
    tar_name.parent.mkdir(exist_ok=True)

    print(f"Creating {tar_name}")
    # Add every file flat (arcname=file), so the tar contains no subdirectories.
    with tarfile.open(tar_name, "w") as tar:
        for root, dirs, files in os.walk(dirname):
            for file in tqdm(sorted(files), desc=f"Creating {tar_name}", ascii=True):
                tar.add(os.path.join(root, file), arcname=file)

    # The originals are no longer needed once they are in the tar.
    rm_tree(dirname)
    print(f"The directory {dirname} has been removed.")

    # Upload the archive to the dataset repo, then delete the local copy.
    api = HfApi()
    print(api.upload_file(
        path_or_fileobj=tar_name,
        path_in_repo=f"original/data-{name}.tar",
        repo_id="nyanko7/yandere2023",
        repo_type="dataset",
    ))
    tar_name.unlink()


def download(idx, cutoff, file_url, bar):
    suffix = Path(file_url).suffix
    max_attempts = 5

    for attempt in range(max_attempts):
        try:
            # Fetch with curl_cffi, impersonating Chrome's TLS fingerprint.
            r = requests.get(file_url, impersonate="chrome110", timeout=120)
            if r.status_code == 200:
                with open(f"yandere/{cutoff}/{idx}{suffix}", "wb") as f:
                    f.write(r.content)
                break
            print(f"Attempt {attempt + 1} failed to download {file_url}: error {r.status_code}")
        except Exception as e:
            print(f"Attempt {attempt + 1} failed to download {file_url}: error {e}")
        # Brief pause before the next attempt.
        time.sleep(1)
        if attempt + 1 == max_attempts:
            print(f"Failed to download {file_url} after {max_attempts} attempts.")
    bar.update(1)


if __name__ == "__main__":
    main()