Monet Joe committed on
Commit 71f2ef3
1 Parent(s): 5da46c3

Delete hoyo_pianos.py

Files changed (1)
  1. hoyo_pianos.py +0 -110
hoyo_pianos.py DELETED
@@ -1,110 +0,0 @@
- import os
- import random
- import hashlib
- import datasets
- from midi2abc import midi2abc
-
-
- _HOMEPAGE = f"https://huggingface.co/datasets/MuGeminorum/{os.path.basename(__file__).split('.')[0]}"
-
- _CITATION = """\
- @dataset{mihoyo_pianos,
-   author    = {MuGeminorum Studio},
-   title     = {mihoyo game piano songs},
-   month     = {nov},
-   year      = {2023},
-   publisher = {HF},
-   version   = {1.1},
-   url       = {https://huggingface.co/datasets/MuGeminorum/hoyo_pianos}
- }
- """
-
- _DESCRIPTION = """\
- This database contains mihoyo game piano songs downloaded from musescore
- """
-
- _URLS = {
-     "genshin": f"{_HOMEPAGE}/resolve/main/data/genshin.zip",
-     "starail": f"{_HOMEPAGE}/resolve/main/data/starail.zip"
- }
-
-
- class hoyo_pianos(datasets.GeneratorBasedBuilder):
-     def _info(self):
-         return datasets.DatasetInfo(
-             features=datasets.Features(
-                 {
-                     "midi": datasets.Value("string"),
-                     "abc": datasets.Value("string"),
-                     "tag": datasets.Value("string")
-                 }
-             ),
-             supervised_keys=("abc", "tag"),
-             homepage=_HOMEPAGE,
-             license="mit",
-             citation=_CITATION,
-             description=_DESCRIPTION
-         )
-
-     def _calculate_hash(self, file_path):
-         # Compute the MD5 hash of the file
-         with open(file_path, 'rb') as midi_file:
-             content = midi_file.read()
-             return hashlib.md5(content).hexdigest()
-
-     def _rm_duplicates_in_folder(self, input_folder):
-         # Dictionary mapping file hashes to paths
-         hash_dict = {}
-         duplist = []
-         # Walk the input folder
-         for root, _, files in os.walk(input_folder):
-             for file in files:
-                 file_path = os.path.join(root, file)
-                 file_hash = self._calculate_hash(file_path)
-
-                 # Check whether this file hash has been seen before
-                 if file_hash in hash_dict:
-                     print(f"Duplicates found: {file}")
-                     # Record the duplicate, then delete it
-                     duplist.append(file_path)
-                     os.remove(file_path)
-                 else:
-                     # Store the file hash
-                     hash_dict[file_hash] = file_path
-
-         return duplist
-
-     def _split_generators(self, dl_manager):
-         dataset = []
-         for key in _URLS.keys():
-             data_files = dl_manager.download_and_extract(_URLS[key])
-             files = dl_manager.iter_files([data_files])
-             subset = []
-
-             extract_dir = os.path.join(data_files, key)
-             duplist = self._rm_duplicates_in_folder(extract_dir)
-
-             for path in files:
-                 if (path not in duplist) and os.path.basename(path).endswith(".mid"):
-                     subset.append(path)
-
-             random.shuffle(subset)
-             dataset.append(
-                 datasets.SplitGenerator(
-                     name=key,
-                     gen_kwargs={
-                         "files": subset
-                     }
-                 )
-             )
-
-         return dataset
-
-     def _generate_examples(self, files):
-         for i, path in enumerate(files):
-             yield i, {
-                 "midi": path,
-                 "abc": midi2abc(path),
-                 # Zip entries mangle non-ASCII names as cp437; re-decode as gbk to recover the Chinese title
-                 "tag": os.path.basename(path)[:-4].encode('cp437').decode('gbk')
-             }