import os
import shutil
import hashlib

from tqdm import tqdm


def calculate_hash(file_path):
    # Compute the MD5 hash of the file's contents.
    with open(file_path, 'rb') as midi_file:
        content = midi_file.read()
    return hashlib.md5(content).hexdigest()
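calculate_hash reads each file into memory in a single call, which is fine for typical MIDI files. If the dataset also contains large files, hashing in chunks keeps memory use flat; the sketch below is an optional drop-in alternative (calculate_hash_chunked and chunk_size are names introduced here, not part of the script above).

def calculate_hash_chunked(file_path, chunk_size=8192):
    # Optional sketch: hash the file in fixed-size chunks so large files
    # are never fully loaded into memory. Assumes only the hex digest is
    # needed downstream, same as calculate_hash above.
    md5 = hashlib.md5()
    with open(file_path, 'rb') as midi_file:
        for chunk in iter(lambda: midi_file.read(chunk_size), b''):
            md5.update(chunk)
    return md5.hexdigest()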
def mv_duplicates_to_folder(input_folder, output_folder):
    # Create the folder that will hold the duplicates.
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # Maps file hash -> first path seen with that hash.
    hash_dict = {}

    # Walk the input folder recursively.
    for root, _, files in os.walk(input_folder):
        for file in tqdm(files, desc='Moving duplicates'):
            file_path = os.path.join(root, file)
            file_hash = calculate_hash(file_path)
            # A hash we have already seen means this file is a duplicate.
            if file_hash in hash_dict:
                print(f"Duplicate found: {file}")
                # Move the duplicate into the duplicates folder.
                destination_path = os.path.join(output_folder, file)
                shutil.move(file_path, destination_path)
            else:
                # Record the hash of this first occurrence.
                hash_dict[file_hash] = file_path
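One caveat: shutil.move silently overwrites when two duplicates from different subfolders share a file name, so an earlier copy in output_folder can be lost. A minimal sketch of a collision-safe destination helper, assuming you want to keep every moved copy (unique_destination is a hypothetical name, not part of the script):

def unique_destination(output_folder, file_name):
    # Hypothetical helper: append a counter before the extension until the
    # destination path is free, so same-named duplicates are not overwritten.
    base, ext = os.path.splitext(file_name)
    candidate = os.path.join(output_folder, file_name)
    counter = 1
    while os.path.exists(candidate):
        candidate = os.path.join(output_folder, f"{base}_{counter}{ext}")
        counter += 1
    return candidate

With that helper, the destination line above would become destination_path = unique_destination(output_folder, file).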
def rm_duplicates_in_folder(input_folder):
    # Maps file hash -> first path seen with that hash.
    hash_dict = {}
    duplist = []

    # Walk the input folder recursively.
    for root, _, files in os.walk(input_folder):
        for file in tqdm(files, desc='Removing duplicates'):
            file_path = os.path.join(root, file)
            file_hash = calculate_hash(file_path)
            # A hash we have already seen means this file is a duplicate.
            if file_hash in hash_dict:
                print(f"Duplicate found: {file}")
                # Delete the duplicate file in place.
                # (os.remove, not shutil.rmtree: rmtree only deletes directories.)
                duplist.append(file_path)
                os.remove(file_path)
            else:
                # Record the hash of this first occurrence.
                hash_dict[file_hash] = file_path

    return duplist
if __name__ == "__main__":
    mv_duplicates_to_folder(input_folder="data", output_folder="duplicates")
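To delete duplicates in place instead of moving them, the main guard could call the other function; a sketch, with "data" as an example path:

    removed = rm_duplicates_in_folder(input_folder="data")
    print(f"Removed {len(removed)} duplicate files")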