r3gm committed
Commit: 6392bc1
Parent(s): 2fe2afd

Upload 6 files

Applio-RVC-Fork/utils/README.md ADDED
@@ -0,0 +1,6 @@
+ # External Colab Code
+ Code used to make Google Colab work correctly
+ - Repo link: https://github.com/IAHispano/Applio-RVC-Fork/
+
+ Thanks to https://github.com/kalomaze/externalcolabcode
+
Applio-RVC-Fork/utils/backups.py ADDED
@@ -0,0 +1,141 @@
+ import os
+ import shutil
+ import hashlib
+ import time
+ import base64
+
+
+ LOGS_FOLDER = '/content/Applio-RVC-Fork/logs'
+ WEIGHTS_FOLDER = '/content/Applio-RVC-Fork/weights'
+ GOOGLE_DRIVE_PATH = '/content/drive/MyDrive/RVC_Backup'
+
+ def import_google_drive_backup():
+     print("Importing Google Drive backup...")
+     weights_exist = False
+     for root, dirs, files in os.walk(GOOGLE_DRIVE_PATH):
+         for filename in files:
+             filepath = os.path.join(root, filename)
+             if os.path.isfile(filepath) and not filepath.startswith(os.path.join(GOOGLE_DRIVE_PATH, 'weights')):
+                 backup_filepath = os.path.join(LOGS_FOLDER, os.path.relpath(filepath, GOOGLE_DRIVE_PATH))
+                 backup_folderpath = os.path.dirname(backup_filepath)
+                 if not os.path.exists(backup_folderpath):
+                     os.makedirs(backup_folderpath)
+                     print(f'Created backup folder: {backup_folderpath}', flush=True)
+                 shutil.copy2(filepath, backup_filepath)  # copy file with metadata
+                 print(f'Imported file from Google Drive backup: {filename}')
+             elif filepath.startswith(os.path.join(GOOGLE_DRIVE_PATH, 'weights')) and filename.endswith('.pth'):
+                 weights_exist = True
+                 weights_filepath = os.path.join(WEIGHTS_FOLDER, os.path.relpath(filepath, os.path.join(GOOGLE_DRIVE_PATH, 'weights')))
+                 weights_folderpath = os.path.dirname(weights_filepath)
+                 if not os.path.exists(weights_folderpath):
+                     os.makedirs(weights_folderpath)
+                     print(f'Created weights folder: {weights_folderpath}', flush=True)
+                 shutil.copy2(filepath, weights_filepath)  # copy file with metadata
+                 print(f'Imported file from weights: {filename}')
+     if weights_exist:
+         print("Copied weights from Google Drive backup to local weights folder.")
+     else:
+         print("No weights found in Google Drive backup.")
+     print("Google Drive backup import completed.")
+
+ def get_md5_hash(file_path):
+     hash_md5 = hashlib.md5()
+     with open(file_path, "rb") as f:
+         for chunk in iter(lambda: f.read(4096), b""):
+             hash_md5.update(chunk)
+     return hash_md5.hexdigest()
+
+ def copy_weights_folder_to_drive():
+     destination_folder = os.path.join(GOOGLE_DRIVE_PATH, 'weights')
+     try:
+         if not os.path.exists(destination_folder):
+             os.makedirs(destination_folder)
+
+         num_copied = 0
+         for filename in os.listdir(WEIGHTS_FOLDER):
+             if filename.endswith('.pth'):
+                 source_file = os.path.join(WEIGHTS_FOLDER, filename)
+                 destination_file = os.path.join(destination_folder, filename)
+                 if not os.path.exists(destination_file):
+                     shutil.copy2(source_file, destination_file)
+                     num_copied += 1
+                     print(f"Copied {filename} to Google Drive!")
+
+         if num_copied == 0:
+             print("No new finished models found for copying.")
+         else:
+             print(f"Finished copying {num_copied} files to Google Drive!")
+
+     except Exception as e:
+         print(f"An error occurred while copying weights: {str(e)}")
+         # You can log the error or take appropriate actions here.
+
+ def backup_files():
+     print("\nStarting backup loop...")
+     last_backup_timestamps_path = os.path.join(LOGS_FOLDER, 'last_backup_timestamps.txt')
+     fully_updated = False  # boolean to track if all files are up to date
+
+     while True:
+         try:
+             updated = False  # flag to check if any files were updated
+             last_backup_timestamps = {}
+
+             try:
+                 with open(last_backup_timestamps_path, 'r') as f:
+                     last_backup_timestamps = dict(line.strip().split(':') for line in f)
+             except FileNotFoundError:
+                 pass  # File does not exist yet, which is fine
+
+             for root, dirs, files in os.walk(LOGS_FOLDER):
+                 for filename in files:
+                     if filename != 'last_backup_timestamps.txt':
+                         filepath = os.path.join(root, filename)
+                         if os.path.isfile(filepath):
+                             backup_filepath = os.path.join(GOOGLE_DRIVE_PATH, os.path.relpath(filepath, LOGS_FOLDER))
+                             backup_folderpath = os.path.dirname(backup_filepath)
+                             if not os.path.exists(backup_folderpath):
+                                 os.makedirs(backup_folderpath)
+                                 print(f'Created backup folder: {backup_folderpath}', flush=True)
+                             # check if file has changed since last backup
+                             last_backup_timestamp = last_backup_timestamps.get(filepath)
+                             current_timestamp = os.path.getmtime(filepath)
+                             if last_backup_timestamp is None or float(last_backup_timestamp) < current_timestamp:
+                                 shutil.copy2(filepath, backup_filepath)  # copy file with metadata
+                                 last_backup_timestamps[filepath] = str(current_timestamp)  # update last backup timestamp
+                                 if last_backup_timestamp is None:
+                                     print(f'Backed up file: {filename}')
+                                 else:
+                                     print(f'Updating backed up file: {filename}')
+                                 updated = True
+                                 fully_updated = False  # if a file is updated, all files are not up to date
+
+             # check if any files were deleted in Colab and delete them from the backup drive
+             for filepath in list(last_backup_timestamps.keys()):
+                 if not os.path.exists(filepath):
+                     backup_filepath = os.path.join(GOOGLE_DRIVE_PATH, os.path.relpath(filepath, LOGS_FOLDER))
+                     if os.path.exists(backup_filepath):
+                         os.remove(backup_filepath)
+                         print(f'Deleted file: {filepath}')
+                     del last_backup_timestamps[filepath]
+                     updated = True
+                     fully_updated = False  # if a file is deleted, all files are not up to date
+
+             if not updated and not fully_updated:
+                 print("Files are up to date.")
+                 fully_updated = True  # if all files are up to date, set the boolean to True
+                 copy_weights_folder_to_drive()
+                 sleep_time = 15
+             else:
+                 sleep_time = 0.1
+
+             with open(last_backup_timestamps_path, 'w') as f:
+                 for filepath, timestamp in last_backup_timestamps.items():
+                     f.write(f'{filepath}:{timestamp}\n')
+
+             time.sleep(sleep_time)  # wait 15 seconds before checking again, or 0.1s if not fully up to date to speed up backups
+
+         except Exception as e:
+             print(f"An error occurred: {str(e)}")
+             # You can log the error or take appropriate actions here.
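A minimal usage sketch for this module (not part of the commit): it assumes the file is importable as `utils.backups` and that Google Drive is already mounted. Since `backup_files()` loops forever, it is typically started on a daemon thread from a Colab cell:

    import threading
    from utils.backups import import_google_drive_backup, backup_files

    # Restore any previous session state from Drive first.
    import_google_drive_backup()

    # backup_files() never returns, so run it in the background
    # while the training cells keep the main thread.
    threading.Thread(target=backup_files, daemon=True).start()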
Applio-RVC-Fork/utils/backups_test.py ADDED
@@ -0,0 +1,138 @@
+ import os
+ import shutil
+ import hashlib
+ import time
+
+ LOGS_FOLDER = '/content/Applio-RVC-Fork/logs'
+ WEIGHTS_FOLDER = '/content/Applio-RVC-Fork/weights'
+ GOOGLE_DRIVE_PATH = '/content/drive/MyDrive/RVC_Backup'
+
+ def import_google_drive_backup():
+     print("Importing Google Drive backup...")
+     GOOGLE_DRIVE_PATH = '/content/drive/MyDrive/RVC_Backup'  # change this to your Google Drive path
+     LOGS_FOLDER = '/content/Applio-RVC-Fork/logs'
+     WEIGHTS_FOLDER = '/content/Applio-RVC-Fork/weights'
+     weights_exist = False
+     files_to_copy = []
+     weights_to_copy = []
+
+     def handle_files(root, files, is_weight_files=False):
+         nonlocal weights_exist  # without this, the assignment below would only create a local and the flag would never be seen outside
+         for filename in files:
+             filepath = os.path.join(root, filename)
+             if filename.endswith('.pth') and is_weight_files:
+                 weights_exist = True
+                 # relpath against the weights subfolder so files land directly in WEIGHTS_FOLDER
+                 backup_filepath = os.path.join(WEIGHTS_FOLDER, os.path.relpath(filepath, os.path.join(GOOGLE_DRIVE_PATH, 'weights')))
+             else:
+                 # relpath against the logs subfolder so files land directly in LOGS_FOLDER
+                 backup_filepath = os.path.join(LOGS_FOLDER, os.path.relpath(filepath, os.path.join(GOOGLE_DRIVE_PATH, 'logs')))
+             backup_folderpath = os.path.dirname(backup_filepath)
+             if not os.path.exists(backup_folderpath):
+                 os.makedirs(backup_folderpath)
+                 print(f'Created folder: {backup_folderpath}', flush=True)
+             if is_weight_files:
+                 weights_to_copy.append((filepath, backup_filepath))
+             else:
+                 files_to_copy.append((filepath, backup_filepath))
+
+     for root, dirs, files in os.walk(os.path.join(GOOGLE_DRIVE_PATH, 'logs')):
+         handle_files(root, files)
+
+     for root, dirs, files in os.walk(os.path.join(GOOGLE_DRIVE_PATH, 'weights')):
+         handle_files(root, files, True)
+
+     # Copy files in batches
+     total_files = len(files_to_copy)
+     start_time = time.time()
+     for i, (source, dest) in enumerate(files_to_copy, start=1):
+         with open(source, 'rb') as src, open(dest, 'wb') as dst:
+             shutil.copyfileobj(src, dst, 1024*1024)  # 1MB buffer size
+         # Report progress every 5 seconds or after every 100 files, whichever is less frequent
+         if time.time() - start_time > 5 or i % 100 == 0:
+             print(f'\rCopying file {i} of {total_files} ({i * 100 / total_files:.2f}%)', end="")
+             start_time = time.time()
+     print(f'\nImported {len(files_to_copy)} files from Google Drive backup')
+
+     # Copy weights in batches
+     total_weights = len(weights_to_copy)
+     start_time = time.time()
+     for i, (source, dest) in enumerate(weights_to_copy, start=1):
+         with open(source, 'rb') as src, open(dest, 'wb') as dst:
+             shutil.copyfileobj(src, dst, 1024*1024)  # 1MB buffer size
+         # Report progress every 5 seconds or after every 100 files, whichever is less frequent
+         if time.time() - start_time > 5 or i % 100 == 0:
+             print(f'\rCopying weight file {i} of {total_weights} ({i * 100 / total_weights:.2f}%)', end="")
+             start_time = time.time()
+     if weights_exist:
+         print(f'\nImported {len(weights_to_copy)} weight files')
+         print("Copied weights from Google Drive backup to local weights folder.")
+     else:
+         print("\nNo weights found in Google Drive backup.")
+     print("Google Drive backup import completed.")
+
+ def backup_files():
+     print("\nStarting backup loop...")
+     last_backup_timestamps_path = os.path.join(LOGS_FOLDER, 'last_backup_timestamps.txt')
+     fully_updated = False  # boolean to track if all files are up to date
+     try:
+         with open(last_backup_timestamps_path, 'r') as f:
+             last_backup_timestamps = dict(line.strip().split(':') for line in f)
+     except (FileNotFoundError, ValueError):  # missing or malformed timestamp file
+         last_backup_timestamps = {}
+
+     while True:
+         updated = False
+         files_to_copy = []
+         files_to_delete = []
+
+         for root, dirs, files in os.walk(LOGS_FOLDER):
+             for filename in files:
+                 if filename != 'last_backup_timestamps.txt':
+                     filepath = os.path.join(root, filename)
+                     if os.path.isfile(filepath):
+                         backup_filepath = os.path.join(GOOGLE_DRIVE_PATH, os.path.relpath(filepath, LOGS_FOLDER))
+                         backup_folderpath = os.path.dirname(backup_filepath)
+
+                         if not os.path.exists(backup_folderpath):
+                             os.makedirs(backup_folderpath)
+                             print(f'Created backup folder: {backup_folderpath}', flush=True)
+
+                         # check if file has changed since last backup
+                         last_backup_timestamp = last_backup_timestamps.get(filepath)
+                         current_timestamp = os.path.getmtime(filepath)
+                         if last_backup_timestamp is None or float(last_backup_timestamp) < current_timestamp:
+                             files_to_copy.append((filepath, backup_filepath))  # add to list of files to copy
+                             last_backup_timestamps[filepath] = str(current_timestamp)  # update last backup timestamp
+                             updated = True
+                             fully_updated = False  # if a file is updated, all files are not up to date
+
+         # check if any files were deleted in Colab and delete them from the backup drive
+         for filepath in list(last_backup_timestamps.keys()):
+             if not os.path.exists(filepath):
+                 backup_filepath = os.path.join(GOOGLE_DRIVE_PATH, os.path.relpath(filepath, LOGS_FOLDER))
+                 if os.path.exists(backup_filepath):
+                     files_to_delete.append(backup_filepath)  # add to list of files to delete
+                 del last_backup_timestamps[filepath]
+                 updated = True
+                 fully_updated = False  # if a file is deleted, all files are not up to date
+
+         # Copy files in batches
+         if files_to_copy:
+             for source, dest in files_to_copy:
+                 shutil.copy2(source, dest)
+             print(f'Copied or updated {len(files_to_copy)} files')
+
+         # Delete files in batches
+         if files_to_delete:
+             for file in files_to_delete:
+                 os.remove(file)
+             print(f'Deleted {len(files_to_delete)} files')
+
+         if not updated and not fully_updated:
+             print("Files are up to date.")
+             fully_updated = True  # if all files are up to date, set the boolean to True
+             copy_weights_folder_to_drive()  # NOTE: defined in backups.py; import it if this module is run standalone
+
+         with open(last_backup_timestamps_path, 'w') as f:
+             for filepath, timestamp in last_backup_timestamps.items():
+                 f.write(f'{filepath}:{timestamp}\n')
+         time.sleep(15)  # wait for 15 seconds before checking again
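The import path above streams each file through `shutil.copyfileobj` with a 1 MB buffer instead of `shutil.copy2`. A standalone sketch of that design choice (the helper name is hypothetical): it trades metadata preservation for fewer per-file syscall round trips, which matters on Drive-backed filesystems.

    import shutil

    def buffered_copy(source, dest, buffer_size=1024 * 1024):
        # Stream through a fixed-size buffer; unlike shutil.copy2,
        # this does not preserve file timestamps or permissions.
        with open(source, 'rb') as src, open(dest, 'wb') as dst:
            shutil.copyfileobj(src, dst, buffer_size)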
Applio-RVC-Fork/utils/clonerepo_experimental.py ADDED
@@ -0,0 +1,253 @@
+ import os
+ import subprocess
+ import shutil
+ from concurrent.futures import ThreadPoolExecutor, as_completed
+ from tqdm.notebook import tqdm
+ from pathlib import Path
+ import requests
+
+ def run_script():
+     def run_cmd(cmd):
+         # capture the output so that process.stdout is actually populated
+         process = subprocess.run(cmd, shell=True, check=True, text=True, capture_output=True)
+         return process.stdout
+
+     # Change the current directory to /content/
+     os.chdir('/content/')
+     print("Changing dir to /content/")
+
+     # Your function to edit the file
+     def edit_file(file_path):
+         temp_file_path = "/tmp/temp_file.py"
+         changes_made = False
+         with open(file_path, "r") as file, open(temp_file_path, "w") as temp_file:
+             previous_line = ""
+             second_previous_line = ""
+             for line in file:
+                 new_line = line.replace("value=160", "value=128")
+                 if new_line != line:
+                     print("Replaced 'value=160' with 'value=128'")
+                     changes_made = True
+                     line = new_line
+
+                 new_line = line.replace("crepe hop length: 160", "crepe hop length: 128")
+                 if new_line != line:
+                     print("Replaced 'crepe hop length: 160' with 'crepe hop length: 128'")
+                     changes_made = True
+                     line = new_line
+
+                 new_line = line.replace("value=0.88", "value=0.75")
+                 if new_line != line:
+                     print("Replaced 'value=0.88' with 'value=0.75'")
+                     changes_made = True
+                     line = new_line
+
+                 if "label=i18n(\"输入源音量包络替换输出音量包络融合比例,越靠近1越使用输出包络\")" in previous_line and "value=1," in line:
+                     new_line = line.replace("value=1,", "value=0.25,")
+                     if new_line != line:
+                         print("Replaced 'value=1,' with 'value=0.25,' based on the condition")
+                         changes_made = True
+                         line = new_line
+
+                 if "label=i18n(\"总训练轮数total_epoch\")" in previous_line and "value=20," in line:
+                     new_line = line.replace("value=20,", "value=500,")
+                     if new_line != line:
+                         print("Replaced 'value=20,' with 'value=500,' based on the condition for DEFAULT EPOCH")
+                         changes_made = True
+                         line = new_line
+
+                 if 'choices=["pm", "harvest", "dio", "crepe", "crepe-tiny", "mangio-crepe", "mangio-crepe-tiny"], # Fork Feature. Add Crepe-Tiny' in previous_line:
+                     if 'value="pm",' in line:
+                         new_line = line.replace('value="pm",', 'value="mangio-crepe",')
+                         if new_line != line:
+                             print("Replaced 'value=\"pm\",' with 'value=\"mangio-crepe\",' based on the condition")
+                             changes_made = True
+                             line = new_line
+
+                 new_line = line.replace('label=i18n("输入训练文件夹路径"), value="E:\\\\语音音频+标注\\\\米津玄师\\\\src"', 'label=i18n("输入训练文件夹路径"), value="/content/dataset/"')
+                 if new_line != line:
+                     print("Replaced the default training folder path with '/content/dataset/'")
+                     changes_made = True
+                     line = new_line
+
+                 if 'label=i18n("是否仅保存最新的ckpt文件以节省硬盘空间"),' in second_previous_line:
+                     if 'value=i18n("否"),' in line:
+                         new_line = line.replace('value=i18n("否"),', 'value=i18n("是"),')
+                         if new_line != line:
+                             print("Replaced 'value=i18n(\"否\"),' with 'value=i18n(\"是\"),' based on the condition for SAVE ONLY LATEST")
+                             changes_made = True
+                             line = new_line
+
+                 if 'label=i18n("是否在每次保存时间点将最终小模型保存至weights文件夹"),' in second_previous_line:
+                     if 'value=i18n("否"),' in line:
+                         new_line = line.replace('value=i18n("否"),', 'value=i18n("是"),')
+                         if new_line != line:
+                             print("Replaced 'value=i18n(\"否\"),' with 'value=i18n(\"是\"),' based on the condition for SAVE SMALL WEIGHTS")
+                             changes_made = True
+                             line = new_line
+
+                 temp_file.write(line)
+                 second_previous_line = previous_line
+                 previous_line = line
+
+         # After finishing, replace the original file with the temp one
+         shutil.move(temp_file_path, file_path)
+
+         if changes_made:
+             print("Changes made and file saved successfully.")
+         else:
+             print("No changes were needed.")
+
+     # Define the repo path
+     repo_path = '/content/Applio-RVC-Fork'
+
+     def copy_all_files_in_directory(src_dir, dest_dir):
+         # Iterate over all files in source directory
+         for item in Path(src_dir).glob('*'):
+             if item.is_file():
+                 # Copy each file to destination directory
+                 shutil.copy(item, dest_dir)
+             else:
+                 # If it's a directory, make a new directory in the destination and copy the files recursively
+                 new_dest = Path(dest_dir) / item.name
+                 new_dest.mkdir(exist_ok=True)
+                 copy_all_files_in_directory(str(item), str(new_dest))
+
+     def clone_and_copy_repo(repo_path):
+         # New repository link
+         new_repo_link = "https://github.com/IAHispano/Applio-RVC-Fork/"
+         # Temporary path to clone the repository
+         temp_repo_path = "/content/temp_Applio-RVC-Fork"
+         # New folder name
+         new_folder_name = "Applio-RVC-Fork"
+
+         # Clone the latest code from the new repository to a temporary location
+         run_cmd(f"git clone {new_repo_link} {temp_repo_path}")
+         os.chdir(temp_repo_path)
+
+         # Step through a fixed sequence of pinned commits; the final checkout
+         # determines the state of the working tree that gets copied below.
+         run_cmd("git checkout 3fa4dad3d8961e5ca2522e9e12c0b4ddb71ad402")
+         run_cmd("git checkout f9e606c279cb49420597519b0a83b92be81e42e4")
+         run_cmd("git checkout 9e305588844c5442d58add1061b29beeca89d679")
+         run_cmd("git checkout bf92dc1eb54b4f28d6396a4d1820a25896cc9af8")
+         run_cmd("git checkout c3810e197d3cb98039973b2f723edf967ecd9e61")
+         run_cmd("git checkout a33159efd134c2413b0afe26a76b7dc87926d2de")
+         run_cmd("git checkout 24e251fb62c662e39ac5cf9253cc65deb9be94ec")
+         run_cmd("git checkout ad5667d3017e93232dba85969cddac1322ba2902")
+         run_cmd("git checkout ce9715392cf52dd5a0e18e00d1b5e408f08dbf27")
+         run_cmd("git checkout 7c7da3f2ac68f3bd8f3ad5ca5c700f18ab9f90eb")
+         run_cmd("git checkout 4ac395eab101955e8960b50d772c26f592161764")
+         run_cmd("git checkout b15b358702294c7375761584e5276c811ffab5e8")
+         run_cmd("git checkout 1501793dc490982db9aca84a50647764caa66e51")
+         run_cmd("git checkout 21f7faf57219c75e6ba837062350391a803e9ae2")
+         run_cmd("git checkout b5eb689fbc409b49f065a431817f822f554cebe7")
+         run_cmd("git checkout 7e02fae1ebf24cb151bf6cbe787d06734aa65862")
+         run_cmd("git checkout 6aea5ea18ed0b9a1e03fa5d268d6bc3c616672a9")
+         run_cmd("git checkout f0f9b25717e59116473fb42bd7f9252cfc32b398")
+         run_cmd("git checkout b394de424088a81fc081224bc27338a8651ad3b2")
+         run_cmd("git checkout f1999406a88b80c965d2082340f5ea2bfa9ab67a")
+         run_cmd("git checkout d98a0fa8dc715308dfc73eac5c553b69c6ee072b")
+         run_cmd("git checkout d73267a415fb0eba98477afa43ef71ffd82a7157")
+         run_cmd("git checkout 1a03d01356ae79179e1fb8d8915dc9cc79925742")
+         run_cmd("git checkout 81497bb3115e92c754300c9b3992df428886a3e9")
+         run_cmd("git checkout c5af1f8edcf79cb70f065c0110e279e78e48caf9")
+         run_cmd("git checkout cdb3c90109387fa4dfa92f53c3864c71170ffc77")
+
+         # Edit the file here, before copying
+         #edit_file(f"{temp_repo_path}/infer-web.py")
+
+         # Copy all files from the cloned repository to the existing path
+         copy_all_files_in_directory(temp_repo_path, repo_path)
+         print(f"Copying all {new_folder_name} files from GitHub.")
+
+         # Change working directory back to /content/
+         os.chdir('/content/')
+         print("Changed path back to /content/")
+
+         # Remove the temporary cloned repository
+         shutil.rmtree(temp_repo_path)
+
+     # Call the function
+     clone_and_copy_repo(repo_path)
+
+     # Download the credentials file for RVC archive sheet
+     os.makedirs('/content/Applio-RVC-Fork/stats/', exist_ok=True)
+     run_cmd("wget -q https://cdn.discordapp.com/attachments/945486970883285045/1114717554481569802/peppy-generator-388800-07722f17a188.json -O /content/Applio-RVC-Fork/stats/peppy-generator-388800-07722f17a188.json")
+
+     # Forcefully delete any existing torchcrepe dependencies downloaded from an earlier run just in case
+     shutil.rmtree('/content/Applio-RVC-Fork/torchcrepe', ignore_errors=True)
+     shutil.rmtree('/content/torchcrepe', ignore_errors=True)
+
+     # Download the torchcrepe folder from the maxrmorrison/torchcrepe repository
+     run_cmd("git clone https://github.com/maxrmorrison/torchcrepe.git")
+     shutil.move('/content/torchcrepe/torchcrepe', '/content/Applio-RVC-Fork/')
+     shutil.rmtree('/content/torchcrepe', ignore_errors=True)  # Delete the torchcrepe repository folder
+
+     # Change the current directory to /content/Applio-RVC-Fork
+     os.chdir('/content/Applio-RVC-Fork')
+     os.makedirs('pretrained', exist_ok=True)
+     os.makedirs('uvr5_weights', exist_ok=True)
+
+ # Module level, so clone_repository can submit it to the executor below
+ def download_file(url, filepath):
+     response = requests.get(url, stream=True)
+     response.raise_for_status()
+
+     with open(filepath, "wb") as file:
+         for chunk in response.iter_content(chunk_size=8192):
+             if chunk:
+                 file.write(chunk)
+
+ def download_pretrained_models():
+     pretrained_models = {
+         "pretrained": [
+             "D40k.pth",
+             "G40k.pth",
+             "f0D40k.pth",
+             "f0G40k.pth"
+         ],
+         "pretrained_v2": [
+             "D40k.pth",
+             "G40k.pth",
+             "f0D40k.pth",
+             "f0G40k.pth",
+             "f0G48k.pth",
+             "f0D48k.pth"
+         ],
+         "uvr5_weights": [
+             "HP2-人声vocals+非人声instrumentals.pth",
+             "HP5-主旋律人声vocals+其他instrumentals.pth",
+             "VR-DeEchoNormal.pth",
+             "VR-DeEchoDeReverb.pth",
+             "VR-DeEchoAggressive.pth",
+             "HP5_only_main_vocal.pth",
+             "HP3_all_vocals.pth",
+             "HP2_all_vocals.pth"
+         ]
+     }
+     part2 = "I"
+     base_url = "https://huggingface.co/lj1995/VoiceConversionWebU" + part2 + "/resolve/main/"
+     base_path = "/content/Applio-RVC-Fork/"
+     base_pathm = base_path
+
+     # Calculate total number of files to download
+     total_files = sum(len(files) for files in pretrained_models.values()) + 1  # +1 for hubert_base.pt
+
+     with tqdm(total=total_files, desc="Downloading files") as pbar:
+         for folder, models in pretrained_models.items():
+             folder_path = os.path.join(base_path, folder)
+             os.makedirs(folder_path, exist_ok=True)
+             for model in models:
+                 url = base_url + folder + "/" + model
+                 filepath = os.path.join(folder_path, model)
+                 download_file(url, filepath)
+                 pbar.update()
+
+         # Download hubert_base.pt to the base path
+         hubert_url = base_url + "hubert_base.pt"
+         hubert_filepath = os.path.join(base_pathm, "hubert_base.pt")
+         download_file(hubert_url, hubert_filepath)
+         pbar.update()
+
+ def clone_repository(run_download):
+     with ThreadPoolExecutor(max_workers=2) as executor:
+         executor.submit(run_script)
+         if run_download:
+             executor.submit(download_pretrained_models)
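For reference, a hedged sketch of how a notebook cell might drive this module (assuming it is importable as `utils.clonerepo_experimental`):

    from utils.clonerepo_experimental import clone_repository

    # Clone the repo and apply the pinned checkouts on one worker thread
    # while the pretrained models download on a second.
    clone_repository(run_download=True)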
Applio-RVC-Fork/utils/dependency.py ADDED
@@ -0,0 +1,170 @@
+ import os
+ import csv
+ import shutil
+ import tarfile
+ import subprocess
+ from pathlib import Path
+ from datetime import datetime
+
+ def install_packages_but_jank_af():
+     packages = ['build-essential', 'python3-dev', 'ffmpeg', 'aria2']
+     pip_packages = ['pip', 'setuptools', 'wheel', 'httpx==0.23.0', 'faiss-gpu', 'fairseq', 'gradio==3.34.0',
+                     'ffmpeg', 'ffmpeg-python', 'praat-parselmouth', 'pyworld', 'numpy==1.23.5',
+                     'numba==0.56.4', 'librosa==0.9.2', 'mega.py', 'gdown', 'onnxruntime', 'pyngrok==4.1.12',
+                     'gTTS', 'elevenlabs', 'wget', 'tensorboardX', 'unidecode', 'huggingface-hub', 'stftpitchshift==1.5.1',
+                     'yt-dlp', 'pedalboard', 'pathvalidate', 'nltk', 'edge-tts', 'git+https://github.com/suno-ai/bark.git', 'python-dotenv', 'av']
+
+     print("Updating and installing system packages...")
+     for package in packages:
+         print(f"Installing {package}...")
+         subprocess.check_call(['apt-get', 'install', '-qq', '-y', package])
+
+     print("Updating and installing pip packages...")
+     subprocess.check_call(['pip', 'install', '--upgrade'] + pip_packages)
+
+     print('Packages up to date.')
+
+
+ def setup_environment(ForceUpdateDependencies, ForceTemporaryStorage):
+     # Mounting Google Drive
+     if not ForceTemporaryStorage:
+         from google.colab import drive
+
+         if not os.path.exists('/content/drive'):
+             drive.mount('/content/drive')
+         else:
+             print('Drive is already mounted. Proceeding...')
+
+     # Function to install dependencies with progress
+     def install_packages():
+         packages = ['build-essential', 'python3-dev', 'ffmpeg', 'aria2']
+         pip_packages = ['pip', 'setuptools', 'wheel', 'httpx==0.23.0', 'faiss-gpu', 'fairseq', 'gradio==3.34.0',
+                         'ffmpeg', 'ffmpeg-python', 'praat-parselmouth', 'pyworld', 'numpy==1.23.5',
+                         'numba==0.56.4', 'librosa==0.9.2', 'mega.py', 'gdown', 'onnxruntime', 'pyngrok==4.1.12',
+                         'gTTS', 'elevenlabs', 'wget', 'tensorboardX', 'unidecode', 'huggingface-hub', 'stftpitchshift==1.5.1',
+                         'yt-dlp', 'pedalboard', 'pathvalidate', 'nltk', 'edge-tts', 'git+https://github.com/suno-ai/bark.git', 'python-dotenv', 'av']
+
+         print("Updating and installing system packages...")
+         for package in packages:
+             print(f"Installing {package}...")
+             subprocess.check_call(['apt-get', 'install', '-qq', '-y', package])
+
+         print("Updating and installing pip packages...")
+         subprocess.check_call(['pip', 'install', '--upgrade'] + pip_packages)
+
+         print('Packages up to date.')
+
+     # Function to scan a directory and write filenames and timestamps
+     def scan_and_write(base_path, output_file):
+         with open(output_file, 'w', newline='') as f:
+             writer = csv.writer(f)
+             for dirpath, dirs, files in os.walk(base_path):
+                 for filename in files:
+                     fname = os.path.join(dirpath, filename)
+                     try:
+                         mtime = os.path.getmtime(fname)
+                         writer.writerow([fname, mtime])
+                     except Exception as e:
+                         print(f'Skipping unreadable file {fname}: {str(e)}')
+         print(f'Finished recording filesystem timestamps to {output_file}.')
+
+     # Function to compare files
+     def compare_files(old_file, new_file):
+         old_files = {}
+         new_files = {}
+
+         with open(old_file, 'r') as f:
+             reader = csv.reader(f)
+             old_files = {rows[0]: rows[1] for rows in reader}
+
+         with open(new_file, 'r') as f:
+             reader = csv.reader(f)
+             new_files = {rows[0]: rows[1] for rows in reader}
+
+         removed_files = old_files.keys() - new_files.keys()
+         added_files = new_files.keys() - old_files.keys()
+         unchanged_files = old_files.keys() & new_files.keys()
+
+         changed_files = {f for f in unchanged_files if old_files[f] != new_files[f]}
+
+         for file in removed_files:
+             print(f'File has been removed: {file}')
+
+         for file in changed_files:
+             print(f'File has been updated: {file}')
+
+         return list(added_files) + list(changed_files)
+
+     # Check if CachedRVC.tar.gz exists
+     if ForceTemporaryStorage:
+         file_path = '/content/CachedRVC.tar.gz'
+     else:
+         file_path = '/content/drive/MyDrive/RVC_Cached/CachedRVC.tar.gz'
+
+     content_file_path = '/content/CachedRVC.tar.gz'
+     extract_path = '/'
+
+     if not os.path.exists(file_path):
+         folder_path = os.path.dirname(file_path)
+         os.makedirs(folder_path, exist_ok=True)
+         print('No cached dependency install found. Attempting to download GitHub backup...')
+
+         try:
+             download_url = "https://github.com/kalomaze/QuickMangioFixes/releases/download/release3/CachedRVC.tar.gz"
+             # check=True so a failed download raises instead of silently leaving a corrupt file
+             subprocess.run(["wget", "-O", file_path, download_url], check=True)
+             print('Download completed successfully!')
+         except Exception as e:
+             print('Download failed:', str(e))
+
+             # Delete the failed download file
+             if os.path.exists(file_path):
+                 os.remove(file_path)
+                 print('Failed download file deleted. Continuing manual backup...')
+
+     if Path(file_path).exists():
+         if ForceTemporaryStorage:
+             print('Finished downloading CachedRVC.tar.gz.')
+         else:
+             print('CachedRVC.tar.gz found on Google Drive. Proceeding to copy and extract...')
+
+         # Check if ForceTemporaryStorage is True and skip copying if it is
+         if ForceTemporaryStorage:
+             pass
+         else:
+             shutil.copy(file_path, content_file_path)
+
+         print('Beginning backup copy operation...')
+
+         with tarfile.open(content_file_path, 'r:gz') as tar:
+             for member in tar.getmembers():
+                 try:
+                     tar.extract(member, extract_path)
+                 except Exception as e:
+                     print("Failed to extract a file (this isn't normal)... forcing an update to compensate")
+                     ForceUpdateDependencies = True
+             print(f'Extraction of {content_file_path} to {extract_path} completed.')
+
+         if ForceUpdateDependencies:
+             install_packages()
+             ForceUpdateDependencies = False
+     else:
+         print('CachedRVC.tar.gz not found. Proceeding to create an index of all current files...')
+         scan_and_write('/usr/', '/content/usr_files.csv')
+
+         install_packages()
+
+         scan_and_write('/usr/', '/content/usr_files_new.csv')
+         changed_files = compare_files('/content/usr_files.csv', '/content/usr_files_new.csv')
+
+         with tarfile.open('/content/CachedRVC.tar.gz', 'w:gz') as new_tar:
+             for file in changed_files:
+                 new_tar.add(file)
+                 print(f'Added to tar: {file}')
+
+         os.makedirs('/content/drive/MyDrive/RVC_Cached', exist_ok=True)
+         shutil.copy('/content/CachedRVC.tar.gz', '/content/drive/MyDrive/RVC_Cached/CachedRVC.tar.gz')
+         print('Updated CachedRVC.tar.gz copied to Google Drive.')
+         print('Dependencies fully up to date; future runs should be faster.')
+
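A usage sketch for the entry point above (parameter names come from the function signature; the values shown are illustrative):

    from utils.dependency import setup_environment

    # Typical first run: mount Drive, then restore the cached
    # dependency tarball or build it from scratch.
    setup_environment(ForceUpdateDependencies=False, ForceTemporaryStorage=False)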
Applio-RVC-Fork/utils/i18n.py ADDED
@@ -0,0 +1,28 @@
+ import locale
+ import json
+ import os
+
+
+ def load_language_list(language):
+     with open(f"./i18n/{language}.json", "r", encoding="utf-8") as f:
+         language_list = json.load(f)
+     return language_list
+
+
+ class I18nAuto:
+     def __init__(self, language=None):
+         if language in ["Auto", None]:
+             language = "es_ES"
+         if not os.path.exists(f"./i18n/{language}.json"):
+             language = "es_ES"
+         # NOTE: the fork currently forces Spanish regardless of the checks above
+         language = "es_ES"
+         self.language = language
+         # print("Use Language:", language)
+         self.language_map = load_language_list(language)
+
+     def __call__(self, key):
+         return self.language_map.get(key, key)
+
+     def print(self):
+         # print("Use Language:", self.language)
+         print("")
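A short usage sketch (assuming the module is importable as `i18n` and that `./i18n/es_ES.json` exists relative to the working directory, as `load_language_list` requires):

    from i18n import I18nAuto

    i18n = I18nAuto()  # currently always resolves to es_ES
    print(i18n("Some untranslated key"))  # unknown keys pass through unchanged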