TheStinger cappuch committed
Commit b98b4fc
Parent: 3664c30

updated ui, fixed audio inference thing, need to fix model selecting. ilaria tts added (#1)


- updated ui, fixed audio inference thing, need to fix model selecting. ilaria tts added (f7216ea4ce7b4821938cea9befa19e21112e5087)


Co-authored-by: Mikus <cappuch@users.noreply.huggingface.co>

Files changed (2)
  1. app.py +236 -209
  2. tts_voice.py +230 -0
app.py CHANGED
@@ -1,209 +1,236 @@
- import gradio as gr
- import requests
- import random
- import os
- import zipfile # built in module for unzipping files (thank god)
- import librosa
- import time
- from infer_rvc_python import BaseLoader
- from pydub import AudioSegment
-
- # fucking dogshit toggle
- try:
-     import spaces
-     spaces_status = True
- except ImportError:
-     spaces_status = False
-
- converter = BaseLoader(only_cpu=False, hubert_path=None, rmvpe_path=None) # <- yeah so like this handles rvc
-
- global pth_file
- global index_file
-
- pth_file = "model.pth"
- index_file = "model.index"
-
- #CONFIGS
- TEMP_DIR = "temp"
- MODEL_PREFIX = "model"
- PITCH_ALGO_OPT = [
-     "pm",
-     "harvest",
-     "crepe",
-     "rmvpe",
-     "rmvpe+",
- ]
-
-
- os.makedirs(TEMP_DIR, exist_ok=True)
-
- def unzip_file(file):
-     filename = os.path.basename(file).split(".")[0] # converts "model.zip" to "model" so we can do things
-     with zipfile.ZipFile(file, 'r') as zip_ref:
-         zip_ref.extractall(os.path.join(TEMP_DIR, filename)) # might not be very ram efficient...
-     return True
-
-
- def progress_bar(total, current): # best progress bar ever trust me sunglasses emoji 😎
-     return "[" + "=" * int(current / total * 20) + ">" + " " * (20 - int(current / total * 20)) + "] " + str(int(current / total * 100)) + "%"
-
- def download_from_url(url, filename=None):
-     if "huggingface" not in url:
-         return ["The URL must be from huggingface", "Failed", "Failed"]
-     if filename is None:
-         filename = os.path.join(TEMP_DIR, MODEL_PREFIX + str(random.randint(1, 1000)) + ".zip")
-     response = requests.get(url)
-     total = int(response.headers.get('content-length', 0)) # bytes to download (length of the file)
-     if total > 500000000:
-
-         return ["The file is too large. You can only download files up to 500 MB in size.", "Failed", "Failed"]
-     current = 0
-     with open(filename, "wb") as f:
-         for data in response.iter_content(chunk_size=4096): # download in chunks of 4096 bytes (4kb - helps with memory usage and speed)
-             f.write(data)
-             current += len(data)
-             print(progress_bar(total, current), end="\r") # \r is a carriage return, it moves the cursor to the start of the line so its like tqdm sunglasses emoji 😎
-
-     # unzip because the model is in a zip file lel
-
-     try:
-         unzip_file(filename)
-     except Exception as e:
-         return ["Failed to unzip the file", "Failed", "Failed"] # return early if it fails and like tell the user but its dogshit hahahahahahaha 😎 According to all known laws aviation, there is no way a bee should be able to fly.
-     unzipped_dir = os.path.join(TEMP_DIR, os.path.basename(filename).split(".")[0]) # just do what we did in unzip_file because we need the directory
-     pth_files = []
-     index_files = []
-     for root, dirs, files in os.walk(unzipped_dir): # could be done more efficiently because nobody stores models in subdirectories but like who cares (it's a futureproofing thing lel)
-         for file in files:
-             if file.endswith(".pth"):
-                 pth_files.append(os.path.join(root, file))
-             elif file.endswith(".index"):
-                 index_files.append(os.path.join(root, file))
-
-     print(pth_files, index_files) # debug print because im fucking stupid and i need to see what is going on
-     global pth_file
-     global index_file
-     pth_file = pth_files[0]
-     index_file = index_files[0]
-     return ["Downloaded as " + filename, pth_files[0], index_files[0]]
-
- if spaces_status:
-     @spaces.GPU()
-     def convert_now(audio_files, random_tag, converter):
-         return converter(
-             audio_files,
-             random_tag,
-             overwrite=False,
-             parallel_workers=8
-         )
- else:
-     def convert_now(audio_files, random_tag, converter):
-         return converter(
-             audio_files,
-             random_tag,
-             overwrite=False,
-             parallel_workers=8
-         )
-
- def run(
-     audio_files,
-     file_m,
-     pitch_alg,
-     pitch_lvl,
-     file_index,
-     index_inf,
-     r_m_f,
-     e_r,
-     c_b_p,
- ):
-     if not audio_files:
-         raise ValueError("The audio pls")
-
-     if isinstance(audio_files, str):
-         audio_files = [audio_files]
-
-     try:
-         duration_base = librosa.get_duration(filename=audio_files[0])
-         print("Duration:", duration_base)
-     except Exception as e:
-         print(e)
-
-     random_tag = "USER_"+str(random.randint(10000000, 99999999))
-
-     converter.apply_conf(
-         tag=random_tag,
-         file_model=file_m,
-         pitch_algo=pitch_alg,
-         pitch_lvl=pitch_lvl,
-         file_index=file_index,
-         index_influence=index_inf,
-         respiration_median_filtering=r_m_f,
-         envelope_ratio=e_r,
-         consonant_breath_protection=c_b_p,
-         resample_sr=44100 if audio_files[0].endswith('.mp3') else 0,
-     )
-     time.sleep(0.1)
-
-     result = convert_now(audio_files, random_tag, converter)
-
-     return result[0]
-
- with gr.Blocks() as demo:
-     gr.Markdown("## Ilaria RVC 💖")
-     with gr.Tab("Inference"):
-         sound_gui = gr.Audio(value=None,type="filepath",autoplay=False,visible=True,)
-         pth_file_ui = gr.Textbox(label="Model pth file",value=pth_file,visible=False,interactive=False,) # gradio is fucking weird (im with stupid v)
-         index_file_ui = gr.Textbox(label="Index pth file",value=index_file,visible=False,interactive=False,) # gradio is fucking weird (im with stupid ^)
-         pitch_algo_conf = gr.Dropdown(PITCH_ALGO_OPT,value=PITCH_ALGO_OPT[4],label="Pitch algorithm",visible=True,interactive=True,)
-         pitch_lvl_conf = gr.Slider(label="Pitch level",minimum=-24,maximum=24,step=1,value=0,visible=True,interactive=True,)
-         index_inf_conf = gr.Slider(minimum=0,maximum=1,label="Index influence",value=0.75,)
-         respiration_filter_conf = gr.Slider(minimum=0,maximum=7,label="Respiration median filtering",value=3,step=1,interactive=True,)
-         envelope_ratio_conf = gr.Slider(minimum=0,maximum=1,label="Envelope ratio",value=0.25,interactive=True,)
-         consonant_protec_conf = gr.Slider(minimum=0,maximum=0.5,label="Consonant breath protection",value=0.5,interactive=True,)
-         button_conf = gr.Button("Convert",variant="primary",)
-         output_conf = gr.Audio(type="filepath",label="Output",)
-
-         button_conf.click(
-             run,
-             inputs=[
-                 sound_gui,
-                 pth_file_ui,
-                 pitch_algo_conf,
-                 pitch_lvl_conf,
-                 index_file_ui, # put a bullet through my head
-                 index_inf_conf,
-                 respiration_filter_conf,
-                 envelope_ratio_conf,
-                 consonant_protec_conf,
-             ],
-             outputs=[output_conf],
-         )
-
-     with gr.Tab("Download Model"):
-         # markdown
-         gr.Markdown(
-             "Download the model from the following URL and upload it here. (Hugginface RVC model)"
-         )
-         model = gr.Textbox(lines=1, label="Model URL")
-         download_button = gr.Button("Download Model")
-         status = gr.Textbox(lines=1, label="Status", placeholder="Waiting....", interactive=False)
-         model_pth = gr.Textbox(lines=1, label="Model pth file", placeholder="Waiting....", interactive=False)
-         index_pth = gr.Textbox(lines=1, label="Index pth file", placeholder="Waiting....", interactive=False)
-         download_button.click(download_from_url, model, outputs=[status, model_pth, index_pth])
-         set_model_button = gr.Button("Set Model")
-         #set_model_button.click(
-
-     with gr.Tab("Credits"):
-         gr.Markdown(
-             """
-             Ilaria RVC made by [Ilaria](https://huggingface.co/TheStinger) suport her on [ko-fi](https://ko-fi.com/ilariaowo)
-
-             The Inference code is made by [r3gm](https://huggingface.co/r3gm) (his module helped form this space 💖)
-
-             made with ❤️ by [mikus](https://github.com/cappuch) - i hacked it up lel
-
-             ### **In loving memory of JLabDX** 🕊️
-             """
-         )
-
- demo.queue(api_open=False).launch(show_api=False) # idk ilaria if you want or dont want to
+ import gradio as gr
+ import requests
+ import random
+ import os
+ import zipfile # built in module for unzipping files (thank god)
+ import librosa
+ import time
+ from infer_rvc_python import BaseLoader
+ from pydub import AudioSegment
+ from tts_voice import tts_order_voice
+ import edge_tts
+ import tempfile
+ import anyio
+
+ language_dict = tts_order_voice
+
+ # ilaria tts implementation :rofl:
+ async def text_to_speech_edge(text, language_code):
+     voice = language_dict[language_code]
+     communicate = edge_tts.Communicate(text, voice)
+     with tempfile.NamedTemporaryFile(delete=False, suffix=".mp3") as tmp_file:
+         tmp_path = tmp_file.name
+
+     await communicate.save(tmp_path)
+
+     return tmp_path
+
+ # fucking dogshit toggle
+ try:
+     import spaces
+     spaces_status = True
+ except ImportError:
+     spaces_status = False
+
+ converter = BaseLoader(only_cpu=False, hubert_path=None, rmvpe_path=None) # <- yeah so like this handles rvc
+
+ global pth_file
+ global index_file
+
+ pth_file = "model.pth"
+ index_file = "model.index"
+
+ #CONFIGS
+ TEMP_DIR = "temp"
+ MODEL_PREFIX = "model"
+ PITCH_ALGO_OPT = [
+     "pm",
+     "harvest",
+     "crepe",
+     "rmvpe",
+     "rmvpe+",
+ ]
+
+
+ os.makedirs(TEMP_DIR, exist_ok=True)
+
+ def unzip_file(file):
+     filename = os.path.basename(file).split(".")[0] # converts "model.zip" to "model" so we can do things
+     with zipfile.ZipFile(file, 'r') as zip_ref:
+         zip_ref.extractall(os.path.join(TEMP_DIR, filename)) # might not be very ram efficient...
+     return True
+
+
+ def progress_bar(total, current): # best progress bar ever trust me sunglasses emoji 😎
+     return "[" + "=" * int(current / total * 20) + ">" + " " * (20 - int(current / total * 20)) + "] " + str(int(current / total * 100)) + "%"
+
+ def download_from_url(url, filename=None):
+     if "huggingface" not in url:
+         return ["The URL must be from huggingface", "Failed", "Failed"]
+     if filename is None:
+         filename = os.path.join(TEMP_DIR, MODEL_PREFIX + str(random.randint(1, 1000)) + ".zip")
+     response = requests.get(url)
+     total = int(response.headers.get('content-length', 0)) # bytes to download (length of the file)
+     if total > 500000000:
+
+         return ["The file is too large. You can only download files up to 500 MB in size.", "Failed", "Failed"]
+     current = 0
+     with open(filename, "wb") as f:
+         for data in response.iter_content(chunk_size=4096): # download in chunks of 4096 bytes (4kb - helps with memory usage and speed)
+             f.write(data)
+             current += len(data)
+             print(progress_bar(total, current), end="\r") # \r is a carriage return, it moves the cursor to the start of the line so its like tqdm sunglasses emoji 😎
+
+     # unzip because the model is in a zip file lel
+
+     try:
+         unzip_file(filename)
+     except Exception as e:
+         return ["Failed to unzip the file", "Failed", "Failed"] # return early if it fails and like tell the user but its dogshit hahahahahahaha 😎 According to all known laws aviation, there is no way a bee should be able to fly.
+     unzipped_dir = os.path.join(TEMP_DIR, os.path.basename(filename).split(".")[0]) # just do what we did in unzip_file because we need the directory
+     pth_files = []
+     index_files = []
+     for root, dirs, files in os.walk(unzipped_dir): # could be done more efficiently because nobody stores models in subdirectories but like who cares (it's a futureproofing thing lel)
+         for file in files:
+             if file.endswith(".pth"):
+                 pth_files.append(os.path.join(root, file))
+             elif file.endswith(".index"):
+                 index_files.append(os.path.join(root, file))
+
+     print(pth_files, index_files) # debug print because im fucking stupid and i need to see what is going on
+     global pth_file
+     global index_file
+     pth_file = pth_files[0]
+     index_file = index_files[0]
+     return ["Downloaded as " + filename, pth_files[0], index_files[0]]
+
+ if spaces_status:
+     @spaces.GPU()
+     def convert_now(audio_files, random_tag, converter):
+         return converter(
+             audio_files,
+             random_tag,
+             overwrite=False,
+             parallel_workers=8
+         )
+ else:
+     def convert_now(audio_files, random_tag, converter):
+         return converter(
+             audio_files,
+             random_tag,
+             overwrite=False,
+             parallel_workers=8
+         )
+
+ def run(
+     audio_files,
+     file_m,
+     pitch_alg,
+     pitch_lvl,
+     file_index,
+     index_inf,
+     r_m_f,
+     e_r,
+     c_b_p,
+ ):
+     if not audio_files:
+         raise ValueError("The audio pls")
+
+     if isinstance(audio_files, str):
+         audio_files = [audio_files]
+
+     try:
+         duration_base = librosa.get_duration(filename=audio_files[0])
+         print("Duration:", duration_base)
+     except Exception as e:
+         print(e)
+
+     random_tag = "USER_"+str(random.randint(10000000, 99999999))
+
+     converter.apply_conf(
+         tag=random_tag,
+         file_model=file_m,
+         pitch_algo=pitch_alg,
+         pitch_lvl=pitch_lvl,
+         file_index=file_index,
+         index_influence=index_inf,
+         respiration_median_filtering=r_m_f,
+         envelope_ratio=e_r,
+         consonant_breath_protection=c_b_p,
+         resample_sr=44100 if audio_files[0].endswith('.mp3') else 0,
+     )
+     time.sleep(0.1)
+
+     result = convert_now(audio_files, random_tag, converter)
+
+     return result[0]
+
+ with gr.Blocks() as demo:
+     gr.Markdown("## Ilaria RVC 💖")
+     with gr.Tab("Inference"):
+         with gr.Tab("Ilaria TTS"):
+             text_tts = gr.Textbox(label="Text", placeholder="Hello!", lines=3, interactive=True,)
+             dropdown_tts = gr.Dropdown(label="Language and Model",choices=list(language_dict.keys()),interactive=True, value=list(language_dict.keys())[0])
+
+             button_tts = gr.Button("Convert", variant="primary",)
+
+             output_tts = gr.Audio(type="filepath", label="Output",)
+
+             button_tts.click(text_to_speech_edge, inputs=[text_tts, dropdown_tts], outputs=[output_tts])
+
+
+         sound_gui = gr.Audio(value=None,type="filepath",autoplay=False,visible=True,)
+         pth_file_ui = gr.Textbox(label="Model pth file",value=pth_file,visible=False,interactive=False,) # gradio is fucking weird (im with stupid v)
+         index_file_ui = gr.Textbox(label="Index pth file",value=index_file,visible=False,interactive=False,) # gradio is fucking weird (im with stupid ^)
+         pitch_algo_conf = gr.Dropdown(PITCH_ALGO_OPT,value=PITCH_ALGO_OPT[4],label="Pitch algorithm",visible=True,interactive=True,)
+         pitch_lvl_conf = gr.Slider(label="Pitch level",minimum=-24,maximum=24,step=1,value=0,visible=True,interactive=True,)
+         index_inf_conf = gr.Slider(minimum=0,maximum=1,label="Index influence",value=0.75,)
+         respiration_filter_conf = gr.Slider(minimum=0,maximum=7,label="Respiration median filtering",value=3,step=1,interactive=True,)
+         envelope_ratio_conf = gr.Slider(minimum=0,maximum=1,label="Envelope ratio",value=0.25,interactive=True,)
+         consonant_protec_conf = gr.Slider(minimum=0,maximum=0.5,label="Consonant breath protection",value=0.5,interactive=True,)
+         button_conf = gr.Button("Convert",variant="primary",)
+         output_conf = gr.Audio(type="filepath",label="Output",)
+
+         button_conf.click(lambda :None, None, output_conf)
+         button_conf.click(
+             run,
+             inputs=[
+                 sound_gui,
+                 pth_file_ui,
+                 pitch_algo_conf,
+                 pitch_lvl_conf,
+                 index_file_ui, # put a bullet through my head
+                 index_inf_conf,
+                 respiration_filter_conf,
+                 envelope_ratio_conf,
+                 consonant_protec_conf,
+             ],
+             outputs=[output_conf],
+         )
+
+     with gr.Tab("Download Model"):
+         # markdown
+         gr.Markdown(
+             "Download the model from the following URL and upload it here. (Hugginface RVC model)"
+         )
+         model = gr.Textbox(lines=1, label="Model URL")
+         download_button = gr.Button("Download Model")
+         status = gr.Textbox(lines=1, label="Status", placeholder="Waiting....", interactive=False)
+         model_pth = gr.Textbox(lines=1, label="Model pth file", placeholder="Waiting....", interactive=False)
+         index_pth = gr.Textbox(lines=1, label="Index pth file", placeholder="Waiting....", interactive=False)
+         download_button.click(download_from_url, model, outputs=[status, model_pth, index_pth])
+         set_model_button = gr.Button("Set Model")
+         #set_model_button.click(
+
+     with gr.Tab("Credits"):
+         gr.Markdown(
+             """
+             Ilaria RVC made by [Ilaria](https://huggingface.co/TheStinger) suport her on [ko-fi](https://ko-fi.com/ilariaowo)
+
+             The Inference code is made by [r3gm](https://huggingface.co/r3gm) (his module helped form this space 💖)
+
+             made with ❤️ by [mikus](https://github.com/cappuch) - i hacked it up lel
+             """
+         )
+
+ demo.queue(api_open=False).launch(show_api=False) # idk ilaria if you want or dont want to
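The new "Ilaria TTS" tab wires the async text_to_speech_edge handler straight into button_tts.click, and Gradio awaits the coroutine itself. Below is a minimal standalone sketch of the same path for smoke-testing from the command line; the script name tts_smoke_test.py and the synth helper are illustrative and not part of this commit, while the edge_tts.Communicate(text, voice).save(path) calls and the tts_order_voice lookup mirror the lines added above.

```python
# tts_smoke_test.py - a minimal sketch (not in the commit) of what the "Ilaria TTS"
# tab does under the hood: map a display name to an edge-tts voice and write an MP3.
import asyncio
import tempfile

import edge_tts
from tts_voice import tts_order_voice


async def synth(text: str, display_name: str) -> str:
    voice = tts_order_voice[display_name]  # e.g. 'English-Jenny (Woman)' -> 'en-US-JennyNeural'
    # delete=False so the file survives the context manager, same as in app.py
    with tempfile.NamedTemporaryFile(delete=False, suffix=".mp3") as tmp:
        tmp_path = tmp.name
    # same calls the Gradio handler makes
    await edge_tts.Communicate(text, voice).save(tmp_path)
    return tmp_path


if __name__ == "__main__":
    path = asyncio.run(synth("Hello from Ilaria TTS!", "English-Jenny (Woman)"))
    print("wrote", path)
```

Writing to a NamedTemporaryFile with delete=False and returning the path is what lets gr.Audio(type="filepath") pick up the result in the app.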
tts_voice.py ADDED
@@ -0,0 +1,230 @@
+ tts_order_voice = {'English-Jenny (Woman)': 'en-US-JennyNeural',
+ 'English-Guy (Man)': 'en-US-GuyNeural',
+ 'English-Ana (Woman)': 'en-US-AnaNeural',
+ 'English-Aria (Woman)': 'en-US-AriaNeural',
+ 'English-Christopher (Man)': 'en-US-ChristopherNeural',
+ 'English-Eric (Man)': 'en-US-EricNeural',
+ 'English-Michelle (Woman)': 'en-US-MichelleNeural',
+ 'English-Roger (Man)': 'en-US-RogerNeural',
+ 'Spanish (Mexican)-Dalia (Woman)': 'es-MX-DaliaNeural',
+ 'Spanish (Mexican)-Jorge- (Man)': 'es-MX-JorgeNeural',
+ 'Korean-Sun-Hi- (Woman)': 'ko-KR-SunHiNeural',
+ 'Korean-InJoon- (Man)': 'ko-KR-InJoonNeural',
+ 'Thai-Premwadee- (Woman)': 'th-TH-PremwadeeNeural',
+ 'Thai-Niwat- (Man)': 'th-TH-NiwatNeural',
+ 'Vietnamese-HoaiMy- (Woman)': 'vi-VN-HoaiMyNeural',
+ 'Vietnamese-NamMinh- (Man)': 'vi-VN-NamMinhNeural',
+ 'Japanese-Nanami- (Woman)': 'ja-JP-NanamiNeural',
+ 'Japanese-Keita- (Man)': 'ja-JP-KeitaNeural',
+ 'French-Denise- (Woman)': 'fr-FR-DeniseNeural',
+ 'French-Eloise- (Woman)': 'fr-FR-EloiseNeural',
+ 'French-Henri- (Man)': 'fr-FR-HenriNeural',
+ 'Brazilian-Francisca- (Woman)': 'pt-BR-FranciscaNeural',
+ 'Brazilian-Antonio- (Man)': 'pt-BR-AntonioNeural',
+ 'Indonesian-Ardi- (Man)': 'id-ID-ArdiNeural',
+ 'Indonesian-Gadis- (Woman)': 'id-ID-GadisNeural',
+ 'Hebrew-Avri- (Man)': 'he-IL-AvriNeural',
+ 'Hebrew-Hila- (Woman)': 'he-IL-HilaNeural',
+ 'Italian-Isabella- (Woman)': 'it-IT-IsabellaNeural',
+ 'Italian-Diego- (Man)': 'it-IT-DiegoNeural',
+ 'Italian-Elsa- (Woman)': 'it-IT-ElsaNeural',
+ 'Dutch-Colette- (Woman)': 'nl-NL-ColetteNeural',
+ 'Dutch-Fenna- (Woman)': 'nl-NL-FennaNeural',
+ 'Dutch-Maarten- (Man)': 'nl-NL-MaartenNeural',
+ 'Malese-Osman- (Man)': 'ms-MY-OsmanNeural',
+ 'Malese-Yasmin- (Woman)': 'ms-MY-YasminNeural',
+ 'Norwegian-Pernille- (Woman)': 'nb-NO-PernilleNeural',
+ 'Norwegian-Finn- (Man)': 'nb-NO-FinnNeural',
+ 'Swedish-Sofie- (Woman)': 'sv-SE-SofieNeural',
+ 'ArabicSwedish-Mattias- (Man)': 'sv-SE-MattiasNeural',
+ 'Arabic-Hamed- (Man)': 'ar-SA-HamedNeural',
+ 'Arabic-Zariyah- (Woman)': 'ar-SA-ZariyahNeural',
+ 'Greek-Athina- (Woman)': 'el-GR-AthinaNeural',
+ 'Greek-Nestoras- (Man)': 'el-GR-NestorasNeural',
+ 'German-Katja- (Woman)': 'de-DE-KatjaNeural',
+ 'German-Amala- (Woman)': 'de-DE-AmalaNeural',
+ 'German-Conrad- (Man)': 'de-DE-ConradNeural',
+ 'German-Killian- (Man)': 'de-DE-KillianNeural',
+ 'Afrikaans-Adri- (Woman)': 'af-ZA-AdriNeural',
+ 'Afrikaans-Willem- (Man)': 'af-ZA-WillemNeural',
+ 'Ethiopian-Ameha- (Man)': 'am-ET-AmehaNeural',
+ 'Ethiopian-Mekdes- (Woman)': 'am-ET-MekdesNeural',
+ 'Arabic (UAD)-Fatima- (Woman)': 'ar-AE-FatimaNeural',
+ 'Arabic (UAD)-Hamdan- (Man)': 'ar-AE-HamdanNeural',
+ 'Arabic (Bahrain)-Ali- (Man)': 'ar-BH-AliNeural',
+ 'Arabic (Bahrain)-Laila- (Woman)': 'ar-BH-LailaNeural',
+ 'Arabic (Algeria)-Ismael- (Man)': 'ar-DZ-IsmaelNeural',
+ 'Arabic (Egypt)-Salma- (Woman)': 'ar-EG-SalmaNeural',
+ 'Arabic (Egypt)-Shakir- (Man)': 'ar-EG-ShakirNeural',
+ 'Arabic (Iraq)-Bassel- (Man)': 'ar-IQ-BasselNeural',
+ 'Arabic (Iraq)-Rana- (Woman)': 'ar-IQ-RanaNeural',
+ 'Arabic (Jordan)-Sana- (Woman)': 'ar-JO-SanaNeural',
+ 'Arabic (Jordan)-Taim- (Man)': 'ar-JO-TaimNeural',
+ 'Arabic (Kuwait)-Fahed- (Man)': 'ar-KW-FahedNeural',
+ 'Arabic (Kuwait)-Noura- (Woman)': 'ar-KW-NouraNeural',
+ 'Arabic (Lebanon)-Layla- (Woman)': 'ar-LB-LaylaNeural',
+ 'Arabic (Lebanon)-Rami- (Man)': 'ar-LB-RamiNeural',
+ 'Arabic (Libya)-Iman- (Woman)': 'ar-LY-ImanNeural',
+ 'Arabic (Libya)-Omar- (Man)': 'ar-LY-OmarNeural',
+ 'Arabic (Morocco)-Jamal- (Man)': 'ar-MA-JamalNeural',
+ 'Arabic (Morocco)-Mouna- (Woman)': 'ar-MA-MounaNeural',
+ 'Arabic (Oman)-Abdullah- (Man)': 'ar-OM-AbdullahNeural',
+ 'Arabic (Oman)-Aysha- (Woman)': 'ar-OM-AyshaNeural',
+ 'Arabic (Qatar)-Amal- (Woman)': 'ar-QA-AmalNeural',
+ 'Arabic (Qatar)-Moaz- (Man)': 'ar-QA-MoazNeural',
+ 'Arabic (Syrian Arab Republic)-Amany- (Woman)': 'ar-SY-AmanyNeural',
+ 'Arabic (Syrian Arab Republic)-Laith- (Man)': 'ar-SY-LaithNeural',
+ 'Arabic (Tunisia)-Hedi- (Man)': 'ar-TN-HediNeural',
+ 'Arabic (Tunisia)-Reem- (Woman)': 'ar-TN-ReemNeural',
+ 'Arabic (Yemen )-Maryam- (Woman)': 'ar-YE-MaryamNeural',
+ 'Arabic (Yemen )-Saleh- (Man)': 'ar-YE-SalehNeural',
+ 'Azerbaijani-Babek- (Man)': 'az-AZ-BabekNeural',
+ 'Azerbaijani-Banu- (Woman)': 'az-AZ-BanuNeural',
+ 'Bulgarian-Borislav- (Man)': 'bg-BG-BorislavNeural',
+ 'Bulgarian-Kalina- (Woman)': 'bg-BG-KalinaNeural',
+ 'Bengali (Bangladesh)-Nabanita- (Woman)': 'bn-BD-NabanitaNeural',
+ 'Bengali (Bangladesh)-Pradeep- (Man)': 'bn-BD-PradeepNeural',
+ 'Bengali (India)-Bashkar- (Man)': 'bn-IN-BashkarNeural',
+ 'Bengali (India)-Tanishaa- (Woman)': 'bn-IN-TanishaaNeural',
+ 'Bosniak (Bosnia and Herzegovina)-Goran- (Man)': 'bs-BA-GoranNeural',
+ 'Bosniak (Bosnia and Herzegovina)-Vesna- (Woman)': 'bs-BA-VesnaNeural',
+ 'Catalan (Spain)-Joana- (Woman)': 'ca-ES-JoanaNeural',
+ 'Catalan (Spain)-Enric- (Man)': 'ca-ES-EnricNeural',
+ 'Czech (Czech Republic)-Antonin- (Man)': 'cs-CZ-AntoninNeural',
+ 'Czech (Czech Republic)-Vlasta- (Woman)': 'cs-CZ-VlastaNeural',
+ 'Welsh (UK)-Aled- (Man)': 'cy-GB-AledNeural',
+ 'Welsh (UK)-Nia- (Woman)': 'cy-GB-NiaNeural',
+ 'Danish (Denmark)-Christel- (Woman)': 'da-DK-ChristelNeural',
+ 'Danish (Denmark)-Jeppe- (Man)': 'da-DK-JeppeNeural',
+ 'German (Austria)-Ingrid- (Woman)': 'de-AT-IngridNeural',
+ 'German (Austria)-Jonas- (Man)': 'de-AT-JonasNeural',
+ 'German (Switzerland)-Jan- (Man)': 'de-CH-JanNeural',
+ 'German (Switzerland)-Leni- (Woman)': 'de-CH-LeniNeural',
+ 'English (Australia)-Natasha- (Woman)': 'en-AU-NatashaNeural',
+ 'English (Australia)-William- (Man)': 'en-AU-WilliamNeural',
+ 'English (Canada)-Clara- (Woman)': 'en-CA-ClaraNeural',
+ 'English (Canada)-Liam- (Man)': 'en-CA-LiamNeural',
+ 'English (UK)-Libby- (Woman)': 'en-GB-LibbyNeural',
+ 'English (UK)-Maisie- (Woman)': 'en-GB-MaisieNeural',
+ 'English (UK)-Ryan- (Man)': 'en-GB-RyanNeural',
+ 'English (UK)-Sonia- (Woman)': 'en-GB-SoniaNeural',
+ 'English (UK)-Thomas- (Man)': 'en-GB-ThomasNeural',
+ 'English (Hong Kong)-Sam- (Man)': 'en-HK-SamNeural',
+ 'English (Hong Kong)-Yan- (Woman)': 'en-HK-YanNeural',
+ 'English (Ireland)-Connor- (Man)': 'en-IE-ConnorNeural',
+ 'English (Ireland)-Emily- (Woman)': 'en-IE-EmilyNeural',
+ 'English (India)-Neerja- (Woman)': 'en-IN-NeerjaNeural',
+ 'English (India)-Prabhat- (Man)': 'en-IN-PrabhatNeural',
+ 'English (Kenya)-Asilia- (Woman)': 'en-KE-AsiliaNeural',
+ 'English (Kenya)-Chilemba- (Man)': 'en-KE-ChilembaNeural',
+ 'English (Nigeria)-Abeo- (Man)': 'en-NG-AbeoNeural',
+ 'English (Nigeria)-Ezinne- (Woman)': 'en-NG-EzinneNeural',
+ 'English (New Zealand)-Mitchell- (Man)': 'en-NZ-MitchellNeural',
+ 'English (Philippines)-James- (Man)': 'en-PH-JamesNeural',
+ 'English (Philippines)-Rosa- (Woman)': 'en-PH-RosaNeural',
+ 'English (Singapore)-Luna- (Woman)': 'en-SG-LunaNeural',
+ 'English (Singapore)-Wayne- (Man)': 'en-SG-WayneNeural',
+ 'English (Tanzania)-Elimu- (Man)': 'en-TZ-ElimuNeural',
+ 'English (Tanzania)-Imani- (Woman)': 'en-TZ-ImaniNeural',
+ 'English (South Africa)-Leah- (Woman)': 'en-ZA-LeahNeural',
+ 'English (South Africa)-Luke- (Man)': 'en-ZA-LukeNeural',
+ 'Spanish (Argentina)-Elena- (Woman)': 'es-AR-ElenaNeural',
+ 'Spanish (Argentina)-Tomas- (Man)': 'es-AR-TomasNeural',
+ 'Spanish (Bolivia)-Marcelo- (Man)': 'es-BO-MarceloNeural',
+ 'Spanish (Bolivia)-Sofia- (Woman)': 'es-BO-SofiaNeural',
+ 'Spanish (Colombia)-Gonzalo- (Man)': 'es-CO-GonzaloNeural',
+ 'Spanish (Colombia)-Salome- (Woman)': 'es-CO-SalomeNeural',
+ 'Spanish (Costa Rica)-Juan- (Man)': 'es-CR-JuanNeural',
+ 'Spanish (Costa Rica)-Maria- (Woman)': 'es-CR-MariaNeural',
+ 'Spanish (Cuba)-Belkys- (Woman)': 'es-CU-BelkysNeural',
+ 'Spanish (Dominican Republic)-Emilio- (Man)': 'es-DO-EmilioNeural',
+ 'Spanish (Dominican Republic)-Ramona- (Woman)': 'es-DO-RamonaNeural',
+ 'Spanish (Ecuador)-Andrea- (Woman)': 'es-EC-AndreaNeural',
+ 'Spanish (Ecuador)-Luis- (Man)': 'es-EC-LuisNeural',
+ 'Spanish (Spain)-Alvaro- (Man)': 'es-ES-AlvaroNeural',
+ 'Spanish (Spain)-Elvira- (Woman)': 'es-ES-ElviraNeural',
+ 'Spanish (Equatorial Guinea)-Teresa- (Woman)': 'es-GQ-TeresaNeural',
+ 'Spanish (Guatemala)-Andres- (Man)': 'es-GT-AndresNeural',
+ 'Spanish (Guatemala)-Marta- (Woman)': 'es-GT-MartaNeural',
+ 'Spanish (Honduras)-Carlos- (Man)': 'es-HN-CarlosNeural',
+ 'Spanish (Honduras)-Karla- (Woman)': 'es-HN-KarlaNeural',
+ 'Spanish (Nicaragua)-Federico- (Man)': 'es-NI-FedericoNeural',
+ 'Spanish (Nicaragua)-Yolanda- (Woman)': 'es-NI-YolandaNeural',
+ 'Spanish (Panama)-Margarita- (Woman)': 'es-PA-MargaritaNeural',
+ 'Spanish (Panama)-Roberto- (Man)': 'es-PA-RobertoNeural',
+ 'Spanish (Peru)-Alex- (Man)': 'es-PE-AlexNeural',
+ 'Spanish (Peru)-Camila- (Woman)': 'es-PE-CamilaNeural',
+ 'Spanish (Puerto Rico)-Karina- (Woman)': 'es-PR-KarinaNeural',
+ 'Spanish (Puerto Rico)-Victor- (Man)': 'es-PR-VictorNeural',
+ 'Spanish (Paraguay)-Mario- (Man)': 'es-PY-MarioNeural',
+ 'Spanish (Paraguay)-Tania- (Woman)': 'es-PY-TaniaNeural',
+ 'Spanish (El Salvador)-Lorena- (Woman)': 'es-SV-LorenaNeural',
+ 'Spanish (El Salvador)-Rodrigo- (Man)': 'es-SV-RodrigoNeural',
+ 'Spanish (United States)-Alonso- (Man)': 'es-US-AlonsoNeural',
+ 'Spanish (United States)-Paloma- (Woman)': 'es-US-PalomaNeural',
+ 'Spanish (Uruguay)-Mateo- (Man)': 'es-UY-MateoNeural',
+ 'Spanish (Uruguay)-Valentina- (Woman)': 'es-UY-ValentinaNeural',
+ 'Spanish (Venezuela)-Paola- (Woman)': 'es-VE-PaolaNeural',
+ 'Spanish (Venezuela)-Sebastian- (Man)': 'es-VE-SebastianNeural',
+ 'Estonian (Estonia)-Anu- (Woman)': 'et-EE-AnuNeural',
+ 'Estonian (Estonia)-Kert- (Man)': 'et-EE-KertNeural',
+ 'Persian (Iran)-Dilara- (Woman)': 'fa-IR-DilaraNeural',
+ 'Persian (Iran)-Farid- (Man)': 'fa-IR-FaridNeural',
+ 'Finnish (Finland)-Harri- (Man)': 'fi-FI-HarriNeural',
+ 'Finnish (Finland)-Noora- (Woman)': 'fi-FI-NooraNeural',
+ 'French (Belgium)-Charline- (Woman)': 'fr-BE-CharlineNeural',
+ 'French (Belgium)-Gerard- (Man)': 'fr-BE-GerardNeural',
+ 'French (Canada)-Sylvie- (Woman)': 'fr-CA-SylvieNeural',
+ 'French (Canada)-Antoine- (Man)': 'fr-CA-AntoineNeural',
+ 'French (Canada)-Jean- (Man)': 'fr-CA-JeanNeural',
+ 'French (Switzerland)-Ariane- (Woman)': 'fr-CH-ArianeNeural',
+ 'French (Switzerland)-Fabrice- (Man)': 'fr-CH-FabriceNeural',
+ 'Irish (Ireland)-Colm- (Man)': 'ga-IE-ColmNeural',
+ 'Irish (Ireland)-Orla- (Woman)': 'ga-IE-OrlaNeural',
+ 'Galician (Spain)-Roi- (Man)': 'gl-ES-RoiNeural',
+ 'Galician (Spain)-Sabela- (Woman)': 'gl-ES-SabelaNeural',
+ 'Gujarati (India)-Dhwani- (Woman)': 'gu-IN-DhwaniNeural',
+ 'Gujarati (India)-Niranjan- (Man)': 'gu-IN-NiranjanNeural',
+ 'Hindi (India)-Madhur- (Man)': 'hi-IN-MadhurNeural',
+ 'Hindi (India)-Swara- (Woman)': 'hi-IN-SwaraNeural',
+ 'Croatian (Croatia)-Gabrijela- (Woman)': 'hr-HR-GabrijelaNeural',
+ 'Croatian (Croatia)-Srecko- (Man)': 'hr-HR-SreckoNeural',
+ 'Hungarian (Hungary)-Noemi- (Woman)': 'hu-HU-NoemiNeural',
+ 'Hungarian (Hungary)-Tamas- (Man)': 'hu-HU-TamasNeural',
+ 'Icelandic (Iceland)-Gudrun- (Woman)': 'is-IS-GudrunNeural',
+ 'Icelandic (Iceland)-Gunnar- (Man)': 'is-IS-GunnarNeural',
+ 'Javanese (Indonesia)-Dimas- (Man)': 'jv-ID-DimasNeural',
+ 'Javanese (Indonesia)-Siti- (Woman)': 'jv-ID-SitiNeural',
+ 'Georgian (Georgia)-Eka- (Woman)': 'ka-GE-EkaNeural',
+ 'Georgian (Georgia)-Giorgi- (Man)': 'ka-GE-GiorgiNeural',
+ 'Kazakh (Kazakhstan)-Aigul- (Woman)': 'kk-KZ-AigulNeural',
+ 'Kazakh (Kazakhstan)-Daulet- (Man)': 'kk-KZ-DauletNeural',
+ 'Khmer (Cambodia)-Piseth- (Man)': 'km-KH-PisethNeural',
+ 'Khmer (Cambodia)-Sreymom- (Woman)': 'km-KH-SreymomNeural',
+ 'Kannada (India)-Gagan- (Man)': 'kn-IN-GaganNeural',
+ 'Kannada (India)-Sapna- (Woman)': 'kn-IN-SapnaNeural',
+ 'Lao (Laos)-Chanthavong- (Man)': 'lo-LA-ChanthavongNeural',
+ 'Lao (Laos)-Keomany- (Woman)': 'lo-LA-KeomanyNeural',
+ 'Lithuanian (Lithuania)-Leonas- (Man)': 'lt-LT-LeonasNeural',
+ 'Lithuanian (Lithuania)-Ona- (Woman)': 'lt-LT-OnaNeural',
+ 'Latvian (Latvia)-Everita- (Woman)': 'lv-LV-EveritaNeural',
+ 'Latvian (Latvia)-Nils- (Man)': 'lv-LV-NilsNeural',
+ 'Macedonian (North Macedonia)-Aleksandar- (Man)': 'mk-MK-AleksandarNeural',
+ 'Macedonian (North Macedonia)-Marija- (Woman)': 'mk-MK-MarijaNeural',
+ 'Malayalam (India)-Midhun- (Man)': 'ml-IN-MidhunNeural',
+ 'Malayalam (India)-Sobhana- (Woman)': 'ml-IN-SobhanaNeural',
+ 'Mongolian (Mongolia)-Bataa- (Man)': 'mn-MN-BataaNeural',
+ 'Mongolian (Mongolia)-Yesui- (Woman)': 'mn-MN-YesuiNeural',
+ 'Marathi (India)-Aarohi- (Woman)': 'mr-IN-AarohiNeural',
+ 'Marathi (India)-Manohar- (Man)': 'mr-IN-ManoharNeural',
+ 'Maltese (Malta)-Grace- (Woman)': 'mt-MT-GraceNeural',
+ 'Maltese (Malta)-Joseph- (Man)': 'mt-MT-JosephNeural',
+ 'Burmese (Myanmar)-Nilar- (Woman)': 'my-MM-NilarNeural',
+ 'Burmese (Myanmar)-Thiha- (Man)': 'my-MM-ThihaNeural',
+ 'Nepali (Nepal)-Hemkala- (Woman)': 'ne-NP-HemkalaNeural',
+ 'Nepali (Nepal)-Sagar- (Man)': 'ne-NP-SagarNeural',
+ 'Dutch (Belgium)-Arnaud- (Man)': 'nl-BE-ArnaudNeural',
+ 'Dutch (Belgium)-Dena- (Woman)': 'nl-BE-DenaNeural',
+ 'Polish (Poland)-Marek- (Man)': 'pl-PL-MarekNeural',
+ 'Polish (Poland)-Zofia- (Woman)': 'pl-PL-ZofiaNeural',
+ 'Pashto (Afghanistan)-Gul Nawaz- (Man)': 'ps-AF-Gul',}
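tts_order_voice hard-codes display names mapped to edge-tts ShortNames, and the dropdown in app.py is built straight from its keys. A small sketch for keeping that mapping honest is shown below, assuming the edge_tts package's async list_voices() helper; the check_voices.py name and the loop are illustrative, not part of this commit. It flags any entry whose ShortName the service no longer reports, which is handy when the hard-coded list drifts out of date.

```python
# check_voices.py - a hedged sketch (not in the commit): compare the hard-coded
# tts_order_voice mapping against the voices edge-tts currently advertises.
import asyncio

import edge_tts
from tts_voice import tts_order_voice


async def main() -> None:
    # list_voices() returns dicts with "ShortName", "Gender", "Locale", etc.
    available = {v["ShortName"] for v in await edge_tts.list_voices()}
    for label, short_name in tts_order_voice.items():
        if short_name not in available:
            print(f"missing: {label!r} -> {short_name}")


if __name__ == "__main__":
    asyncio.run(main())
```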