Added support for the seamless-m4t-v2-large T2TT translation model.
Files changed:
- app.py +27 -6
- config.json5 +17 -0
- docs/translateModel.md +4 -4
- src/config.py +2 -2
- src/translation/translationLangs.py +238 -215
- src/translation/translationModel.py +25 -5
app.py CHANGED

@@ -39,8 +39,9 @@ from src.whisper.abstractWhisperContainer import AbstractWhisperContainer
 from src.whisper.whisperFactory import create_whisper_container
 from src.translation.translationModel import TranslationModel
 from src.translation.translationLangs import (TranslationLang,
-                                              _TO_LANG_CODE_WHISPER, …
-                                              …
+                                              _TO_LANG_CODE_WHISPER, sort_lang_by_whisper_codes,
+                                              get_lang_from_whisper_name, get_lang_from_whisper_code, get_lang_from_nllb_name, get_lang_from_m2m100_name, get_lang_from_seamlessTx_name,
+                                              get_lang_whisper_names, get_lang_nllb_names, get_lang_m2m100_names, get_lang_seamlessTx_names)
 import shutil
 import zhconv
 import tqdm

@@ -235,6 +236,8 @@ class WhisperTranscriber:
         ALMALangName: str = decodeOptions.pop("ALMALangName")
         madlad400ModelName: str = decodeOptions.pop("madlad400ModelName")
         madlad400LangName: str = decodeOptions.pop("madlad400LangName")
+        seamlessModelName: str = decodeOptions.pop("seamlessModelName")
+        seamlessLangName: str = decodeOptions.pop("seamlessLangName")
 
         translationBatchSize: int = decodeOptions.pop("translationBatchSize")
         translationNoRepeatNgramSize: int = decodeOptions.pop("translationNoRepeatNgramSize")

@@ -376,6 +379,11 @@ class WhisperTranscriber:
             selectedModelName = madlad400ModelName if madlad400ModelName is not None and len(madlad400ModelName) > 0 else "madlad400-3b-mt-ct2-int8_float16/SoybeanMilk"
             selectedModel = next((modelConfig for modelConfig in self.app_config.models["madlad400"] if modelConfig.name == selectedModelName), None)
             translationLang = get_lang_from_m2m100_name(madlad400LangName)
+        elif translateInput == "seamless" and seamlessLangName is not None and len(seamlessLangName) > 0:
+            selectedModelName = seamlessModelName if seamlessModelName is not None and len(seamlessModelName) > 0 else "facebook/seamless-m4t-v2-large"
+            selectedModel = next((modelConfig for modelConfig in self.app_config.models["seamless"] if modelConfig.name == selectedModelName), None)
+            translationLang = get_lang_from_seamlessTx_name(seamlessLangName)
+
 
         if translationLang is not None:
             translationModel = TranslationModel(modelConfig=selectedModel, whisperLang=whisperLang, translationLang=translationLang, batchSize=translationBatchSize, noRepeatNgramSize=translationNoRepeatNgramSize, numBeams=translationNumBeams, torchDtypeFloat16=translationTorchDtypeFloat16, usingBitsandbytes=translationUsingBitsandbytes)

@@ -938,6 +946,7 @@ def create_ui(app_config: ApplicationConfig):
     mt5_models = app_config.get_model_names("mt5")
     ALMA_models = app_config.get_model_names("ALMA")
     madlad400_models = app_config.get_model_names("madlad400")
+    seamless_models = app_config.get_model_names("seamless")
     if not torch.cuda.is_available(): # Loading only quantized or models with medium-low parameters in an environment without GPU support.
         nllb_models = list(filter(lambda nllb: any(name in nllb for name in ["-600M", "-1.3B", "-3.3B-ct2"]), nllb_models))
         m2m100_models = list(filter(lambda m2m100: "12B" not in m2m100, m2m100_models))

@@ -968,6 +977,10 @@ def create_ui(app_config: ApplicationConfig):
         gr.Dropdown(label="madlad400 - Model (for translate)", choices=madlad400_models, elem_id="madlad400ModelName"),
         gr.Dropdown(label="madlad400 - Language", choices=sorted(get_lang_m2m100_names()), elem_id="madlad400LangName"),
     }
+    common_seamless_inputs = lambda : {
+        gr.Dropdown(label="seamless - Model (for translate)", choices=seamless_models, elem_id="seamlessModelName"),
+        gr.Dropdown(label="seamless - Language", choices=sorted(get_lang_seamlessTx_names()), elem_id="seamlessLangName"),
+    }
 
     common_translation_inputs = lambda : {
         gr.Number(label="Translation - Batch Size", precision=0, value=app_config.translation_batch_size, elem_id="translationBatchSize"),

@@ -1054,14 +1067,18 @@ def create_ui(app_config: ApplicationConfig):
                 with gr.Tab(label="ALMA") as simpleALMATab:
                     with gr.Row():
                         simpleInputDict.update(common_ALMA_inputs())
-                with gr.Tab(label="madlad400") as …
+                with gr.Tab(label="madlad400") as simpleMadlad400Tab:
                     with gr.Row():
                         simpleInputDict.update(common_madlad400_inputs())
+                with gr.Tab(label="seamless") as simpleSeamlessTab:
+                    with gr.Row():
+                        simpleInputDict.update(common_seamless_inputs())
                 simpleM2M100Tab.select(fn=lambda: "m2m100", inputs = [], outputs= [simpleTranslateInput] )
                 simpleNllbTab.select(fn=lambda: "nllb", inputs = [], outputs= [simpleTranslateInput] )
                 simpleMT5Tab.select(fn=lambda: "mt5", inputs = [], outputs= [simpleTranslateInput] )
                 simpleALMATab.select(fn=lambda: "ALMA", inputs = [], outputs= [simpleTranslateInput] )
-                …
+                simpleMadlad400Tab.select(fn=lambda: "madlad400", inputs = [], outputs= [simpleTranslateInput] )
+                simpleSeamlessTab.select(fn=lambda: "seamless", inputs = [], outputs= [simpleTranslateInput] )
             with gr.Column():
                 with gr.Tab(label="URL") as simpleUrlTab:
                     simpleInputDict.update({gr.Text(label="URL (YouTube, etc.)", elem_id = "urlData")})

@@ -1125,14 +1142,18 @@ def create_ui(app_config: ApplicationConfig):
                 with gr.Tab(label="ALMA") as fullALMATab:
                     with gr.Row():
                         fullInputDict.update(common_ALMA_inputs())
-                with gr.Tab(label="madlad400") as …
+                with gr.Tab(label="madlad400") as fullMadlad400Tab:
                     with gr.Row():
                         fullInputDict.update(common_madlad400_inputs())
+                with gr.Tab(label="seamless") as fullSeamlessTab:
+                    with gr.Row():
+                        fullInputDict.update(common_seamless_inputs())
                 fullM2M100Tab.select(fn=lambda: "m2m100", inputs = [], outputs= [fullTranslateInput] )
                 fullNllbTab.select(fn=lambda: "nllb", inputs = [], outputs= [fullTranslateInput] )
                 fullMT5Tab.select(fn=lambda: "mt5", inputs = [], outputs= [fullTranslateInput] )
                 fullALMATab.select(fn=lambda: "ALMA", inputs = [], outputs= [fullTranslateInput] )
-                …
+                fullMadlad400Tab.select(fn=lambda: "madlad400", inputs = [], outputs= [fullTranslateInput] )
+                fullSeamlessTab.select(fn=lambda: "seamless", inputs = [], outputs= [fullTranslateInput] )
             with gr.Column():
                 with gr.Tab(label="URL") as fullUrlTab:
                     fullInputDict.update({gr.Text(label="URL (YouTube, etc.)", elem_id = "urlData")})
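The new tabs reuse the existing selection pattern: selecting a tab writes its family name into a hidden input that the transcriber later reads as translateInput. A minimal, self-contained sketch of that pattern with illustrative names (the real app defines simpleTranslateInput / fullTranslateInput and the dropdown choices elsewhere):

import gradio as gr

with gr.Blocks() as demo:
    # Hidden holder for the selected translation family; stands in for the
    # app's simpleTranslateInput / fullTranslateInput (assumed, for illustration).
    translateInput = gr.Text(value="m2m100", visible=False)

    with gr.Tab(label="m2m100") as m2m100Tab:
        gr.Dropdown(label="m2m100 - Language", choices=["japanese", "english"])
    with gr.Tab(label="seamless") as seamlessTab:
        gr.Dropdown(label="seamless - Language", choices=["japanese", "english"])

    # Selecting a tab returns its family name, which Gradio writes into the hidden field.
    m2m100Tab.select(fn=lambda: "m2m100", inputs=[], outputs=[translateInput])
    seamlessTab.select(fn=lambda: "seamless", inputs=[], outputs=[translateInput])

demo.launch()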
config.json5 CHANGED

@@ -269,6 +269,23 @@
             "url": "jbochi/madlad400-10b-mt",
             "type": "huggingface"
         }
+    ],
+    "seamless": [
+        //{
+        //    "name": "hf-seamless-m4t-medium/facebook",
+        //    "url": "facebook/hf-seamless-m4t-medium",
+        //    "type": "huggingface"
+        //},
+        //{
+        //    "name": "seamless-m4t-large/facebook",
+        //    "url": "facebook/seamless-m4t-large",
+        //    "type": "huggingface"
+        //},
+        {
+            "name": "seamless-m4t-v2-large/facebook",
+            "url": "facebook/seamless-m4t-v2-large",
+            "type": "huggingface"
+        }
     ]
 },
 // Configuration options that will be used if they are not specified in the command line arguments.
docs/translateModel.md CHANGED

@@ -22,7 +22,7 @@ M2M100 is a multilingual translation model introduced by Facebook AI in October
 |------|------------|------|---------------|---------------|
 | [facebook/m2m100_418M](https://huggingface.co/facebook/m2m100_418M) | 418M | 1.94 GB | float32 | ≈2 GB |
 | [facebook/m2m100_1.2B](https://huggingface.co/facebook/m2m100_1.2B) | 1.2B | 4.96 GB | float32 | ≈5 GB |
-| [facebook/m2m100-12B-last-ckpt](https://huggingface.co/facebook/m2m100-12B-last-ckpt) | 12B | 47.2 GB | float32 | 22.1 GB (torch dtype in float16) |
+| [facebook/m2m100-12B-last-ckpt](https://huggingface.co/facebook/m2m100-12B-last-ckpt) | 12B | 47.2 GB | float32 | ≈22.1 GB (torch dtype in float16) |
 
 ## M2M100-CTranslate2
 

@@ -143,14 +143,14 @@ Text-to-speech translation (T2ST)
 Text-to-text translation (T2TT)
 Automatic speech recognition (ASR)
 
-SeamlessM4T-v1 introduced by Seamless Communication team from Meta AI in Aug 2023. The paper is titled "`SeamlessM4T: Massively Multilingual & Multimodal Machine Translation`"([arXiv:2308.11596](https://arxiv.org/abs/2308.11596))
-SeamlessM4T-v2 introduced by Seamless Communication team from Meta AI in Dec 2023. The paper is titled "`Seamless: Multilingual Expressive and Streaming Speech Translation`"([arXiv:2312.05187](https://arxiv.org/abs/2312.05187))
+[SeamlessM4T-v1](https://huggingface.co/docs/transformers/main/en/model_doc/seamless_m4t) introduced by Seamless Communication team from Meta AI in Aug 2023. The paper is titled "`SeamlessM4T: Massively Multilingual & Multimodal Machine Translation`"([arXiv:2308.11596](https://arxiv.org/abs/2308.11596))
+[SeamlessM4T-v2](https://huggingface.co/docs/transformers/main/en/model_doc/seamless_m4t_v2) introduced by Seamless Communication team from Meta AI in Dec 2023. The paper is titled "`Seamless: Multilingual Expressive and Streaming Speech Translation`"([arXiv:2312.05187](https://arxiv.org/abs/2312.05187))
 
 | Name | Parameters | Size | type/quantize | Required VRAM |
 |------|------------|------|---------------|---------------|
 | [facebook/hf-seamless-m4t-medium](https://huggingface.co/facebook/hf-seamless-m4t-medium) | 1.2B | 4.84 GB | float32 | N/A |
 | [facebook/seamless-m4t-large](https://huggingface.co/facebook/seamless-m4t-large) | 2.3B | 11.4 GB | float32 | N/A |
-| [facebook/seamless-m4t-v2-large](https://huggingface.co/facebook/seamless-m4t-v2-large) | 2.3B | 11.4 GB (safetensors:9.24 GB) | float32 |
+| [facebook/seamless-m4t-v2-large](https://huggingface.co/facebook/seamless-m4t-v2-large) | 2.3B | 11.4 GB (safetensors:9.24 GB) | float32 | ≈9.2 GB |
 
 
 # Options
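For reference, T2TT with seamless-m4t-v2-large through Hugging Face transformers follows the same pattern the new code path in translationModel.py uses: the processor encodes the source text with its seamless language code, and generate(..., generate_speech=False) returns text tokens only. A minimal sketch (language codes such as "eng"/"jpn" come from the seamless column added in translationLangs.py):

import transformers

model_name = "facebook/seamless-m4t-v2-large"
processor = transformers.AutoProcessor.from_pretrained(model_name)
model = transformers.SeamlessM4Tv2Model.from_pretrained(model_name)

# Encode the source text with its seamless language code (T2TT input).
text_inputs = processor(text="Hello, how are you?", src_lang="eng", return_tensors="pt")
# Generate text tokens only; generate_speech=False skips the speech decoder.
output_tokens = model.generate(**text_inputs, tgt_lang="jpn", generate_speech=False)
print(processor.decode(output_tokens[0].tolist()[0], skip_special_tokens=True))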
src/config.py CHANGED

@@ -50,7 +50,7 @@ class VadInitialPromptMode(Enum):
         return None
 
 class ApplicationConfig:
-    def __init__(self, models: Dict[Literal["whisper", "m2m100", "nllb", "mt5", "ALMA", "madlad400"], List[ModelConfig]],
+    def __init__(self, models: Dict[Literal["whisper", "m2m100", "nllb", "mt5", "ALMA", "madlad400", "seamless"], List[ModelConfig]],
                  input_audio_max_duration: int = 600, share: bool = False, server_name: str = None, server_port: int = 7860,
                  queue_concurrency_count: int = 1, delete_uploaded_files: bool = True,
                  whisper_implementation: str = "whisper", default_model_name: str = "medium",

@@ -185,7 +185,7 @@ class ApplicationConfig:
         # Load using json5
         data = json5.load(f)
         data_models = data.pop("models", [])
-        models: Dict[Literal["whisper", "m2m100", "nllb", "mt5", "ALMA", "madlad400"], List[ModelConfig]] = {
+        models: Dict[Literal["whisper", "m2m100", "nllb", "mt5", "ALMA", "madlad400", "seamless"], List[ModelConfig]] = {
             key: [ModelConfig(**item) for item in value]
             for key, value in data_models.items()
         }
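The new "seamless" key flows from config.json5 into ApplicationConfig.models through the same dict comprehension as the other families, so create_ui can list it via get_model_names("seamless"). A rough sketch of that flow, reading the raw JSON5 directly (the real loader wraps each entry in ModelConfig as shown above):

import json5

# Read config.json5 the same way the config loader does; JSON5 allows the
# commented-out seamless entries, so only the active one is returned.
with open("config.json5", "r", encoding="utf-8") as f:
    data = json5.load(f)

seamless_entries = data["models"]["seamless"]
print([entry["name"] for entry in seamless_entries])  # ['seamless-m4t-v2-large/facebook']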
src/translation/translationLangs.py CHANGED

@@ -9,23 +9,36 @@ class Lang():
         return f"code:{self.code}, name:{self.names}"
 
 class TranslationLang():
-    def __init__(self, …
-        self.nllb …
-        self.whisper = …
+    def __init__(self, code: str, name: str):
+        self.nllb = Lang(code, name)
+        self.whisper = None
         self.m2m100 = None
+        self.seamlessTx = None
 
-    …
-    …
-    …
+    def Whisper(self, code: str, *names: str):
+        self.whisper = Lang(code, *names)
+        if self.m2m100 is None:
+            self.m2m100 = self.whisper
+        return self
+
+    def M2M100(self, code: str, name: str):
+        self.m2m100 = Lang(code, name)
+        return self
+
+    def SeamlessTx(self, code: str, name: str):
+        self.seamlessTx = Lang(code, name)
+        return self
 
     def __repr__(self):
         result = ""
-        if self.nllb …
+        if self.nllb:
             result += f"NLLB={self.nllb} "
-        if self.whisper …
+        if self.whisper:
             result += f"WHISPER={self.whisper} "
-        if self.m2m100 …
-            result += f"…
+        if self.m2m100:
+            result += f"M2M100={self.m2m100} "
+        if self.seamlessTx:
+            result += f"SeamlessTx={self.seamlessTx} "
         return f"Language {result}"
 
 """

@@ -49,211 +62,211 @@ https://huggingface.co/facebook/m2m100_1.2B
 The available languages for m2m100 and whisper are almost identical. Most of the codes correspond to the ISO 639-1 standard. For detailed information, please refer to the official documentation provided.
 """
 TranslationLangs = [
-    TranslationLang(…
-    … (the remaining old entries are truncated in the rendered diff) …
-    TranslationLang(None, …
+    TranslationLang("ace_Arab", "Acehnese (Arabic script)"),
+    TranslationLang("ace_Latn", "Acehnese (Latin script)"),
+    TranslationLang("acm_Arab", "Mesopotamian Arabic").Whisper("ar", "Arabic"),
+    TranslationLang("acq_Arab", "Ta’izzi-Adeni Arabic").Whisper("ar", "Arabic"),
+    TranslationLang("aeb_Arab", "Tunisian Arabic"),
+    TranslationLang("afr_Latn", "Afrikaans").Whisper("af", "Afrikaans").SeamlessTx("afr", "Afrikaans"),
+    TranslationLang("ajp_Arab", "South Levantine Arabic").Whisper("ar", "Arabic"),
+    TranslationLang("aka_Latn", "Akan"),
+    TranslationLang("amh_Ethi", "Amharic").Whisper("am", "Amharic").SeamlessTx("amh", "Amharic"),
+    TranslationLang("apc_Arab", "North Levantine Arabic").Whisper("ar", "Arabic"),
+    TranslationLang("arb_Arab", "Modern Standard Arabic").Whisper("ar", "Arabic").SeamlessTx("arb", "Modern Standard Arabic"),
+    TranslationLang("arb_Latn", "Modern Standard Arabic (Romanized)"),
+    TranslationLang("ars_Arab", "Najdi Arabic").Whisper("ar", "Arabic"),
+    TranslationLang("ary_Arab", "Moroccan Arabic").Whisper("ar", "Arabic").SeamlessTx("ary", "Moroccan Arabic"),
+    TranslationLang("arz_Arab", "Egyptian Arabic").Whisper("ar", "Arabic").SeamlessTx("arz", "Egyptian Arabic"),
+    TranslationLang("asm_Beng", "Assamese").Whisper("as", "Assamese").SeamlessTx("asm", "Assamese"),
+    TranslationLang("ast_Latn", "Asturian").M2M100("ast", "Asturian"),
+    TranslationLang("awa_Deva", "Awadhi"),
+    TranslationLang("ayr_Latn", "Central Aymara"),
+    TranslationLang("azb_Arab", "South Azerbaijani").Whisper("az", "Azerbaijani"),
+    TranslationLang("azj_Latn", "North Azerbaijani").Whisper("az", "Azerbaijani").SeamlessTx("azj", "North Azerbaijani"),
+    TranslationLang("bak_Cyrl", "Bashkir").Whisper("ba", "Bashkir"),
+    TranslationLang("bam_Latn", "Bambara"),
+    TranslationLang("ban_Latn", "Balinese"),
+    TranslationLang("bel_Cyrl", "Belarusian").Whisper("be", "Belarusian").SeamlessTx("bel", "Belarusian"),
+    TranslationLang("bem_Latn", "Bemba"),
+    TranslationLang("ben_Beng", "Bengali").Whisper("bn", "Bengali").SeamlessTx("ben", "Bengali"),
+    TranslationLang("bho_Deva", "Bhojpuri"),
+    TranslationLang("bjn_Arab", "Banjar (Arabic script)"),
+    TranslationLang("bjn_Latn", "Banjar (Latin script)"),
+    TranslationLang("bod_Tibt", "Standard Tibetan").Whisper("bo", "Tibetan"),
+    TranslationLang("bos_Latn", "Bosnian").Whisper("bs", "Bosnian").SeamlessTx("bos", "Bosnian"),
+    TranslationLang("bug_Latn", "Buginese"),
+    TranslationLang("bul_Cyrl", "Bulgarian").Whisper("bg", "Bulgarian").SeamlessTx("bul", "Bulgarian"),
+    TranslationLang("cat_Latn", "Catalan").Whisper("ca", "Catalan", "valencian").SeamlessTx("cat", "Catalan"),
+    TranslationLang("ceb_Latn", "Cebuano").M2M100("ceb", "Cebuano").SeamlessTx("ceb", "Cebuano"),
+    TranslationLang("ces_Latn", "Czech").Whisper("cs", "Czech").SeamlessTx("ces", "Czech"),
+    TranslationLang("cjk_Latn", "Chokwe"),
+    TranslationLang("ckb_Arab", "Central Kurdish").SeamlessTx("ckb", "Central Kurdish"),
+    TranslationLang("crh_Latn", "Crimean Tatar"),
+    TranslationLang("cym_Latn", "Welsh").Whisper("cy", "Welsh").SeamlessTx("cym", "Welsh"),
+    TranslationLang("dan_Latn", "Danish").Whisper("da", "Danish").SeamlessTx("dan", "Danish"),
+    TranslationLang("deu_Latn", "German").Whisper("de", "German").SeamlessTx("deu", "German"),
+    TranslationLang("dik_Latn", "Southwestern Dinka"),
+    TranslationLang("dyu_Latn", "Dyula"),
+    TranslationLang("dzo_Tibt", "Dzongkha"),
+    TranslationLang("ell_Grek", "Greek").Whisper("el", "Greek").SeamlessTx("ell", "Greek"),
+    TranslationLang("eng_Latn", "English").Whisper("en", "English").SeamlessTx("eng", "English"),
+    TranslationLang("epo_Latn", "Esperanto"),
+    TranslationLang("est_Latn", "Estonian").Whisper("et", "Estonian").SeamlessTx("est", "Estonian"),
+    TranslationLang("eus_Latn", "Basque").Whisper("eu", "Basque").SeamlessTx("eus", "Basque"),
+    TranslationLang("ewe_Latn", "Ewe"),
+    TranslationLang("fao_Latn", "Faroese").Whisper("fo", "Faroese"),
+    TranslationLang("fij_Latn", "Fijian"),
+    TranslationLang("fin_Latn", "Finnish").Whisper("fi", "Finnish").SeamlessTx("fin", "Finnish"),
+    TranslationLang("fon_Latn", "Fon"),
+    TranslationLang("fra_Latn", "French").Whisper("fr", "French").SeamlessTx("fra", "French"),
+    TranslationLang("fur_Latn", "Friulian"),
+    TranslationLang("fuv_Latn", "Nigerian Fulfulde").M2M100("ff", "Fulah").SeamlessTx("fuv", "Nigerian Fulfulde"),
+    TranslationLang("gla_Latn", "Scottish Gaelic").M2M100("gd", "Scottish Gaelic"),
+    TranslationLang("gle_Latn", "Irish").M2M100("ga", "Irish").SeamlessTx("gle", "Irish"),
+    TranslationLang("glg_Latn", "Galician").Whisper("gl", "Galician").SeamlessTx("glg", "Galician"),
+    TranslationLang("grn_Latn", "Guarani"),
+    TranslationLang("guj_Gujr", "Gujarati").Whisper("gu", "Gujarati").SeamlessTx("guj", "Gujarati"),
+    TranslationLang("hat_Latn", "Haitian Creole").Whisper("ht", "Haitian creole", "haitian"),
+    TranslationLang("hau_Latn", "Hausa").Whisper("ha", "Hausa"),
+    TranslationLang("heb_Hebr", "Hebrew").Whisper("he", "Hebrew").SeamlessTx("heb", "Hebrew"),
+    TranslationLang("hin_Deva", "Hindi").Whisper("hi", "Hindi").SeamlessTx("hin", "Hindi"),
+    TranslationLang("hne_Deva", "Chhattisgarhi"),
+    TranslationLang("hrv_Latn", "Croatian").Whisper("hr", "Croatian").SeamlessTx("hrv", "Croatian"),
+    TranslationLang("hun_Latn", "Hungarian").Whisper("hu", "Hungarian").SeamlessTx("hun", "Hungarian"),
+    TranslationLang("hye_Armn", "Armenian").Whisper("hy", "Armenian").SeamlessTx("hye", "Armenian"),
+    TranslationLang("ibo_Latn", "Igbo").M2M100("ig", "Igbo").SeamlessTx("ibo", "Igbo"),
+    TranslationLang("ilo_Latn", "Ilocano").M2M100("ilo", "Iloko"),
+    TranslationLang("ind_Latn", "Indonesian").Whisper("id", "Indonesian").SeamlessTx("ind", "Indonesian"),
+    TranslationLang("isl_Latn", "Icelandic").Whisper("is", "Icelandic").SeamlessTx("isl", "Icelandic"),
+    TranslationLang("ita_Latn", "Italian").Whisper("it", "Italian").SeamlessTx("ita", "Italian"),
+    TranslationLang("jav_Latn", "Javanese").Whisper("jw", "Javanese").M2M100("jv", "Javanese").SeamlessTx("jav", "Javanese"),
+    TranslationLang("jpn_Jpan", "Japanese").Whisper("ja", "Japanese").SeamlessTx("jpn", "Japanese"),
+    TranslationLang("kab_Latn", "Kabyle"),
+    TranslationLang("kac_Latn", "Jingpho"),
+    TranslationLang("kam_Latn", "Kamba"),
+    TranslationLang("kan_Knda", "Kannada").Whisper("kn", "Kannada").SeamlessTx("kan", "Kannada"),
+    TranslationLang("kas_Arab", "Kashmiri (Arabic script)"),
+    TranslationLang("kas_Deva", "Kashmiri (Devanagari script)"),
+    TranslationLang("kat_Geor", "Georgian").Whisper("ka", "Georgian").SeamlessTx("kat", "Georgian"),
+    TranslationLang("knc_Arab", "Central Kanuri (Arabic script)"),
+    TranslationLang("knc_Latn", "Central Kanuri (Latin script)"),
+    TranslationLang("kaz_Cyrl", "Kazakh").Whisper("kk", "Kazakh").SeamlessTx("kaz", "Kazakh"),
+    TranslationLang("kbp_Latn", "Kabiyè"),
+    TranslationLang("kea_Latn", "Kabuverdianu"),
+    TranslationLang("khm_Khmr", "Khmer").Whisper("km", "Khmer").SeamlessTx("khm", "Khmer"),
+    TranslationLang("kik_Latn", "Kikuyu"),
+    TranslationLang("kin_Latn", "Kinyarwanda"),
+    TranslationLang("kir_Cyrl", "Kyrgyz").SeamlessTx("kir", "Kyrgyz"),
+    TranslationLang("kmb_Latn", "Kimbundu"),
+    TranslationLang("kmr_Latn", "Northern Kurdish"),
+    TranslationLang("kon_Latn", "Kikongo"),
+    TranslationLang("kor_Hang", "Korean").Whisper("ko", "Korean").SeamlessTx("kor", "Korean"),
+    TranslationLang("lao_Laoo", "Lao").Whisper("lo", "Lao").SeamlessTx("lao", "Lao"),
+    TranslationLang("lij_Latn", "Ligurian"),
+    TranslationLang("lim_Latn", "Limburgish"),
+    TranslationLang("lin_Latn", "Lingala").Whisper("ln", "Lingala"),
+    TranslationLang("lit_Latn", "Lithuanian").Whisper("lt", "Lithuanian").SeamlessTx("lit", "Lithuanian"),
+    TranslationLang("lmo_Latn", "Lombard"),
+    TranslationLang("ltg_Latn", "Latgalian"),
+    TranslationLang("ltz_Latn", "Luxembourgish").Whisper("lb", "Luxembourgish", "letzeburgesch"),
+    TranslationLang("lua_Latn", "Luba-Kasai"),
+    TranslationLang("lug_Latn", "Ganda").M2M100("lg", "Ganda").SeamlessTx("lug", "Ganda"),
+    TranslationLang("luo_Latn", "Luo").SeamlessTx("luo", "Luo"),
+    TranslationLang("lus_Latn", "Mizo"),
+    TranslationLang("lvs_Latn", "Standard Latvian").Whisper("lv", "Latvian").SeamlessTx("lvs", "Standard Latvian"),
+    TranslationLang("mag_Deva", "Magahi"),
+    TranslationLang("mai_Deva", "Maithili").SeamlessTx("mai", "Maithili"),
+    TranslationLang("mal_Mlym", "Malayalam").Whisper("ml", "Malayalam").SeamlessTx("mal", "Malayalam"),
+    TranslationLang("mar_Deva", "Marathi").Whisper("mr", "Marathi").SeamlessTx("mar", "Marathi"),
+    TranslationLang("min_Arab", "Minangkabau (Arabic script)"),
+    TranslationLang("min_Latn", "Minangkabau (Latin script)"),
+    TranslationLang("mkd_Cyrl", "Macedonian").Whisper("mk", "Macedonian").SeamlessTx("mkd", "Macedonian"),
+    TranslationLang("plt_Latn", "Plateau Malagasy").Whisper("mg", "Malagasy"),
+    TranslationLang("mlt_Latn", "Maltese").Whisper("mt", "Maltese").SeamlessTx("mlt", "Maltese"),
+    TranslationLang("mni_Beng", "Meitei (Bengali script)").SeamlessTx("mni", "Meitei"),
+    TranslationLang("khk_Cyrl", "Halh Mongolian").Whisper("mn", "Mongolian").SeamlessTx("khk", "Halh Mongolian"),
+    TranslationLang("mos_Latn", "Mossi"),
+    TranslationLang("mri_Latn", "Maori").Whisper("mi", "Maori"),
+    TranslationLang("mya_Mymr", "Burmese").Whisper("my", "Myanmar", "burmese").SeamlessTx("mya", "Burmese"),
+    TranslationLang("nld_Latn", "Dutch").Whisper("nl", "Dutch", "flemish").SeamlessTx("nld", "Dutch"),
+    TranslationLang("nno_Latn", "Norwegian Nynorsk").Whisper("nn", "Nynorsk").SeamlessTx("nno", "Norwegian Nynorsk"),
+    TranslationLang("nob_Latn", "Norwegian Bokmål").Whisper("no", "Norwegian").SeamlessTx("nob", "Norwegian Bokmål"),
+    TranslationLang("npi_Deva", "Nepali").Whisper("ne", "Nepali").SeamlessTx("npi", "Nepali"),
+    TranslationLang("nso_Latn", "Northern Sotho").M2M100("ns", "Northern Sotho"),
+    TranslationLang("nus_Latn", "Nuer"),
+    TranslationLang("nya_Latn", "Nyanja").SeamlessTx("nya", "Nyanja"),
+    TranslationLang("oci_Latn", "Occitan").Whisper("oc", "Occitan"),
+    TranslationLang("gaz_Latn", "West Central Oromo").SeamlessTx("gaz", "West Central Oromo"),
+    TranslationLang("ory_Orya", "Odia").M2M100("or", "Oriya").SeamlessTx("ory", "Odia"),
+    TranslationLang("pag_Latn", "Pangasinan"),
+    TranslationLang("pan_Guru", "Eastern Panjabi").Whisper("pa", "Punjabi", "panjabi").SeamlessTx("pan", "Punjabi"),
+    TranslationLang("pap_Latn", "Papiamento"),
+    TranslationLang("pes_Arab", "Western Persian").Whisper("fa", "Persian").SeamlessTx("pes", "Western Persian"),
+    TranslationLang("pol_Latn", "Polish").Whisper("pl", "Polish").SeamlessTx("pol", "Polish"),
+    TranslationLang("por_Latn", "Portuguese").Whisper("pt", "Portuguese").SeamlessTx("por", "Portuguese"),
+    TranslationLang("prs_Arab", "Dari"),
+    TranslationLang("pbt_Arab", "Southern Pashto").Whisper("ps", "Pashto", "pushto").SeamlessTx("pbt", "Southern Pashto"),
+    TranslationLang("quy_Latn", "Ayacucho Quechua"),
+    TranslationLang("ron_Latn", "Romanian").Whisper("ro", "Romanian", "moldavian", "moldovan").SeamlessTx("ron", "Romanian"),
+    TranslationLang("run_Latn", "Rundi"),
+    TranslationLang("rus_Cyrl", "Russian").Whisper("ru", "Russian").SeamlessTx("rus", "Russian"),
+    TranslationLang("sag_Latn", "Sango"),
+    TranslationLang("san_Deva", "Sanskrit").Whisper("sa", "Sanskrit"),
+    TranslationLang("sat_Olck", "Santali"),
+    TranslationLang("scn_Latn", "Sicilian"),
+    TranslationLang("shn_Mymr", "Shan"),
+    TranslationLang("sin_Sinh", "Sinhala").Whisper("si", "Sinhala", "sinhalese"),
+    TranslationLang("slk_Latn", "Slovak").Whisper("sk", "Slovak").SeamlessTx("slk", "Slovak"),
+    TranslationLang("slv_Latn", "Slovenian").Whisper("sl", "Slovenian").SeamlessTx("slv", "Slovenian"),
+    TranslationLang("smo_Latn", "Samoan"),
+    TranslationLang("sna_Latn", "Shona").Whisper("sn", "Shona").SeamlessTx("sna", "Shona"),
+    TranslationLang("snd_Arab", "Sindhi").Whisper("sd", "Sindhi").SeamlessTx("snd", "Sindhi"),
+    TranslationLang("som_Latn", "Somali").Whisper("so", "Somali").SeamlessTx("som", "Somali"),
+    TranslationLang("sot_Latn", "Southern Sotho"),
+    TranslationLang("spa_Latn", "Spanish").Whisper("es", "Spanish", "castilian").SeamlessTx("spa", "Spanish"),
+    TranslationLang("als_Latn", "Tosk Albanian").Whisper("sq", "Albanian"),
+    TranslationLang("srd_Latn", "Sardinian"),
+    TranslationLang("srp_Cyrl", "Serbian").Whisper("sr", "Serbian").SeamlessTx("srp", "Serbian"),
+    TranslationLang("ssw_Latn", "Swati").M2M100("ss", "Swati"),
+    TranslationLang("sun_Latn", "Sundanese").Whisper("su", "Sundanese"),
+    TranslationLang("swe_Latn", "Swedish").Whisper("sv", "Swedish").SeamlessTx("swe", "Swedish"),
+    TranslationLang("swh_Latn", "Swahili").Whisper("sw", "Swahili").SeamlessTx("swh", "Swahili"),
+    TranslationLang("szl_Latn", "Silesian"),
+    TranslationLang("tam_Taml", "Tamil").Whisper("ta", "Tamil").SeamlessTx("tam", "Tamil"),
+    TranslationLang("tat_Cyrl", "Tatar").Whisper("tt", "Tatar"),
+    TranslationLang("tel_Telu", "Telugu").Whisper("te", "Telugu").SeamlessTx("tel", "Telugu"),
+    TranslationLang("tgk_Cyrl", "Tajik").Whisper("tg", "Tajik").SeamlessTx("tgk", "Tajik"),
+    TranslationLang("tgl_Latn", "Tagalog").Whisper("tl", "Tagalog").SeamlessTx("tgl", "Tagalog"),
+    TranslationLang("tha_Thai", "Thai").Whisper("th", "Thai").SeamlessTx("tha", "Thai"),
+    TranslationLang("tir_Ethi", "Tigrinya"),
+    TranslationLang("taq_Latn", "Tamasheq (Latin script)"),
+    TranslationLang("taq_Tfng", "Tamasheq (Tifinagh script)"),
+    TranslationLang("tpi_Latn", "Tok Pisin"),
+    TranslationLang("tsn_Latn", "Tswana").M2M100("tn", "Tswana"),
+    TranslationLang("tso_Latn", "Tsonga"),
+    TranslationLang("tuk_Latn", "Turkmen").Whisper("tk", "Turkmen"),
+    TranslationLang("tum_Latn", "Tumbuka"),
+    TranslationLang("tur_Latn", "Turkish").Whisper("tr", "Turkish").SeamlessTx("tur", "Turkish"),
+    TranslationLang("twi_Latn", "Twi"),
+    TranslationLang("tzm_Tfng", "Central Atlas Tamazight"),
+    TranslationLang("uig_Arab", "Uyghur"),
+    TranslationLang("ukr_Cyrl", "Ukrainian").Whisper("uk", "Ukrainian").SeamlessTx("ukr", "Ukrainian"),
+    TranslationLang("umb_Latn", "Umbundu"),
+    TranslationLang("urd_Arab", "Urdu").Whisper("ur", "Urdu").SeamlessTx("urd", "Urdu"),
+    TranslationLang("uzn_Latn", "Northern Uzbek").Whisper("uz", "Uzbek").SeamlessTx("uzn", "Northern Uzbek"),
+    TranslationLang("vec_Latn", "Venetian"),
+    TranslationLang("vie_Latn", "Vietnamese").Whisper("vi", "Vietnamese").SeamlessTx("vie", "Vietnamese"),
+    TranslationLang("war_Latn", "Waray"),
+    TranslationLang("wol_Latn", "Wolof").M2M100("wo", "Wolof"),
+    TranslationLang("xho_Latn", "Xhosa").M2M100("xh", "Xhosa"),
+    TranslationLang("ydd_Hebr", "Eastern Yiddish").Whisper("yi", "Yiddish"),
+    TranslationLang("yor_Latn", "Yoruba").Whisper("yo", "Yoruba").SeamlessTx("yor", "Yoruba"),
+    TranslationLang("yue_Hant", "Yue Chinese").Whisper("yue", "cantonese").M2M100("zh", "Chinese (zh-yue)").SeamlessTx("yue", "Cantonese"),
+    TranslationLang("zho_Hans", "Chinese (Simplified)").Whisper("zh", "Chinese (Simplified)", "Chinese", "mandarin").SeamlessTx("cmn", "Mandarin Chinese (Simplified)"),
+    TranslationLang("zho_Hant", "Chinese (Traditional)").Whisper("zh", "Chinese (Traditional)").SeamlessTx("cmn_Hant", "Mandarin Chinese (Traditional)"),
+    TranslationLang("zsm_Latn", "Standard Malay").Whisper("ms", "Malay").SeamlessTx("zsm", "Standard Malay"),
+    TranslationLang("zul_Latn", "Zulu").M2M100("zu", "Zulu").SeamlessTx("zul", "Zulu"),
+    # TranslationLang(None, None).Whisper("br", "Breton"), # Both whisper and m2m100 support the Breton language, but nllb does not have this language.
 ]
 
 

@@ -263,6 +276,8 @@ _TO_LANG_NAME_M2M100 = {name.lower(): language for language in TranslationLangs
 
 _TO_LANG_NAME_WHISPER = {name.lower(): language for language in TranslationLangs if language.whisper is not None for name in language.whisper.names}
 
+_TO_LANG_NAME_SeamlessTx = {name.lower(): language for language in TranslationLangs if language.seamlessTx is not None for name in language.seamlessTx.names}
+
 _TO_LANG_CODE_WHISPER = {language.whisper.code.lower(): language for language in TranslationLangs if language.whisper is not None and len(language.whisper.code) > 0}
 
 

@@ -278,6 +293,10 @@ def get_lang_from_whisper_name(whisperName, default=None) -> TranslationLang:
     """Return the TranslationLang from the lang_name_whisper name."""
     return _TO_LANG_NAME_WHISPER.get(whisperName.lower() if whisperName else None, default)
 
+def get_lang_from_seamlessTx_name(seamlessTxName, default=None) -> TranslationLang:
+    """Return the TranslationLang from the lang_name_seamlessTx name."""
+    return _TO_LANG_NAME_SeamlessTx.get(seamlessTxName.lower() if seamlessTxName else None, default)
+
 def get_lang_from_whisper_code(whisperCode, default=None) -> TranslationLang:
     """Return the TranslationLang from the lang_code_whisper."""
     return _TO_LANG_CODE_WHISPER.get(whisperCode, default)

@@ -290,6 +309,10 @@ def get_lang_m2m100_names(codes = []):
     """Return a list of m2m100 language names."""
     return list({name.lower(): None for language in TranslationLangs if language.m2m100 is not None and (len(codes) == 0 or any(code in language.m2m100.code for code in codes)) for name in language.m2m100.names}.keys())
 
+def get_lang_seamlessTx_names(codes = []):
+    """Return a list of seamlessTx language names."""
+    return list({name.lower(): None for language in TranslationLangs if language.seamlessTx is not None and (len(codes) == 0 or any(code in language.seamlessTx.code for code in codes)) for name in language.seamlessTx.names}.keys())
+
 def get_lang_whisper_names():
     """Return a list of whisper language names."""
     return list(_TO_LANG_NAME_WHISPER.keys())
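The builder-style entries above let one language be addressed by its NLLB, whisper, m2m100, or seamless name/code. A short sketch of how the new seamless helpers are used (entries and functions exactly as defined in this file):

from src.translation.translationLangs import (get_lang_from_seamlessTx_name,
                                              get_lang_seamlessTx_names)

# Dropdown choices for the new "seamless - Language" selector (app.py sorts them).
print(sorted(get_lang_seamlessTx_names())[:3])  # e.g. ['afrikaans', 'amharic', 'armenian']

# Resolve a UI selection back to a TranslationLang and read its per-model codes.
lang = get_lang_from_seamlessTx_name("Japanese")
print(lang.nllb.code, lang.whisper.code, lang.seamlessTx.code)  # jpn_Jpan ja jpn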
src/translation/translationModel.py CHANGED

@@ -27,7 +27,7 @@ class TranslationModel:
         localFilesOnly: bool = False,
         loadModel: bool = False,
     ):
-        """Initializes the M2M100 / Nllb-200 / mt5 / ALMA / madlad400 translation model.
+        """Initializes the M2M100 / Nllb-200 / mt5 / ALMA / madlad400 / seamless-m4t translation model.
 
         Args:
           modelConfig: Config of the model to use (distilled-600M, distilled-1.3B,

@@ -212,7 +212,7 @@ class TranslationModel:
             elif "GGUF" in self.modelPath:
                 pass
             elif self.usingBitsandbytes == None:
-                …
+                kwargsPipeline.update({"device": self.device})
             elif self.usingBitsandbytes == "int8":
                 kwargsModel.update({"load_in_8bit": True, "llm_int8_enable_fp32_cpu_offload": True})
             elif self.usingBitsandbytes == "int4":

@@ -277,6 +277,14 @@ class TranslationModel:
                 self.transTokenizer = transformers.T5Tokenizer.from_pretrained(**kwargsTokenizer)
                 self.transModel = transformers.T5ForConditionalGeneration.from_pretrained(**kwargsModel)
                 kwargsPipeline.update({"task": "text2text-generation", "model": self.transModel, "tokenizer": self.transTokenizer})
+            elif "seamless" in self.modelPath:
+                self.transProcessor = transformers.AutoProcessor.from_pretrained(self.modelPath)
+                if "v2" in self.modelPath:
+                    self.transModel = transformers.SeamlessM4Tv2Model.from_pretrained(**kwargsModel)
+                else:
+                    self.transModel = transformers.SeamlessM4TModel.from_pretrained(**kwargsModel)
+                if self.device != "cpu" and "load_in_8bit" not in kwargsModel and "load_in_4bit" not in kwargsModel:
+                    self.transModel.to(self.device)
             else:
                 kwargsTokenizer.update({"pretrained_model_name_or_path": self.modelPath})
                 self.transTokenizer = transformers.AutoTokenizer.from_pretrained(**kwargsTokenizer)

@@ -286,7 +294,7 @@ class TranslationModel:
                     kwargsPipeline.update({"src_lang": self.whisperLang.m2m100.code, "tgt_lang": self.translationLang.m2m100.code})
                 else: #NLLB
                     kwargsPipeline.update({"src_lang": self.whisperLang.nllb.code, "tgt_lang": self.translationLang.nllb.code})
-            if …
+            if not any(name in self.modelPath for name in ["ct2", "seamless"]):
                 self.transTranslator = transformers.pipeline(**kwargsPipeline)
         except Exception as e:
             self.release_vram()

@@ -310,6 +318,8 @@ class TranslationModel:
         if getattr(self, "transModel", None) is not None and getattr(self.transModel, "unload_model", None) is not None:
             self.transModel.unload_model()
 
+        if getattr(self, "transProcessor") is not None:
+            del self.transProcessor
         if getattr(self, "transTokenizer", None) is not None:
             del self.transTokenizer
         if getattr(self, "transModel", None) is not None:

@@ -392,6 +402,13 @@ class TranslationModel:
             elif "madlad400" in self.modelPath:
                 output = self.transTranslator(self.madlad400Prefix + text, max_length=max_length, batch_size=self.batchSize, no_repeat_ngram_size=self.noRepeatNgramSize, num_beams=self.numBeams) #, num_return_sequences=2
                 result = output[0]['generated_text']
+            elif "seamless" in self.modelPath:
+                if self.device != "cpu":
+                    text_inputs = self.transProcessor(text = text, src_lang=self.whisperLang.seamlessTx.code, return_tensors="pt").to(self.device)
+                else:
+                    text_inputs = self.transProcessor(text = text, src_lang=self.whisperLang.seamlessTx.code, return_tensors="pt")
+                output_tokens = self.transModel.generate(**text_inputs, tgt_lang=self.translationLang.seamlessTx.code, generate_speech=False, no_repeat_ngram_size=self.noRepeatNgramSize, num_beams=self.numBeams)
+                result = self.transProcessor.decode(output_tokens[0].tolist()[0], skip_special_tokens=True)
             else: #M2M100 & NLLB
                 output = self.transTranslator(text, max_length=max_length, batch_size=self.batchSize, no_repeat_ngram_size=self.noRepeatNgramSize, num_beams=self.numBeams)
                 result = output[0]['translation_text']

@@ -406,7 +423,8 @@ _MODELS = ["nllb-200",
            "m2m100",
            "mt5",
            "ALMA",
-           "madlad400"]
+           "madlad400",
+           "seamless"]
 
 def check_model_name(name):
     return any(allowed_name in name for allowed_name in _MODELS)

@@ -466,7 +484,9 @@ def download_model(
         "model.safetensors.index.json",
         "quantize_config.json",
         "tokenizer.model",
-        "vocabulary.json"
+        "vocabulary.json",
+        "preprocessor_config.json",
+        "added_tokens.json"
     ]
 
     kwargs = {