jhj0517 committed
Commit 5341b3b · unverified · 2 Parent(s): b4876a0 d36cf56

Merge pull request #223 from jhj0517/refactor/abstract-class

app.py CHANGED
@@ -257,6 +257,9 @@ class App:
                                                          self.deepl_api.available_target_langs.keys()))
                 with gr.Row():
                     cb_deepl_ispro = gr.Checkbox(label="Pro User?", value=False)
+                with gr.Row():
+                    cb_timestamp = gr.Checkbox(value=True, label="Add a timestamp to the end of the filename",
+                                               interactive=True)
                 with gr.Row():
                     btn_run = gr.Button("TRANSLATE SUBTITLE FILE", variant="primary")
                 with gr.Row():
@@ -266,7 +269,7 @@ class App:
 
                 btn_run.click(fn=self.deepl_api.translate_deepl,
                               inputs=[tb_authkey, file_subs, dd_deepl_sourcelang, dd_deepl_targetlang,
-                                      cb_deepl_ispro],
+                                      cb_deepl_ispro, cb_timestamp],
                               outputs=[tb_indicator, files_subtitles])
 
                 btn_openfolder.click(fn=lambda: self.open_folder(os.path.join("outputs", "translations")),
modules/translation/deepl_api.py CHANGED
@@ -83,7 +83,7 @@ DEEPL_AVAILABLE_SOURCE_LANGS = {
 
 class DeepLAPI:
     def __init__(self,
-                 output_dir: str
+                 output_dir: str = os.path.join("outputs", "translations")
                  ):
         self.api_interval = 1
         self.max_text_batch_size = 50
@@ -97,6 +97,7 @@ class DeepLAPI:
                         source_lang: str,
                         target_lang: str,
                         is_pro: bool,
+                        add_timestamp: bool,
                         progress=gr.Progress()) -> list:
         """
         Translate subtitle files using DeepL API
@@ -112,6 +113,8 @@ class DeepLAPI:
             Target language of the file to transcribe from gr.Dropdown()
         is_pro: str
             Boolean value that is about pro user or not from gr.Checkbox().
+        add_timestamp: bool
+            Boolean value from gr.Checkbox() that determines whether to add a timestamp at the end of the filename.
         progress: gr.Progress
             Indicator to show progress directly in gradio.
 
@@ -141,10 +144,12 @@ class DeepLAPI:
                 progress(batch_end / len(parsed_dicts), desc="Translating..")
 
             subtitle = get_serialized_srt(parsed_dicts)
-            timestamp = datetime.now().strftime("%m%d%H%M%S")
 
-            file_name = file_name[:-9]
-            output_path = os.path.join(self.output_dir, "", f"{file_name}-{timestamp}.srt")
+            if add_timestamp:
+                timestamp = datetime.now().strftime("%m%d%H%M%S")
+                file_name += f"-{timestamp}"
+
+            output_path = os.path.join(self.output_dir, "", f"{file_name}.srt")
             write_file(subtitle, output_path)
 
         elif file_ext == ".vtt":
@@ -161,11 +166,12 @@ class DeepLAPI:
                 progress(batch_end / len(parsed_dicts), desc="Translating..")
 
             subtitle = get_serialized_vtt(parsed_dicts)
-            timestamp = datetime.now().strftime("%m%d%H%M%S")
 
-            file_name = file_name[:-9]
-            output_path = os.path.join(self.output_dir, "", f"{file_name}-{timestamp}.vtt")
+            if add_timestamp:
+                timestamp = datetime.now().strftime("%m%d%H%M%S")
+                file_name += f"-{timestamp}"
 
+            output_path = os.path.join(self.output_dir, "", f"{file_name}.vtt")
             write_file(subtitle, output_path)
 
             files_info[file_name] = {"subtitle": subtitle, "path": output_path}
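
For reference, a minimal standalone sketch of the filename behavior introduced above; build_output_path is a hypothetical helper for illustration, not a function in this repository, and the example values are made up:

    import os
    from datetime import datetime

    def build_output_path(output_dir: str, file_name: str, file_ext: str, add_timestamp: bool) -> str:
        # Optionally append a "-MMDDhhmmss" suffix before the extension, as the new code does.
        if add_timestamp:
            timestamp = datetime.now().strftime("%m%d%H%M%S")
            file_name += f"-{timestamp}"
        return os.path.join(output_dir, f"{file_name}{file_ext}")

    # Example (hypothetical values):
    # build_output_path(os.path.join("outputs", "translations"), "movie", ".srt", add_timestamp=True)
    # -> "outputs/translations/movie-0101123000.srt" on POSIX; the suffix depends on the current time.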
modules/translation/nllb_inference.py CHANGED
@@ -7,8 +7,8 @@ from modules.translation.translation_base import TranslationBase
 
 class NLLBInference(TranslationBase):
     def __init__(self,
-                 model_dir: str,
-                 output_dir: str
+                 model_dir: str = os.path.join("models", "NLLB"),
+                 output_dir: str = os.path.join("outputs", "translations")
                  ):
         super().__init__(
             model_dir=model_dir,
modules/translation/translation_base.py CHANGED
@@ -11,8 +11,9 @@ from modules.utils.subtitle_manager import *
 
 class TranslationBase(ABC):
     def __init__(self,
-                 model_dir: str,
-                 output_dir: str):
+                 model_dir: str = os.path.join("models", "NLLB"),
+                 output_dir: str = os.path.join("outputs", "translations")
+                 ):
         super().__init__()
         self.model = None
         self.model_dir = model_dir
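
With these defaults in place, the translation classes can be constructed without arguments; a minimal usage sketch (model loading and the actual translation call are omitted):

    from modules.translation.nllb_inference import NLLBInference

    # Falls back to the new defaults: models/NLLB for weights, outputs/translations for results.
    nllb = NLLBInference()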
modules/whisper/faster_whisper_inference.py CHANGED
@@ -17,17 +17,15 @@ from modules.whisper.whisper_base import WhisperBase
 
 class FasterWhisperInference(WhisperBase):
     def __init__(self,
-                 model_dir: Optional[str] = None,
-                 diarization_model_dir: Optional[str] = None,
-                 output_dir: Optional[str] = None,
+                 model_dir: str = os.path.join("models", "Whisper", "faster-whisper"),
+                 diarization_model_dir: str = os.path.join("models", "Diarization"),
+                 output_dir: str = os.path.join("outputs"),
                  ):
         super().__init__(
             model_dir=model_dir,
             diarization_model_dir=diarization_model_dir,
             output_dir=output_dir
         )
-        if model_dir is None:
-            model_dir = os.path.join("models", "Whisper", "faster-whisper")
         self.model_dir = model_dir
         os.makedirs(self.model_dir, exist_ok=True)
 
modules/whisper/insanely_fast_whisper_inference.py CHANGED
@@ -17,17 +17,15 @@ from modules.whisper.whisper_base import WhisperBase
 
 class InsanelyFastWhisperInference(WhisperBase):
     def __init__(self,
-                 model_dir: Optional[str] = None,
-                 diarization_model_dir: Optional[str] = None,
-                 output_dir: Optional[str] = None,
+                 model_dir: str = os.path.join("models", "Whisper", "insanely-fast-whisper"),
+                 diarization_model_dir: str = os.path.join("models", "Diarization"),
+                 output_dir: str = os.path.join("outputs"),
                  ):
         super().__init__(
             model_dir=model_dir,
             output_dir=output_dir,
             diarization_model_dir=diarization_model_dir
         )
-        if model_dir is None:
-            model_dir = os.path.join("models", "Whisper", "insanely-fast-whisper")
         self.model_dir = model_dir
         os.makedirs(self.model_dir, exist_ok=True)
 
modules/whisper/whisper_Inference.py CHANGED
@@ -4,6 +4,7 @@ import time
 from typing import BinaryIO, Union, Tuple, List
 import numpy as np
 import torch
+import os
 from argparse import Namespace
 
 from modules.whisper.whisper_base import WhisperBase
@@ -12,9 +13,9 @@ from modules.whisper.whisper_parameter import *
 
 class WhisperInference(WhisperBase):
     def __init__(self,
-                 model_dir: Optional[str] = None,
-                 diarization_model_dir: Optional[str] = None,
-                 output_dir: Optional[str] = None,
+                 model_dir: str = os.path.join("models", "Whisper"),
+                 diarization_model_dir: str = os.path.join("models", "Diarization"),
+                 output_dir: str = os.path.join("outputs"),
                  ):
         super().__init__(
             model_dir=model_dir,
modules/whisper/whisper_base.py CHANGED
@@ -19,17 +19,10 @@ from modules.vad.silero_vad import SileroVAD
 
 class WhisperBase(ABC):
     def __init__(self,
-                 model_dir: Optional[str] = None,
-                 diarization_model_dir: Optional[str] = None,
-                 output_dir: Optional[str] = None,
+                 model_dir: str = os.path.join("models", "Whisper"),
+                 diarization_model_dir: str = os.path.join("models", "Diarization"),
+                 output_dir: str = os.path.join("outputs"),
                  ):
-        if model_dir is None:
-            model_dir = os.path.join("models", "Whisper")
-        if diarization_model_dir is None:
-            diarization_model_dir = os.path.join("models", "Diarization")
-        if output_dir is None:
-            output_dir = os.path.join("outputs")
-
         self.model_dir = model_dir
         self.output_dir = output_dir
         os.makedirs(self.output_dir, exist_ok=True)
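
The whisper-side changes all follow the same refactor: Optional[str] = None parameters plus `if ... is None` fallbacks in the body are replaced by path defaults declared directly in the signature. A schematic sketch of the pattern, using a hypothetical ExampleBase rather than a class from this repository:

    import os
    from abc import ABC

    class ExampleBase(ABC):
        def __init__(self,
                     model_dir: str = os.path.join("models", "Whisper"),
                     output_dir: str = os.path.join("outputs")):
            # Previously: model_dir: Optional[str] = None, followed by
            # `if model_dir is None: model_dir = os.path.join("models", "Whisper")` in the body.
            self.model_dir = model_dir
            self.output_dir = output_dir
            os.makedirs(self.output_dir, exist_ok=True)

Because the defaults are plain strings built from constants, evaluating them once at function-definition time is harmless here.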
modules/whisper/whisper_factory.py CHANGED
@@ -11,11 +11,11 @@ class WhisperFactory:
     @staticmethod
     def create_whisper_inference(
             whisper_type: str,
-            whisper_model_dir: Optional[str] = None,
-            faster_whisper_model_dir: Optional[str] = None,
-            insanely_fast_whisper_model_dir: Optional[str] = None,
-            diarization_model_dir: Optional[str] = None,
-            output_dir: Optional[str] = None,
+            whisper_model_dir: str = os.path.join("models", "Whisper"),
+            faster_whisper_model_dir: str = os.path.join("models", "Whisper", "faster-whisper"),
+            insanely_fast_whisper_model_dir: str = os.path.join("models", "Whisper", "insanely-fast-whisper"),
+            diarization_model_dir: str = os.path.join("models", "Diarization"),
+            output_dir: str = os.path.join("outputs"),
     ) -> "WhisperBase":
         """
         Create a whisper inference class based on the provided whisper_type.
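
Callers can now rely on those defaults and pass only the whisper type; a hedged usage sketch (the accepted whisper_type strings are not shown in this diff, so "faster-whisper" is an assumption):

    from modules.whisper.whisper_factory import WhisperFactory

    # Directory arguments are omitted; the signature defaults above are used.
    whisper_inferencer = WhisperFactory.create_whisper_inference(whisper_type="faster-whisper")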