jhj0517 committed
Commit 3408366 · 1 Parent(s): c8fe600

fix spaces bug

modules/diarize/diarizer.py CHANGED
@@ -10,7 +10,6 @@ from modules.diarize.audio_loader import load_audio
 
 
 class Diarizer:
-    @spaces.GPU
     def __init__(self,
                  model_dir: str = os.path.join("models", "Diarization")
                  ):
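This hunk moves @spaces.GPU off the constructor: on ZeroGPU Spaces the decorator requests a GPU for the duration of the decorated call, so wrapping __init__ ties an allocation to object construction instead of to the work that actually needs CUDA. A minimal sketch of the intended pattern, with a hypothetical run() method used purely for illustration (the real project decorates its inference and utility functions instead):

import os

import spaces
import torch


class Diarizer:
    def __init__(self, model_dir: str = os.path.join("models", "Diarization")):
        # CPU-only setup: constructing the object no longer requests a GPU.
        self.model_dir = model_dir
        self.pipeline = None

    @spaces.GPU  # on ZeroGPU Spaces, a GPU is attached only while this call runs
    def run(self, audio_path: str):
        # Hypothetical inference entry point, shown for illustration only.
        device = "cuda" if torch.cuda.is_available() else "cpu"
        print(f"diarizing {audio_path} on {device}")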
modules/translation/nllb_inference.py CHANGED
@@ -7,7 +7,6 @@ from modules.translation.translation_base import TranslationBase
 
 
 class NLLBInference(TranslationBase):
-    @spaces.GPU
     def __init__(self,
                  model_dir: str,
                  output_dir: str
modules/translation/translation_base.py CHANGED
@@ -4,6 +4,7 @@ import gradio as gr
 from abc import ABC, abstractmethod
 from typing import List
 from datetime import datetime
+import spaces
 
 from modules.whisper.whisper_parameter import *
 from modules.utils.subtitle_manager import *
@@ -127,6 +128,7 @@ class TranslationBase(ABC):
         self.remove_input_files([fileobj.name for fileobj in fileobjs])
 
     @staticmethod
+    @spaces.GPU
     def get_device():
         if torch.cuda.is_available():
             return "cuda"
@@ -136,6 +138,7 @@ class TranslationBase(ABC):
         return "cpu"
 
     @staticmethod
+    @spaces.GPU
     def release_cuda_memory():
         if torch.cuda.is_available():
             torch.cuda.empty_cache()
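For reference, once the hunks above apply, the two statics in TranslationBase read as below. This is assembled only from the context lines shown in this diff; the decorator stacking puts spaces.GPU directly on the function and staticmethod around the result:

    @staticmethod
    @spaces.GPU
    def get_device():
        if torch.cuda.is_available():
            return "cuda"
        # (intermediate branches not shown in this diff are omitted here)
        return "cpu"

    @staticmethod
    @spaces.GPU
    def release_cuda_memory():
        if torch.cuda.is_available():
            torch.cuda.empty_cache()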
modules/whisper/whisper_base.py CHANGED
@@ -19,7 +19,6 @@ from modules.vad.silero_vad import SileroVAD
 
 
 class WhisperBase(ABC):
-    @spaces.GPU
     def __init__(self,
                  model_dir: str,
                  output_dir: str,
@@ -400,6 +399,7 @@ class WhisperBase(ABC):
         return time_str.strip()
 
     @staticmethod
+    @spaces.GPU
     def get_device():
         if torch.cuda.is_available():
             return "cuda"