charbelgrower committed
Commit 533fe99
1 Parent(s): 8562112
Files changed (2)
  1. Result.mp4 +0 -0
  2. app.py +21 -18
Result.mp4 ADDED
Binary file (220 kB).
 
app.py CHANGED
@@ -43,7 +43,7 @@ user_args = parser.parse_args()
 ## ------------------------------ DEFAULTS ------------------------------
 
 USE_COLAB = user_args.colab
-USE_CUDA = user_args.cuda
+USE_CUDA = False
 DEF_OUTPUT_PATH = user_args.out_dir
 BATCH_SIZE = int(user_args.batch_size)
 WORKSPACE = None
@@ -63,7 +63,8 @@ MASK_INCLUDE = [
     "Nose",
     "Mouth",
     "L-Lip",
-    "U-Lip"
+    "U-Lip",
+    "Hair"
 ]
 MASK_SOFT_KERNEL = 17
 MASK_SOFT_ITERATIONS = 10
@@ -82,22 +83,9 @@ FACE_ENHANCER_LIST.extend(cv2_interpolations)
 ## ------------------------------ SET EXECUTION PROVIDER ------------------------------
 # Note: Non CUDA users may change settings here
 
-PROVIDER = ["CPUExecutionProvider"]
-
-if USE_CUDA:
-    available_providers = onnxruntime.get_available_providers()
-    if "CUDAExecutionProvider" in available_providers:
-        print("\n********** Running on CUDA **********\n")
-        PROVIDER = ["CUDAExecutionProvider", "CPUExecutionProvider"]
-        cv2.setNumThreads(32)
-    else:
-        USE_CUDA = False
-        print("\n********** CUDA unavailable running on CPU **********\n")
-else:
-    USE_CUDA = False
-    print("\n********** Running on CPU **********\n")
-
-device = "cuda" if USE_CUDA else "cpu"
+PROVIDER = ["CPUExecutionProvider"]  # Default to CPU provider
+device = "cpu"
+
 EMPTY_CACHE = lambda: torch.cuda.empty_cache() if device == "cuda" else None
 
 ## ------------------------------ LOAD MODELS ------------------------------
@@ -166,8 +154,23 @@ def process(
     global WORKSPACE
     global OUTPUT_FILE
     global PREVIEW
-    WORKSPACE, OUTPUT_FILE, PREVIEW = None, None, None
-
+    global USE_CUDA  # Access global variables
+    global device
+    global PROVIDER
+    global FACE_ANALYSER, FACE_SWAPPER, FACE_ENHANCER, FACE_PARSER, NSFW_DETECTOR
+
+    WORKSPACE, OUTPUT_FILE, PREVIEW = None, None, None
+    USE_CUDA = True
+    device = "cuda"
+    PROVIDER = ["CUDAExecutionProvider", "CPUExecutionProvider"]
+
+    # Reset models to None to reload them with GPU
+    FACE_ANALYSER = None
+    FACE_SWAPPER = None
+    FACE_ENHANCER = None
+    FACE_PARSER = None
+    NSFW_DETECTOR = None
+
 ## ------------------------------ GUI UPDATE FUNC ------------------------------
 
 def ui_before():
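
Net effect of this change: the Space now starts on the CPU execution provider and only switches to CUDA inside process(), relying on the model loaders to rebuild their ONNX Runtime sessions once the module-level globals are reset to None. Below is a minimal sketch of that lazy-reload pattern; the helper name get_face_swapper and the model path are illustrative assumptions and are not taken from app.py, while the onnxruntime.InferenceSession call itself is standard API.

# Sketch: lazy ONNX Runtime session reload (illustrative names, not from app.py)
import onnxruntime

PROVIDER = ["CPUExecutionProvider"]  # module-level default, as in this commit
FACE_SWAPPER = None

def get_face_swapper(model_path="swapper.onnx"):  # hypothetical path
    # The session is built with whatever PROVIDER currently holds, so after
    # process() switches PROVIDER to CUDA and resets FACE_SWAPPER to None,
    # the next call here recreates the session on the GPU.
    global FACE_SWAPPER
    if FACE_SWAPPER is None:
        FACE_SWAPPER = onnxruntime.InferenceSession(model_path, providers=PROVIDER)
    return FACE_SWAPPER

Under that assumption, process() only has to flip PROVIDER/device and null out the model globals; the first subsequent call to each loader rebuilds its session with the CUDA provider.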