Upload app.py
app.py CHANGED
@@ -9,6 +9,7 @@ import random
 import config
 import utils
 import logging
+import prompt_generator
 from PIL import Image, PngImagePlugin
 from datetime import datetime
 from diffusers.models import AutoencoderKL
@@ -68,7 +69,6 @@ def initialize_llm():
     """Function that initializes only the LLM at application startup"""
 
     if TEXT_TO_PROMPT_ENABLED:
-        import prompt_generator
         torch.backends.cudnn.deterministic = True
         torch.backends.cudnn.benchmark = False
         torch.backends.cuda.matmul.allow_tf32 = True