# TonAI-Assistant / utils.py
import gradio as gr
import GPUtil
# Diffusion checkpoints selectable in the UI: "pretrained" entries are Hugging Face
# Hub repo ids, "file" entries are local .safetensors checkpoints.
DIFFUSION_CHECKPOINTS = {
    "General (SD 2.1)": {
        "path": "stabilityai/stable-diffusion-2-1",
        "type": "pretrained",
        "pipeline": "StableDiffusionPipeline"
    },
    "Realistic (SD 1.5)": {
        "path": "/root/tungn197/genAI/checkpoints/realisticVisionV60B1_v51HyperVAE.safetensors",
        "type": "file",
        "pipeline": "StableDiffusionPipeline"
    },
    "Anime (SD 1.5)": {
        "path": "/root/tungn197/genAI/checkpoints/darkSushiMixMix_225D.safetensors",
        "type": "file",
        "pipeline": "StableDiffusionPipeline"
    },
    "Comic Book (SD 1.5)": {
        "path": "/media/drive-2t/tungn197/checkpoints/realisticComicBook_v10.safetensors",
        "type": "file",
        "pipeline": "StableDiffusionPipeline"
    },
    "MajicMixRealisticV7 (SD 1.5 Focus on Asian face)": {
        "path": "/root/tungn197/genAI/checkpoints/majicmixRealistic_v7.safetensors",
        "type": "file",
        "pipeline": "StableDiffusionPipeline"
    },
    "AniMeshFullV22 (SD 1.5)": {
        "path": "/media/drive-2t/tungn197/checkpoints/animesh_FullV22.safetensors",
        "type": "file",
        "pipeline": "StableDiffusionPipeline"
    },
    "epiCRealism XL (SDXL 1.0)": {
        "path": "/root/tungn197/genAI/checkpoints/epicrealismXL_v7FinalDestination.safetensors",
        "type": "file",
        "pipeline": "StableDiffusionXLPipeline"
    },
    "Juggernaut X Hyper (SDXL 1.0)": {
        "path": "RunDiffusion/Juggernaut-X-Hyper",
        "type": "pretrained",
        "pipeline": "StableDiffusionXLPipeline"
    }
}
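
# Illustrative sketch (not part of the original module): how an entry from
# DIFFUSION_CHECKPOINTS could be dispatched to a diffusers pipeline. The helper name
# `load_checkpoint` and the lazy diffusers import are assumptions; the caller would
# typically move the returned pipeline to the target device afterwards.
def load_checkpoint(name):
    from diffusers import StableDiffusionPipeline, StableDiffusionXLPipeline
    entry = DIFFUSION_CHECKPOINTS[name]
    pipeline_cls = {
        "StableDiffusionPipeline": StableDiffusionPipeline,
        "StableDiffusionXLPipeline": StableDiffusionXLPipeline,
    }[entry["pipeline"]]
    if entry["type"] == "pretrained":
        # Hugging Face Hub repo id
        return pipeline_cls.from_pretrained(entry["path"])
    # local single-file .safetensors checkpoint
    return pipeline_cls.from_single_file(entry["path"])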

# App-level configuration: login credentials, target CUDA device, and Gradio UI theme.
AUTH_USER = [("admin", "admin")]
DEVICE = "cuda:0"
APP_THEME = gr.Theme.from_hub("ParityError/Interstellar")
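
# Usage note (assumption, not in the original file): AUTH_USER is shaped for Gradio's
# launch(auth=...) argument and APP_THEME for gr.Blocks(theme=...), e.g.
#     demo = gr.Blocks(theme=APP_THEME)
#     demo.launch(auth=AUTH_USER)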


def read_md_file_to_string(file_path):
    """Read a text/markdown file and return its content, or None if it cannot be read."""
    try:
        with open(file_path, 'r', encoding='utf-8') as file:
            file_content = file.read()
            return file_content
    except FileNotFoundError:
        print(f"The file at {file_path} was not found.")
    except Exception as e:
        print(f"An error occurred: {e}")


def nearest_divisible_by_8(n):
    """Round n to the nearest multiple of 8 (ties round up)."""
    lower_multiple = (n // 8) * 8
    upper_multiple = lower_multiple + 8
    if (n - lower_multiple) < (upper_multiple - n):
        return int(lower_multiple)
    else:
        return int(upper_multiple)
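
# Worked examples (sketch): Stable Diffusion expects width/height divisible by 8,
# so user-supplied image sizes can be snapped to the nearest valid value:
#     nearest_divisible_by_8(512) -> 512
#     nearest_divisible_by_8(515) -> 512   (3 below vs. 5 above)
#     nearest_divisible_by_8(517) -> 520   (5 below vs. 3 above)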


def get_gpu_info():
    """Query all visible GPUs via GPUtil and return a list of per-GPU stat dicts."""
    gpus = GPUtil.getGPUs()
    gpu_info = []
    for gpu in gpus:
        info = {
            'id': gpu.id,
            'name': gpu.name,
            'driver_version': gpu.driver,
            'total_memory': gpu.memoryTotal,      # in MB
            'available_memory': gpu.memoryFree,   # in MB
            'used_memory': gpu.memoryUsed,        # in MB
            'temperature': gpu.temperature        # in degrees Celsius
        }
        gpu_info.append(info)
    return gpu_info


def display_gpu_info():
    """Format get_gpu_info() into human-readable one-line summaries."""
    info_list = []
    gpus = get_gpu_info()
    for info in gpus:
        info_list.append(
            f"GPU {info['id']} ({info['name']}, "
            f"Total: {info['total_memory']} MB, Available: {info['available_memory']} MB)"
        )
    return info_list


# Enumerate GPUs once at import time so the UI can offer them as device choices.
GPUS = get_gpu_info()
DISPLAYED_GPUS_INFO = []
for info in GPUS:
    DISPLAYED_GPUS_INFO.append(f"GPU {info['id']}")
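
# Illustrative sketch (assumption, not in the original file): these labels are the kind
# of values one would expose as a device selector in the Gradio UI, e.g.
#     gr.Dropdown(choices=DISPLAYED_GPUS_INFO, value=DISPLAYED_GPUS_INFO[0], label="GPU")
# (guarding against an empty list on CPU-only machines).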