"""Gradio demo: classify an uploaded image as "hot dog" or "not hot dog".

Uses the `julien-c/hotdog-not-hotdog` image-classification model via a
`transformers` pipeline and serves it through a minimal Gradio interface.

NOTE(review): the ruDALL-E model, tokenizer and VAE loaded below (plus the
translator objects and several imports) are never used by the demo —
presumably leftovers from an earlier version of this Space; confirm with the
author before deleting them, since loading them is a module-level side effect.
"""
# --- stdlib ---
import csv
import gc
import logging
import multiprocessing
import os
import random
from datetime import datetime
from pathlib import Path

# --- third-party ---
import gradio as gr
import PIL
import requests
import torch
from deep_translator import DeeplTranslator, GoogleTranslator
from PIL import Image
from psutil import virtual_memory
from transformers import pipeline
from translatepy import Translator, Language

from rudalle import get_rudalle_model, get_tokenizer, get_vae, get_realesrgan
from rudalle.pipelines import generate_images, show, super_resolution, cherry_pick_by_ruclip
from rudalle.utils import seed_everything
from ruclip import load as get_ruclip

ts = Translator()

# Pick the GPU only when one is present.  The original hard-coded 'cuda'
# (with the availability check commented out), which crashes on CPU-only
# hosts.  fp16 weights are only loaded on CUDA — NOTE(review): confirm the
# Malevich checkpoint runs acceptably in fp32 on CPU.
device = 'cuda' if torch.cuda.is_available() else 'cpu'

# prepare models (currently unused by the classifier demo below):
model = get_rudalle_model('Malevich', pretrained=True, fp16=(device == 'cuda'), device=device)
tokenizer = get_tokenizer()
vae = get_vae(dwt=True).to(device)

# Renamed from `pipeline` to `classifier`: the original rebound the name
# `pipeline`, shadowing the `transformers.pipeline` factory imported above.
classifier = pipeline(task="image-classification", model="julien-c/hotdog-not-hotdog")


def predict(image):
    """Classify *image* (a file path, per the Gradio input config).

    Returns a dict mapping each predicted label to its confidence score,
    the shape `gr.outputs.Label` expects.
    """
    predictions = classifier(image)
    return {p["label"]: p["score"] for p in predictions}


if __name__ == "__main__":
    # Guarded so importing this module (e.g. for testing) does not start
    # the web server; running it as a script behaves as before.
    gr.Interface(
        predict,
        inputs=gr.inputs.Image(label="Upload hot dog candidate", type="filepath"),
        outputs=gr.outputs.Label(num_top_classes=2),
        title="Hot Dog? Or Not?",
    ).launch()