import gradio as gr
from transformers import AutoTokenizer, AutoModelForSequenceClassification
import torch

tokenizer = AutoTokenizer.from_pretrained("akhooli/mistral-7B-llm")
model = AutoModelForSequenceClassification.from_pretrained("akhooli/mistral-7B-llm")


def predict(image_path):
    # Load the uploaded image as raw bytes
    with open(image_path, "rb") as f:
        image_bytes = f.read()
    # The tokenizer expects a string, so decode the raw bytes before tokenizing;
    # cap the sequence length so a large image does not produce a huge input
    text = image_bytes.decode("latin-1")
    inputs = tokenizer(text, return_tensors="pt", padding=True, truncation=True, max_length=512)
    with torch.no_grad():
        outputs = model(**inputs)
    predicted_class_idx = torch.argmax(outputs.logits).item()
    # In this example, we are assuming the labels are ['pizza', 'burger', 'sandwich']
    labels = ['pizza', 'burger', 'sandwich']
    predicted_label = labels[predicted_class_idx]
    return predicted_label


gr.Interface(
    predict,
    # type="filepath" hands predict() a path on disk that it can open directly
    inputs=gr.Image(label="Upload junk food (sandwich, pizza, burger) candidate", type="filepath"),
    outputs=gr.Label(num_top_classes=3),
    title="Pizza, Burger, or Sandwich?",
).launch()