taesiri committed on
Commit 269743b
1 Parent(s): 280da27
Files changed (1)
  1. app.py +7 -0
app.py CHANGED
@@ -1,8 +1,15 @@
 
+ import os
  import gradio as gr
  import torch
  from PIL import Image
  from transformers import MllamaForConditionalGeneration, AutoProcessor
  from peft import PeftModel
+ from huggingface_hub import login
+
+ # Login to Hugging Face
+ if "HF_TOKEN" not in os.environ:
+     raise ValueError("Please set the HF_TOKEN environment variable with your Hugging Face token")
+ login(token=os.environ["HF_TOKEN"])

  # Load model and processor (do this outside the inference function to avoid reloading)
  base_model_path = "meta-llama/Llama-3.2-11B-Vision-Instruct"
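
The added block authenticates with the Hugging Face Hub before the gated Llama 3.2 checkpoint is downloaded. For context, here is a minimal sketch of how the imports and base_model_path above are typically wired together further down in app.py; it is not taken from this commit, and the adapter repo id "your-username/your-lora-adapter" is a hypothetical placeholder.

# Minimal sketch (assumptions noted): load the base vision-language model,
# its processor, and a LoRA adapter using the imports added in this commit.
import os
import torch
from transformers import MllamaForConditionalGeneration, AutoProcessor
from peft import PeftModel
from huggingface_hub import login

# Requires HF_TOKEN, as enforced by the check added in the diff above.
login(token=os.environ["HF_TOKEN"])

base_model_path = "meta-llama/Llama-3.2-11B-Vision-Instruct"
lora_weights_path = "your-username/your-lora-adapter"  # hypothetical adapter repo id

processor = AutoProcessor.from_pretrained(base_model_path)
base_model = MllamaForConditionalGeneration.from_pretrained(
    base_model_path,
    torch_dtype=torch.bfloat16,  # assumes a GPU with bfloat16 support
    device_map="auto",
)
# Attach the PEFT/LoRA adapter on top of the base model.
model = PeftModel.from_pretrained(base_model, lora_weights_path)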