gokilashree committed on
Commit
8e9ad66
1 Parent(s): ab786ba

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +10 -2
app.py CHANGED
@@ -2,6 +2,7 @@ from transformers import MBartForConditionalGeneration, MBart50Tokenizer, AutoMo
2
  import gradio as gr
3
  import torch
4
  from diffusers import FluxPipeline
 
5
 
6
  # Load the translation model and tokenizer
7
  model_name = "facebook/mbart-large-50-many-to-one-mmt"
@@ -16,8 +17,15 @@ text_model = AutoModelForCausalLM.from_pretrained(text_generation_model_name)
16
  # Create a pipeline for text generation using the selected model
17
  text_generator = pipeline("text-generation", model=text_model, tokenizer=text_tokenizer)
18
 
19
- # Set up the new FluxPipeline for the text-to-image model
20
- pipe = FluxPipeline.from_pretrained("black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16)
 
 
 
 
 
 
 
21
  pipe.enable_model_cpu_offload() # Enable CPU offloading to save GPU memory if needed
22
 
23
  # Function to generate an image using the new FluxPipeline model
 
2
  import gradio as gr
3
  import torch
4
  from diffusers import FluxPipeline
5
+ import os
6
 
7
  # Load the translation model and tokenizer
8
  model_name = "facebook/mbart-large-50-many-to-one-mmt"
 
17
  # Create a pipeline for text generation using the selected model
18
  text_generator = pipeline("text-generation", model=text_model, tokenizer=text_tokenizer)
19
 
20
+ # Get the Hugging Face API token from environment variables
21
+ hf_token = os.getenv("HF_TOKEN")
22
+
23
+ # Authenticate and set up the new FluxPipeline for the text-to-image model
24
+ pipe = FluxPipeline.from_pretrained(
25
+ "black-forest-labs/FLUX.1-dev",
26
+ use_auth_token=hf_token, # Use the token for authentication
27
+ torch_dtype=torch.bfloat16
28
+ )
29
  pipe.enable_model_cpu_offload() # Enable CPU offloading to save GPU memory if needed
30
 
31
  # Function to generate an image using the new FluxPipeline model