Spaces: Runtime error

# V03
import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM
import torch

# Model to use (an AutoGPTQ int8 quantization of Falcon3-1B-Instruct)
model_name = "fbaldassarri/tiiuae_Falcon3-1B-Instruct-autogptq-int8-gs128-asym"

def load_model():
    """Load the model and the tokenizer."""
    model = AutoModelForCausalLM.from_pretrained(model_name, trust_remote_code=True)
    tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
    return model, tokenizer
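
# The checkpoint name indicates an AutoGPTQ int8 quantization, so
# from_pretrained needs a GPTQ backend available next to transformers
# (typically `optimum` plus `auto-gptq`, or `gptqmodel` with recent
# transformers releases). A requirements.txt without such a backend is a
# plausible cause of the "Runtime error" status shown above.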

def generate_text(model, tokenizer, input_text, max_length, temperature):
    """Generate text with the model."""
    inputs = tokenizer(input_text, return_tensors="pt")
    # do_sample=True is needed for temperature to have any effect;
    # max_length counts prompt tokens plus generated tokens.
    output = model.generate(**inputs, max_length=max_length, do_sample=True, temperature=temperature)
    return tokenizer.decode(output[0], skip_special_tokens=True)
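
# If the "Maximum length" slider should bound only the generated continuation
# rather than prompt + completion, max_new_tokens is the closer fit; a sketch
# using the same transformers generate API:
#
#     output = model.generate(**inputs, max_new_tokens=max_length,
#                             do_sample=True, temperature=temperature)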

# Load the model and tokenizer once at startup rather than on every click.
model, tokenizer = load_model()

def main(input_text, max_length, temperature):
    """Gradio callback: generate text from the UI inputs."""
    return generate_text(model, tokenizer, input_text, max_length, temperature)
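
# Quick smoke test of the generation path without the UI (uncomment to run;
# the prompt below is only an example):
# print(main("Write a one-sentence greeting.", max_length=80, temperature=0.7))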

with gr.Blocks() as demo:
    gr.Markdown("# Language Model")
    with gr.Row():
        input_text = gr.Textbox(label="Input text")
    with gr.Row():
        max_length_slider = gr.Slider(50, 500, label="Maximum length", value=200)
        temperature_slider = gr.Slider(0.1, 1.0, label="Temperature", value=0.7)
    with gr.Row():
        submit_button = gr.Button("Submit")
        output_text = gr.Textbox(label="Generated text")
    # queue=False makes this event bypass Gradio's request queue.
    submit_button.click(
        main,
        inputs=[input_text, max_length_slider, temperature_slider],
        outputs=output_text,
        queue=False,
    )

if __name__ == "__main__":
    demo.launch()
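
# Local usage (assuming this file is saved as app.py, as is standard for a
# Gradio Space): run `python app.py` and open the URL Gradio prints
# (http://127.0.0.1:7860 by default).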