import os

import requests
import gradio as gr

# Read the Hugging Face API token from the environment.
api_token = os.environ.get("TOKEN")
API_URL = "https://api-inference.huggingface.co/models/meta-llama/Meta-Llama-3-8B-Instruct"
headers = {"Authorization": f"Bearer {api_token}"}


def query(payload):
    # Send the payload to the Inference API and return the parsed JSON response.
    response = requests.post(API_URL, headers=headers, json=payload)
    return response.json()


def analyze_sentiment(text):
    # Wrap the user input in the Llama 3 chat template before sending it to the model.
    output = query({
        "inputs": f'''<|begin_of_text|>
<|start_header_id|>system<|end_header_id|>
You'll only answer in English.
<|eot_id|>
<|start_header_id|>user<|end_header_id|>
{text}
<|eot_id|>
<|start_header_id|>assistant<|end_header_id|>
'''
    })
    # Handle the API output defensively: a successful call returns a list of generations.
    if isinstance(output, list) and len(output) > 0:
        return output[0].get('generated_text', 'Error: unexpected response')
    else:
        return "Error: unexpected response from the API"


demo = gr.Interface(
    fn=analyze_sentiment,
    inputs=["text"],
    outputs=["text"],
)

demo.launch()
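
# Optional sketch, not wired into the demo above: the text-generation task on the
# Inference API also accepts a "parameters" object alongside "inputs". The option
# names below ("max_new_tokens", "temperature", "return_full_text") reflect the
# Inference API documentation as I understand it; treat them as assumptions and
# verify them before relying on them. With "return_full_text": False the API
# returns only the newly generated tokens, so the Llama 3 chat template is not
# echoed back in 'generated_text'.
EXAMPLE_GENERATION_PARAMETERS = {
    "max_new_tokens": 256,      # cap the length of the model's reply (assumed option)
    "temperature": 0.7,         # sampling temperature (assumed option)
    "return_full_text": False,  # do not echo the prompt back in the output (assumed option)
}
# Possible usage: query({"inputs": prompt, "parameters": EXAMPLE_GENERATION_PARAMETERS})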