import os
import json

import requests
from dotenv import load_dotenv

# Load environment variables from a local .env file (expects HF_TOKEN to be defined there).
load_dotenv()
HF_TOKEN = os.getenv("HF_TOKEN")

# Hugging Face Inference API endpoint for Mistral-7B-Instruct-v0.3.
HF_API_URL = "https://api-inference.huggingface.co/models/mistralai/Mistral-7B-Instruct-v0.3"

HEADERS = {
    "Authorization": f"Bearer {HF_TOKEN}",
    "Content-Type": "application/json",
}
def mistral_generate(prompt: str, max_new_tokens: int = 128, temperature: float = 0.7) -> str:
    """Call the hosted Mistral model and return the generated text, or "" on failure."""
    payload = {
        "inputs": prompt,
        "parameters": {
            "max_new_tokens": max_new_tokens,
            "temperature": temperature,
        },
    }
    try:
        response = requests.post(HF_API_URL, headers=HEADERS, data=json.dumps(payload), timeout=30)
        response.raise_for_status()
        result = response.json()
        # A successful call returns a list of generations; take the first entry's text.
        if isinstance(result, list) and result:
            return result[0].get("generated_text", "").strip()
    except Exception as e:
        print("Mistral API error:", e)
    # Fall back to an empty string on HTTP errors or unexpected response shapes.
    return ""