File size: 927 Bytes
f00f379 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 |
import os
import json
import requests
from dotenv import load_dotenv

# Load environment variables from a local .env file (no-op if the file is absent).
load_dotenv()

# Hugging Face API token; None if HF_TOKEN is not set in the environment.
HF_TOKEN = os.getenv("HF_TOKEN")

# Hosted Inference API endpoint for Mistral-7B-Instruct v0.3.
HF_API_URL = "https://api-inference.huggingface.co/models/mistralai/Mistral-7B-Instruct-v0.3"

# Shared request headers.
# NOTE(review): if HF_TOKEN is unset this sends "Bearer None" — the API will
# reject the request; consider failing fast at startup instead.
HEADERS = {
    "Authorization": f"Bearer {HF_TOKEN}",
    "Content-Type": "application/json"
}
def mistral_generate(prompt: str, max_new_tokens: int = 128, temperature: float = 0.7) -> str:
    """Generate text from Mistral-7B-Instruct via the Hugging Face Inference API.

    Args:
        prompt: Text prompt sent as the model input.
        max_new_tokens: Upper bound on tokens to generate.
        temperature: Sampling temperature passed to the endpoint.

    Returns:
        The generated text, stripped of surrounding whitespace, or "" on any
        transport/HTTP/parsing failure or unexpected response shape. Never None.
    """
    payload = {
        "inputs": prompt,
        "parameters": {
            "max_new_tokens": max_new_tokens,
            "temperature": temperature,
        },
    }
    try:
        # `json=` lets requests serialize the payload; the explicit HEADERS
        # still supply the Authorization bearer token.
        response = requests.post(HF_API_URL, headers=HEADERS, json=payload, timeout=30)
        response.raise_for_status()
        result = response.json()
    except (requests.RequestException, ValueError) as e:
        # RequestException covers connection errors, timeouts, and HTTP 4xx/5xx
        # (via raise_for_status); ValueError covers a non-JSON response body.
        print("Mistral API error:", e)
        return ""
    # Success responses are a list of {"generated_text": ...} dicts. Error
    # payloads come back as a dict (e.g. {"error": ...}) — return "" for those
    # too, instead of the original's implicit None fall-through.
    if isinstance(result, list) and result:
        return result[0].get("generated_text", "").strip()
    return ""
|