File size: 450 Bytes
29db9d8
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
# utils.py

import requests

def send_prompt_to_llm(prompt, model="llama3", endpoint="http://localhost:11434/v1/chat/completions", timeout=60):
    """Send a single user prompt to an OpenAI-compatible chat endpoint and return the reply text.

    Args:
        prompt: The user message to send.
        model: Model name to request (default "llama3").
        endpoint: Chat-completions URL; defaults to a local Ollama server.
        timeout: Seconds to wait for the server before giving up (default 60).
            requests has no timeout by default, so without this an unresponsive
            server would block the caller indefinitely.

    Returns:
        The assistant's reply text (first choice's message content).

    Raises:
        requests.HTTPError: If the server responds with a 4xx/5xx status.
        requests.Timeout: If the server does not respond within `timeout`.
        KeyError / IndexError: If the response JSON lacks the expected
            OpenAI-style "choices" structure.
    """
    response = requests.post(
        endpoint,
        json={
            "model": model,
            "messages": [{"role": "user", "content": prompt}],
        },
        timeout=timeout,  # fix: prevent an indefinite hang on a dead server
    )
    response.raise_for_status()  # surface 4xx/5xx as an exception instead of parsing an error body
    return response.json()["choices"][0]["message"]["content"]