from openai import OpenAI


# Endpoint of a local Ollama server exposing an OpenAI-compatible API.
base_url = 'http://10.26.32.85:11434/v1'
# Default model tag; each chat_model() call can override it via keyword args.
model_uid = "llama3.1:70b"
# Default generation limits shared by all calls.
max_tokens = 128
temperature = 0.7

# NOTE(review): 'ollama' is a placeholder key — the OpenAI client requires a
# non-empty api_key, and the local server presumably does not validate it (TODO confirm).
client = OpenAI(
    base_url=base_url,
    api_key='ollama',
)

def chat_model(prompt: str, model_uid: str = model_uid, max_tokens: int = max_tokens, temperature: float = temperature) -> str:
    """Send a single-turn user prompt to the chat model and return its reply.

    Args:
        prompt: The user message content.
        model_uid: Model identifier passed to the server (defaults to the
            module-level constant).
        max_tokens: Maximum number of tokens to generate.
        temperature: Sampling temperature.

    Returns:
        The assistant's reply text (content of the first choice).
    """
    response = client.chat.completions.create(
        model=model_uid,  # already a str — no f-string wrapper needed
        messages=[
            {
                "content": prompt,
                "role": "user",
            }
        ],
        max_tokens=max_tokens,
        temperature=temperature,
    )
    return response.choices[0].message.content

if __name__ == '__main__':
    # Smoke test: send the same prompt 10 times and print each reply.
    prompt = "tell me your favorite color"  # loop-invariant, hoisted out of the loop
    for _ in range(10):  # index unused, so use the conventional underscore
        print(chat_model(prompt))