"""Chat with a local Ollama model through the Langfuse-instrumented OpenAI client.

Sends a short multi-turn conversation to an OpenAI-compatible endpoint served
by Ollama and prints the assistant's reply. The Langfuse wrapper records the
call as a trace, tagged with a session id and user id via ``metadata``.
"""

from dotenv import load_dotenv
from langfuse.openai import OpenAI

# Load Langfuse credentials (LANGFUSE_* keys) from a local .env file before
# the instrumented client is used.
load_dotenv()


def main() -> None:
    """Send the moon-landing conversation to the local model and print the reply."""
    # Ollama exposes an OpenAI-compatible API at this base URL. An api_key is
    # required by the client library but ignored by Ollama.
    client = OpenAI(
        base_url='http://localhost:11434/v1',
        api_key='ollama',  # required, but unused
    )

    response = client.chat.completions.create(
        model="qwen3:0.6b",
        messages=[
            {"role": "system", "content": "You are a helpful assistant."},
            {"role": "user", "content": "Who was the first person to step on the moon?"},
            {"role": "assistant", "content": "Neil Armstrong was the first person to step on the moon on July 20, 1969, during the Apollo 11 mission."},
            {"role": "user", "content": "What were his first words when he stepped on the moon?"},
        ],
        # Langfuse-specific metadata: groups this generation under a session
        # and attributes it to a user in the Langfuse UI.
        metadata={
            "langfuse_session_id": "2474c876-1d7f-4298-8f5d-249b9e7dfc2a",
            "langfuse_user_id": "sufe",
        },
    )
    print(response.choices[0].message.content)


# Guard the network call so importing this module has no side effects beyond
# loading the .env file.
if __name__ == "__main__":
    main()