from llm_set_up.llm_core import run_chat
from utils.set_logging import setup_logging
import logging
import os
from dotenv import load_dotenv
load_dotenv()

# Whether to use the OpenAI backend instead of the local Ollama backend.
# Accepts common truthy spellings ("true", "1", "yes"), case-insensitive;
# anything else — including an unset variable — selects Ollama (False).
# (Removed a leftover debug print of the raw env value that ran on every
# import and duplicated the backend message printed in main().)
USE_OPENAI_API = os.getenv('USE_OPENAI_API', 'False').lower() in ('true', '1', 'yes')

def main():
    """Entry point: set up logging, announce the selected backend, start the chat loop."""
    setup_logging()
    # Same two messages as before, selected via a conditional expression.
    backend_msg = (
        f"Using OpenAI backend {USE_OPENAI_API}"
        if USE_OPENAI_API
        else f"Using Ollama backend {USE_OPENAI_API}"
    )
    print(backend_msg)
    run_chat(USE_OPENAI_API=USE_OPENAI_API)


# Run the CLI only when executed as a script, not when imported as a module.
if __name__ == "__main__":
    main()