from langchain_openai import ChatOpenAI
from langchain_ollama import ChatOllama
from langchain.chat_models.base import init_chat_model
import streamlit as st

def load_llm_model(model_config):
    """Instantiate a LangChain chat model from a configuration mapping.

    Args:
        model_config: Mapping with at least a ``"provider"`` key; every other
            key (``"model"``, ``"temperature"``, ``"base_url"``, ...) is
            forwarded to the model constructor.

    Returns:
        A chat model instance: ``ChatOllama`` for provider ``"ollama"``,
        ``ChatOpenAI`` for ``"openai"``, otherwise whatever
        ``init_chat_model`` resolves for the given provider.

    Raises:
        KeyError: if ``"provider"`` is missing, or if ``"model"`` is missing
            when falling back to ``init_chat_model``.
    """
    provider = model_config["provider"]
    # Everything except the provider selector is constructor kwargs.
    other_config = {k: v for k, v in model_config.items() if k != "provider"}
    if provider == "ollama":
        return ChatOllama(**other_config)
    if provider == "openai":
        return ChatOpenAI(**other_config)
    # Fallback: let LangChain resolve the provider. Forward the remaining
    # config too — the previous version dropped everything except "model",
    # silently losing settings like temperature for non-ollama/openai
    # providers. init_chat_model passes extra kwargs to the model constructor.
    extra_kwargs = {k: v for k, v in other_config.items() if k != "model"}
    return init_chat_model(
        model=model_config["model"],
        model_provider=provider,
        **extra_kwargs,
    )