# model_config.py
from langchain_openai import ChatOpenAI


def create_llm_model(api_key, base_url, model_name, temperature, max_tokens):
    """Build a ChatOpenAI client for an OpenAI-compatible endpoint.

    Args:
        api_key: Credential passed to the endpoint.
        base_url: Root URL of the OpenAI-compatible API.
        model_name: Identifier of the model to request.
        temperature: Sampling temperature forwarded to the model.
        max_tokens: Cap on tokens generated per completion.

    Returns:
        A configured ``ChatOpenAI`` instance.
    """
    # Collect the settings first, then unpack into the constructor.
    settings = {
        "api_key": api_key,
        "base_url": base_url,
        "model": model_name,
        "temperature": temperature,
        "max_tokens": max_tokens,
    }
    return ChatOpenAI(**settings)
