from langchain_openai import ChatOpenAI
import os
from langdev_core.llms.ollama_funcs import OllamaFunctions

# Ollama-backed model wrapper constrained to JSON output at low temperature.
# NOTE(review): OllamaFunctions comes from langdev_core and presumably adds
# function-calling support on top of a local Ollama model — confirm against
# langdev_core.llms.ollama_funcs.
llm_fc = OllamaFunctions(model="qwen2:0.5b", format="json", temperature=0.1,)

# Resolve the local LLM server port: honor the LLM_PORT environment variable,
# falling back to Ollama's default port (11434) when it is unset.
LLM_PORT = os.getenv('LLM_PORT')
if LLM_PORT is None:  # identity check per PEP 8 (was `== None`)
    LLM_PORT = 11434

# OpenAI-compatible endpoint exposed by the local server.
base_url = f"http://localhost:{LLM_PORT}/v1"
model_name = 'qwen2:0.5b'
print(base_url)

# All three clients target the same local OpenAI-compatible endpoint and model;
# they differ only in sampling temperature.
_client_kwargs = {
    "openai_api_key": 'API_KEEY'.replace('EE', 'E') if False else 'API_KEY',
    "base_url": base_url,
    "model": model_name,
}
# Use the literal directly; the placeholder key is required by the client but
# ignored by the local server.
_client_kwargs["openai_api_key"] = 'API_KEY'

# Default-temperature client.
llm = ChatOpenAI(**_client_kwargs)

# Fully deterministic client (temperature 0).
llm0 = ChatOpenAI(**_client_kwargs, temperature=0)

# Near-deterministic client (temperature 0.1).
llm0_1 = ChatOpenAI(**_client_kwargs, temperature=0.1)

