from langchain_openai import ChatOpenAI
# from langchain_ollama.llms import OllamaLLM
import os
from pathlib import Path
from core.testAgent.UserConfig import UserConfigError, get_config_section

# Maximum number of recursive agent steps (upper bound passed to the agent run).
RECURSION_LIMIT = 100

# LLM configuration: values from the "llm" section of the user config take
# precedence, with the process environment as a fallback.
_llm_section = get_config_section("llm")


def _llm_setting(config_key, env_var):
    # Config-file value wins; an empty/missing value falls back to the env var.
    return _llm_section.get(config_key) or os.environ.get(env_var)


OPENAI_API_BASE = _llm_setting("apiBase", "OPENAI_API_BASE")
OPENAI_API_KEY = _llm_setting("apiKey", "OPENAI_API_KEY")
LLM_MODEL = _llm_section.get("model", "deepseek-chat")

# Both the base URL and the key are required; fail fast with a config error.
if not (OPENAI_API_BASE and OPENAI_API_KEY):
    raise UserConfigError(
        "OPENAI_API_BASE or OPENAI_API_KEY is missing. "
        "Please set them under the 'llm' section in core/config/userConfig.json."
    )

# Export the resolved values so code that reads the environment sees the same ones.
os.environ.update(
    OPENAI_API_BASE=OPENAI_API_BASE,
    OPENAI_API_KEY=OPENAI_API_KEY,
)

# Shared chat-model client built from the resolved LLM settings above.
_llm_kwargs = {
    "model": LLM_MODEL,
    "base_url": OPENAI_API_BASE,
    "api_key": OPENAI_API_KEY,
    "temperature": 0.2,
}
llm = ChatOpenAI(**_llm_kwargs)

# Whether the LLM natively supports function calling (tool calls).
# NOTE(review): names below are not snake_case; kept as-is for backward
# compatibility with modules that import them.
Enable_Native_Function_Call = True

# Whether to restrict retrieval to test-case-related content.
Limit_Retrieve_Test_Case = False

# Path to the bundled CKGConstruction jar (shipped alongside the extension).
# Resolves to the empty string when the jar is not present in the core directory.
_jar_candidate = Path(__file__).resolve().parents[1] / "CKGConstruction-1.0-SNAPSHOT.jar"
CKGConstruction_Jar_Path = "" if not _jar_candidate.exists() else str(_jar_candidate)

# Neo4j connection settings: user-config value first, then the environment
# variable, then the empty string.
_neo4j_section = get_config_section("neo4j")
NEO4J_SERVER_URL, NEO4J_SERVER_USER, NEO4J_SERVER_PASSWORD = (
    _neo4j_section.get(config_key) or os.environ.get(env_var, "")
    for config_key, env_var in (
        ("serverUrl", "NEO4J_URL"),
        ("serverUser", "NEO4J_USER"),
        ("serverPassword", "NEO4J_PASSWORD"),
    )
)

# Java environment: JAVA_HOME from the user config, falling back to the
# process environment, defaulting to the empty string.
_env_section = get_config_section("environment")
_java_home = _env_section.get("JAVA_HOME")
if not _java_home:
    _java_home = os.environ.get("JAVA_HOME", "")
JAVA_HOME = _java_home