import os
import yaml

# Locate config.yaml at the project root (one directory above this file).
current_dir = os.path.dirname(os.path.abspath(__file__))
project_root = os.path.abspath(os.path.join(current_dir, '..'))
config_path = os.path.join(project_root, 'config.yaml')
print(config_path)

# Parse the YAML config file. safe_load is used deliberately: it refuses
# arbitrary Python object construction, unlike yaml.load.
with open(config_path, 'r', encoding='utf-8') as file:
    config = yaml.safe_load(file)


def _mask_secret(secret):
    """Redact a secret for display: show only the last 4 characters.

    Returns the value unchanged when it is falsy (None / empty string) so
    missing keys still print legibly.
    """
    if not secret:
        return secret
    text = str(secret)
    return '****' + text[-4:]


# Hoist each section once; a KeyError here fails fast with a clear section
# name instead of repeating the lookup on every line below.
_default = config['default']
_openai = config['OPENAI']
_mistralai = config['MISTRALAI']

# Module-level configuration values (imported by other modules — names must
# stay stable).
default_models = _default['models']

openai_model_name = _openai['model_name']
openai_model_path = _openai['model_path']
openai_api_key = _openai['api_key']
openai_top_p = _openai['top_p']
openai_temperature = _openai['temperature']
openai_max_tokens = _openai['max_tokens']

mistralai_model_name = _mistralai['model_name']
mistralai_api_key = _mistralai['api_key']
mistralai_top_p = _mistralai['top_p']
mistralai_temperature = _mistralai['temperature']
mistralai_max_tokens = _mistralai['max_tokens']

# Echo the loaded configuration. API keys are masked: secrets must never be
# written to stdout/logs in cleartext.
print(f"DEFAULT_MODELS: {default_models}")
print(f"OPENAI_MODEL_NAME: {openai_model_name}")
print(f"OPENAI_MODEL_PATH: {openai_model_path}")
print(f"OPENAI_API_KEY: {_mask_secret(openai_api_key)}")
print(f"OPENAI_TOP_P: {openai_top_p}")
print(f"OPENAI_TEMPERATURE: {openai_temperature}")
print(f"OPENAI_MAX_TOKENS: {openai_max_tokens}")

print(f"MISTRALAI_MODEL_NAME: {mistralai_model_name}")
print(f"MISTRALAI_API_KEY: {_mask_secret(mistralai_api_key)}")
print(f"MISTRALAI_TOP_P: {mistralai_top_p}")
print(f"MISTRALAI_TEMPERATURE: {mistralai_temperature}")
print(f"MISTRALAI_MAX_TOKENS: {mistralai_max_tokens}")
