import os
import json
from dotenv import load_dotenv
from openai import OpenAI
from config import MY_MODEL, SYSTEM_PROMPT_DAMAI_TIPS

# Load a local .env file so DashScope credentials can be supplied via a dotfile.
load_dotenv()

# Shared OpenAI-compatible client pointed at Alibaba DashScope.
# Falls back to placeholder values when the environment is not configured.
_API_KEY = os.getenv("DASHSCOPE_API_KEY", "sk-xxx")
_BASE_URL = os.getenv(
    "DASHSCOPE_BASE_URL",
    "https://dashscope.aliyuncs.com/compatible-mode/v1",
)
client = OpenAI(api_key=_API_KEY, base_url=_BASE_URL)

def llm_gen(messages: list[dict], model: str = MY_MODEL) -> str:
    """Send *messages* to the chat-completions endpoint and return the raw reply.

    Args:
        messages: Chat messages in OpenAI format ({"role": ..., "content": ...}).
        model: Model identifier; defaults to the configured MY_MODEL.

    Returns:
        The full completion object serialized to a JSON string.
    """
    response = client.chat.completions.create(
        model=model,
        messages=messages,
        # DashScope-specific flag: disable the model's "thinking" mode.
        extra_body={"enable_thinking": False},
    )
    return response.model_dump_json()

def llm_gen_damai_tips(city: str, events: list[dict], system_prompt: str = SYSTEM_PROMPT_DAMAI_TIPS, system_prompt_path: str | None = None) -> dict:
    """Ask the LLM for Damai (ticketing) tips about *events* in *city*.

    Args:
        city: City name included in the user payload.
        events: Event dicts forwarded verbatim to the model.
        system_prompt: System prompt text; defaults to SYSTEM_PROMPT_DAMAI_TIPS.
        system_prompt_path: Optional path to a UTF-8 file whose contents
            override ``system_prompt`` when provided.

    Returns:
        The model's reply parsed as a dict.

    Raises:
        OSError: If ``system_prompt_path`` is given but cannot be read.
        json.JSONDecodeError: If the model's reply is not valid JSON even
            after Markdown fences are stripped.
    """
    if system_prompt_path:
        with open(system_prompt_path, "r", encoding="utf-8") as f:
            system_prompt = f.read()
    payload = {"city": city, "events": events}
    messages = [
        {"role": "system", "content": system_prompt},
        # ensure_ascii=False keeps CJK city/event names readable for the model.
        {"role": "user", "content": json.dumps(payload, ensure_ascii=False)},
    ]
    result_json = llm_gen(messages=messages)
    content = json.loads(result_json)["choices"][0]["message"]["content"]
    # Fix: chat models frequently wrap JSON replies in Markdown code fences
    # (```json ... ```); the original json.loads crashed on such replies.
    # Strip the fences so only the JSON body is parsed. Plain-JSON replies
    # are unaffected.
    content = content.strip()
    if content.startswith("```"):
        content = content.removeprefix("```json").removeprefix("```")
        content = content.removesuffix("```").strip()
    return json.loads(content)
