import os
from dotenv import load_dotenv
from langchain_openai import ChatOpenAI
from langchain_core.example_selectors import LengthBasedExampleSelector
from langchain_core.prompts import FewShotPromptTemplate, PromptTemplate

# Load variables from a local .env file into the process environment
# (supplies OPENAI_API_BASE / OPENAI_API_KEY read further down);
# verbose=True makes python-dotenv report what it loaded.
load_dotenv(verbose=True)

# Antonym word pairs used as few-shot demonstrations.  The dict keys
# ("input"/"output") must match the input_variables of `example_prompt`.
examples = [
    {"input": "happy", "output": "sad"},
    {"input": "tall", "output": "short"},
    {"input": "big", "output": "small"},
    {"input": "fast", "output": "slow"},
    {"input": "高兴", "output": "悲伤"},
]

# Renders one few-shot example; input variables ("input", "output") are
# inferred from the template placeholders.
example_prompt = PromptTemplate.from_template("原词：{input}\n反义：{output}")

# Picks however many examples fit under max_length (word-count based by
# default), so long user inputs automatically get fewer demonstrations.
example_selector = LengthBasedExampleSelector(
    example_prompt=example_prompt,
    examples=examples,
    max_length=20,
)

# Few-shot prompt whose example list is chosen at format time by the
# length-based selector instead of being fixed at construction.
dynamic_prompt = FewShotPromptTemplate(
    input_variables=["input"],
    prefix="给出输入词的反义词",
    example_selector=example_selector,
    example_prompt=example_prompt,
    suffix="原词：{input}\n反义：",
)

# print(dynamic_prompt.format(input="weak"))

# long_str = "happy tall big fast slow good bad strong weak cold hot wet dry light dark"
# print(dynamic_prompt.format(input=long_str))

# Chat model client.  `base_url` / `api_key` are the current parameter
# names in langchain_openai; the old `openai_api_base` / `openai_api_key`
# spellings are deprecated aliases.  Passing None (env var unset) lets the
# client fall back to its own OPENAI_* environment lookup.
llm = ChatOpenAI(
    model="gpt-4o-mini",
    temperature=0,  # deterministic output suits a lookup-style task
    base_url=os.getenv("OPENAI_API_BASE"),
    api_key=os.getenv("OPENAI_API_KEY"),
)

# LCEL pipeline: format the dynamic few-shot prompt, then call the model.
chain = dynamic_prompt | llm

# invoke() returns an AIMessage; print its text content rather than the
# full message repr so the output is just the antonym itself.
print(chain.invoke({"input": "weak"}).content)
