from langchain_core.messages import HumanMessage, AIMessage, SystemMessage
import os
from langchain_community.llms import SparkLLM
from langchain_core.prompt_values import PromptValue
from langchain_core.prompts import PromptTemplate, StringPromptTemplate, FewShotPromptTemplate, \
    SystemMessagePromptTemplate, HumanMessagePromptTemplate, ChatPromptTemplate

from ChatGLM_new import xinghuo_llm


#num = xinghuo_llm.get_num_tokens("fasfdsafreqwrwe")
#print(num)


# Demo conversation for the chat model.
# Fix: the system message must come FIRST — most chat models only honor a
# system prompt when it leads the message list, so placing it after an
# AIMessage (as before) can silently disable the role instruction.
messages = [
    SystemMessage(content="你是文学家,只会回答文学问题."),
    AIMessage(content="Hi."),
    HumanMessage(content="写一首四句的诗,写一幅对联"),
]
#res = xinghuo_llm.invoke(messages)
#print(res.content)



# Single-variable prompt: naming consultant for a new company.
# The leading/trailing newlines mirror the original triple-quoted literal.
template = (
    "\n"
    "我希望你能充当新公司的命名顾问。\n"
    "一个生产{product}的公司的好名字是什么？\n"
)

# from_template infers the input variables ({product}) from the string itself,
# equivalent to passing input_variables=["product"] explicitly.
prompt = PromptTemplate.from_template(template)

temp_prompt = prompt.format(product="彩色袜子")
print(temp_prompt)



# Example prompt with multiple input variables; from_template infers
# {adjective} and {content} from the template string.
multiple_input_prompt = PromptTemplate.from_template(
    "给我讲一个{adjective}的关于{content}的笑话。"
)
multiple_input_prompt_temp = multiple_input_prompt.format(
    adjective="有趣的", content="小鸡"
)
print(multiple_input_prompt_temp)



print("################少样本提示模板#########################")
# Few-shot examples in the "self-ask" style: each example pairs a question
# with a worked chain of follow-up questions and intermediate answers,
# ending in "So the final answer is: ...".
# NOTE: the multiline answer strings are content, not formatting — their
# leading/trailing newlines and zero indentation are deliberate and must
# not be re-indented.
examples = [
  {
    "question": "Who lived longer, Muhammad Ali or Alan Turing?",
    "answer":
"""
Are follow up questions needed here: Yes.
Follow up: How old was Muhammad Ali when he died?
Intermediate answer: Muhammad Ali was 74 years old when he died.
Follow up: How old was Alan Turing when he died?
Intermediate answer: Alan Turing was 41 years old when he died.
So the final answer is: Muhammad Ali
"""
  },
  {
    "question": "When was the founder of craigslist born?",
    "answer":
"""
Are follow up questions needed here: Yes.
Follow up: Who was the founder of craigslist?
Intermediate answer: Craigslist was founded by Craig Newmark.
Follow up: When was Craig Newmark born?
Intermediate answer: Craig Newmark was born on December 6, 1952.
So the final answer is: December 6, 1952
"""
  },
  {
    "question": "Who was the maternal grandfather of George Washington?",
    "answer":
"""
Are follow up questions needed here: Yes.
Follow up: Who was the mother of George Washington?
Intermediate answer: The mother of George Washington was Mary Ball Washington.
Follow up: Who was the father of Mary Ball Washington?
Intermediate answer: The father of Mary Ball Washington was Joseph Ball.
So the final answer is: Joseph Ball
"""
  },
  {
    "question": "Are both the directors of Jaws and Casino Royale from the same country?",
    "answer":
"""
Are follow up questions needed here: Yes.
Follow up: Who is the director of Jaws?
Intermediate Answer: The director of Jaws is Steven Spielberg.
Follow up: Where is Steven Spielberg from?
Intermediate Answer: The United States.
Follow up: Who is the director of Casino Royale?
Intermediate Answer: The director of Casino Royale is Martin Campbell.
Follow up: Where is Martin Campbell from?
Intermediate Answer: New Zealand.
So the final answer is: No
"""
  }
]

# Template used to render one few-shot example; from_template infers the
# {question} and {answer} variables from the string.
example_prompt = PromptTemplate.from_template("Question: {question}\n{answer}")

# Show what a single rendered example looks like.
first_example = examples[0]
print(example_prompt.format(**first_example))

# Few-shot prompt: every example rendered in turn, then the new question
# appended via the suffix.
prompt = FewShotPromptTemplate(
    example_prompt=example_prompt,
    examples=examples,
    input_variables=["input"],
    suffix="Question: {input}",
)

print(prompt.format(input="Who was the father of Mary Ball Washington?"))


#res = xinghuo_llm.invoke(prompt.format(input="Who was the father of Mary Ball Washington?"))
#print(res.content)

print("\n")
print("#################聊天模型###########################")

# Two independent translation conversations for a batched generate call:
# each pairs the same system instruction with a different user sentence.
_sentences = ("I love programming.", "I love artificial intelligence.")
batch_messages = [
    [
        SystemMessage(content="You are a helpful assistant that translates English to Chinese."),
        HumanMessage(content=sentence),
    ]
    for sentence in _sentences
]
# result = xinghuo_llm.generate(batch_messages)
# print(result)

# Chat prompt built from one system template and one human template.
template = "You are a helpful assistant that translates {input_language} to {output_language}."
system_message_prompt = SystemMessagePromptTemplate.from_template(template)

human_template = "{text}"
human_message_prompt = HumanMessagePromptTemplate.from_template(human_template)

chat_prompt = ChatPromptTemplate.from_messages(
    [system_message_prompt, human_message_prompt]
)

# Format once and reuse the resulting message list for both the preview
# print and the model call (the original formatted twice, identically).
formatted_messages = chat_prompt.format_prompt(
    input_language="English", output_language="Chinese", text="I love programming."
).to_messages()
print(formatted_messages)
res = xinghuo_llm.invoke(formatted_messages)
print(res)