import gradio

from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.output_parsers import StrOutputParser
from langchain_community.llms import HuggingFacePipeline


# Alternative 1: use the Zhipu AI API wrapper instead of a local model.
# from ChatGLM_new import zhipu_llm
# model = zhipu_llm

# Alternative 2: let LangChain build the transformers pipeline in one call.
# model = HuggingFacePipeline.from_model_id(
#     model_id="THUDM/chatglm3-6b",
#     task="text-generation",
#     device=0,
#     model_kwargs={"trust_remote_code": True},
#     pipeline_kwargs={"max_new_tokens": 5000},
# )

# Load ChatGLM3-6B locally, caching the weights under D:/chatglm3-6b/.
model_id = "THUDM/chatglm3-6b"
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype="auto",
    trust_remote_code=True,
    # device="0",
    cache_dir="D:/chatglm3-6b/",
).half().cuda()
model = model.eval()

tokenizer = AutoTokenizer.from_pretrained(
    model_id,
    trust_remote_code=True,
    cache_dir="D:/chatglm3-6b/",
)

# do_sample/temperature are generation settings, so they belong on the
# pipeline rather than on from_pretrained (where they would only end up
# as attributes on the model config).
pipe = pipeline(
    "text-generation",
    model=model,
    tokenizer=tokenizer,
    max_new_tokens=8000,
    do_sample=True,
    temperature=0.9,
)
hf = HuggingFacePipeline(pipeline=pipe)
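
# Optional smoke test (a sketch, left commented out so the app starts
# quickly): the wrapped pipeline is itself a LangChain runnable, so it can
# be invoked with a plain string before any prompt template is attached.
# print(hf.invoke("你好"))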


# A pass-through prompt: the user's text is forwarded to the model verbatim.
# (When a ChatPromptTemplate is piped into a plain-text LLM like
# HuggingFacePipeline, LangChain flattens the messages into a string such
# as "Human: <user_input>".)
prompt = ChatPromptTemplate.from_template("{user_input}")


# Alternative prompt with a system message (Chinese; it reads: "Remember:
# answer every question with only the four characters 我不知道 (I don't know),").
# prompt = ChatPromptTemplate.from_messages([
#     ("system", "记住：对所有问题你只回答下面的4个字：我不知道，"),
#     ("human", "{user_input}"),
# ])


output_parser = StrOutputParser()

# Compose prompt -> model -> output parser into a single chain (LCEL).
chain = prompt | hf | output_parser


def greet(user_input):
    # Run the chain on the text typed into the Gradio textbox.
    return chain.invoke({"user_input": user_input})


demo = gradio.Interface(fn=greet, inputs="text", outputs="text")
demo.launch(server_name="0.0.0.0")  # listen on all network interfaces
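
# A hedged sketch of a streaming variant, assuming the same chain: LCEL
# runnables expose .stream(), and Gradio treats a generator callback as a
# streaming output. Note that with a plain transformers pipeline the chain
# may still yield the whole completion as a single chunk.
# def greet_stream(user_input):
#     partial = ""
#     for chunk in chain.stream({"user_input": user_input}):
#         partial += chunk
#         yield partial
# demo = gradio.Interface(fn=greet_stream, inputs="text", outputs="text")
# demo.launch(server_name="0.0.0.0")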