# Model download (via ModelScope hub)
from modelscope import snapshot_download
from modelscope import AutoTokenizer,AutoModel
# model_dir = snapshot_download('qwen/Qwen-72B-Chat',cache_dir='e:/models')
# import os
# os.environ['OPENAI_API_KEY'] = 'empty'
# from langchain_openai import ChatOpenAI
#
#
# llm = ChatOpenAI(base_url="http://localhost:1234/v1", api_key="lm-studio", max_tokens=256, temperature=0.0, top_p=0.9)
#
# result = llm.invoke('京东董事长刘强东的老婆是谁？她是姓章，清华大学毕业的。')
# print(result)

# Download (or reuse a cached copy of) the ChatGLM3-6B-32K checkpoint,
# then run a short multi-turn chat as a smoke test.
model_dir = snapshot_download('ZhipuAI/chatglm3-6b-32k', cache_dir='i:/models')

# trust_remote_code=True is required: ChatGLM ships custom tokenizer/model
# code inside the checkpoint repo rather than in the transformers library.
tokenizer = AutoTokenizer.from_pretrained(model_dir, trust_remote_code=True)
model = AutoModel.from_pretrained(model_dir, trust_remote_code=True)

# Inference mode: disables dropout and other training-only behavior.
model = model.eval()

# NOTE: calling model('你好！') directly would raise — a transformers model's
# forward() expects tokenized tensor inputs, not a raw string. ChatGLM exposes
# a .chat(tokenizer, query, history) helper for string-in/string-out chat.
response, history = model.chat(tokenizer, "你好", history=[])
print(response)
response, history = model.chat(tokenizer, "晚上睡不着应该怎么办", history=history)
print(response)