# chatglm-6b-int4 / load_model.py
from transformers import AutoTokenizer, AutoModel, pipeline
# Load the INT4-quantized ChatGLM-6B checkpoint from the current directory.
# trust_remote_code=True is needed because the repo ships its own modeling code.
tokenizer = AutoTokenizer.from_pretrained(".\\", trust_remote_code=True)
# Alternative: append .float() to cast the weights to float32 (e.g. for CPU-only use).
# model = AutoModel.from_pretrained(".\\", trust_remote_code=True).float()
model = AutoModel.from_pretrained(".\\", trust_remote_code=True)
model = model.eval()

# Single chat turn; "你好" means "Hello". chat() returns the reply and the updated history.
response, history = model.chat(tokenizer, "你好", history=[])
print("response:", response)
# Unfinished experiment from the original script: transformers.pipeline() requires a
# valid task name, so the call is kept disabled here because it raises an error as written.
# npl = pipeline('')