import gradio as gr
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
""" | |
For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference | |
https://huggingface.co/spaces/jclian91/Chinese_Late_Chunking/blob/main/app.py | |
""" | |
pretrained_model = "ykallan/SkuInfo-Qwen2.5-3B-Instruct"
# load model and tokenizer, placing the model on GPU when available
device = "cuda" if torch.cuda.is_available() else "cpu"
tokenizer = AutoTokenizer.from_pretrained(pretrained_model, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(pretrained_model, trust_remote_code=True).to(device)


def respond(sku_name):
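    # System prompt (translated from Chinese): extract the brand, model, and main
    # product from the following product name and return them in JSON format.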
    messages = [
        {"role": "system", "content": "在以下商品名称中抽取出品牌、型号、主商品,并以JSON格式返回。"},
        {"role": "user", "content": sku_name}
    ]
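    # apply_chat_template with tokenize=False returns the formatted prompt string,
    # which is then tokenized into tensors for generation.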
    text = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
    model_inputs = tokenizer([text], return_tensors="pt", padding=True).to(device)
    generate_config = {
        "max_new_tokens": 128
    }
    generated_ids = model.generate(model_inputs.input_ids, **generate_config)
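    # keep only the newly generated tokens by dropping the prompt tokens from each output sequence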
    generated_ids = [
        output_ids[len(input_ids):] for input_ids, output_ids in zip(model_inputs.input_ids, generated_ids)
    ]
    response = tokenizer.batch_decode(generated_ids, skip_special_tokens=True)[0]
    return response


if __name__ == "__main__":
    with gr.Blocks() as demo:
        sku_name = gr.TextArea(lines=1, placeholder="your query", label="skuName")
        # output box showing the model's JSON response
        sku_info = gr.TextArea(lines=4, label="skuInfo (JSON)")
        submit = gr.Button("Submit")
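        # NOTE: the example below is a placeholder; the original app did not define any example inputs.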
        examples = gr.Examples(
            examples=[["Apple iPhone 15 Pro Max 256GB"]],
            inputs=[sku_name]
        )
        submit.click(fn=respond,
                     inputs=[sku_name],
                     outputs=[sku_info],
                     )
    demo.launch()