from transformers import pipeline, Conversation
import gradio as gr

# This Space mainly demonstrates that you can build an AI app directly on top of a
# Hugging Face pipeline, which also happens to map neatly onto Gradio's ChatInterface.

chatbot = pipeline(model="facebook/blenderbot-400M-distill")  # Working!
# https://huggingface.co/facebook/blenderbot-400M-distill/tree/main
# Model file size: 730 MB or 1.46 GB
# https://huggingface.co/facebook/blenderbot-400M-distill/tree/main?library=true

# Use a pipeline as a high-level helper
# from transformers import pipeline
# pipe = pipeline("conversational", model="facebook/blenderbot-400M-distill")

# chatbot = pipeline(model="HuggingFaceH4/starchat-beta")
# https://huggingface.co/HuggingFaceH4/starchat-beta/tree/main
# This model is too large (9.96 + 9.86 + 9.86 + 1.36 GB), which causes the following error:
# Runtime error
# Memory limit exceeded (16Gi)

# chatbot = pipeline(model="...")

# Note: these lists are never updated, so every turn effectively starts a fresh conversation.
message_list = []
response_list = []

def vanilla_chatbot(message, history):
    conversation = Conversation(text=message,
                                past_user_inputs=message_list,
                                generated_responses=response_list)
    conversation = chatbot(conversation)
    return conversation.generated_responses[-1]

demo_chatbot = gr.ChatInterface(vanilla_chatbot,
                                title="Vanilla Chatbot",
                                description="Enter text to start chatting.")

demo_chatbot.launch()
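
# Optional sketch (not part of the original Space): gr.ChatInterface already passes
# the chat history into the callback, so past turns could be fed to the Conversation
# object instead of the always-empty module-level lists above. This assumes a
# transformers release that still ships the Conversation class (it was deprecated and
# later removed) and Gradio's default history format of (user, bot) pairs.
#
# def chatbot_with_history(message, history):
#     past_user_inputs = [user for user, bot in history]
#     generated_responses = [bot for user, bot in history]
#     conversation = Conversation(text=message,
#                                 past_user_inputs=past_user_inputs,
#                                 generated_responses=generated_responses)
#     conversation = chatbot(conversation)
#     return conversation.generated_responses[-1]
#
# demo_chatbot = gr.ChatInterface(chatbot_with_history,
#                                 title="Vanilla Chatbot",
#                                 description="Enter text to start chatting.")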