import gradio as gr
from huggingface_hub import InferenceClient
from transformers import T5Tokenizer, T5ForConditionalGeneration
"""
For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
"""
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
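
# The InferenceClient above is never called in this app; the sketch below is
# one hypothetical way to use it, assuming you want the hosted zephyr-7b-beta
# model as a fallback when the local T5 model cannot answer. The helper name
# `zephyr_fallback` and its defaults are illustrative, not part of the
# original app.
def zephyr_fallback(message: str, max_new_tokens: int = 100) -> str:
    """Query the hosted zephyr-7b-beta model through the Inference API."""
    return client.text_generation(message, max_new_tokens=max_new_tokens)
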
# Load the fine-tuned T5 model and tokenizer once at startup, rather than on
# every chat turn, so predict() only has to run generation.
model = T5ForConditionalGeneration.from_pretrained("kronos25/Temporal_Chatbot")
tokenizer = T5Tokenizer.from_pretrained("kronos25/Temporal_Chatbot")


def predict(message, history):
    # Encode the user message and generate a reply with the local T5 model.
    prompt = message + "\n"
    inputs = tokenizer(prompt, return_tensors="pt")
    outputs = model.generate(**inputs, max_length=100)
    model_result = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return model_result + "\n"
demo = gr.ChatInterface(
    predict,
    chatbot=gr.Chatbot(height=300),
    textbox=gr.Textbox(placeholder="Ask me anything.", container=False, scale=7),
    title="Temporal Chatbot",
    description="Ask Temporal Chatbot any question.",
    theme="soft",
    examples=["Is the doctor available tomorrow?"],
    cache_examples=True,
    retry_btn=None,
    undo_btn="Delete Previous",
    clear_btn="Clear",
)

if __name__ == "__main__":
    demo.launch()