# bewchatbot/app.py
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer
import json
# Load the pre-trained model and tokenizer
model_name = "microsoft/DialoGPT-large"
model = AutoModelForCausalLM.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)
# Load JSON files containing questions and answers
json_files = ["fileone.json", "filesecond.json", "filethird.json", "filefourth.json", "filefifth.json"]
question_answer_pairs = []
for file_name in json_files:
    with open(file_name, "r") as file:
        data = json.load(file)
        question_answer_pairs.extend(data)
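# Each file is assumed to hold a list of objects with "question" and "answer"
# keys, since generate_response() below looks up exactly those two fields.
# A minimal hypothetical fileone.json:
# [
#   {"question": "what are your opening hours",
#    "answer": "We are open 9am to 5pm, Monday to Friday."}
# ]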
def generate_response(input_text):
    # First, try to match the input against the loaded question/answer pairs
    for question_answer_pair in question_answer_pairs:
        if question_answer_pair["question"].lower() in input_text.lower():
            return question_answer_pair["answer"]
    # No match found: fall back to DialoGPT. Append the EOS token so the model
    # treats the input as a complete conversational turn.
    input_ids = tokenizer.encode(input_text + tokenizer.eos_token, return_tensors="pt")
    response_ids = model.generate(input_ids, max_length=1000, pad_token_id=tokenizer.eos_token_id)
    # Decode only the newly generated tokens, skipping the echoed prompt
    response_text = tokenizer.decode(response_ids[0][input_ids.shape[-1]:], skip_special_tokens=True)
    return response_text
# Define Gradio interface
inputs = gr.Textbox(lines=3, label="Input")
outputs = gr.Textbox(label="Output")
title = "Chat with AI"
description = "This AI chatbot answers questions from the provided JSON files and falls back to the DialoGPT-large model for everything else."
examples = [["What is your name?"]]
gr.Interface(fn=generate_response, inputs=inputs, outputs=outputs, title=title, description=description, examples=examples).launch()
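# To run locally (a sketch, assuming the five JSON files listed above exist
# next to this script):
#   python app.py
# Gradio then serves the chat UI on a local URL (http://127.0.0.1:7860 by default).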