# my_awesome_qa_model / chatbot.py
# (source recovered from a Hugging Face file view; page chrome removed)
def chat_with_model():
    """Interactive question-answering loop.

    Repeatedly prompts the user for a question and a context passage,
    runs them through the module-level ``question_answerer`` pipeline,
    and prints the extracted answer with its confidence score.
    Typing 'exit' at either prompt ends the session.
    """
    print("Welcome to the Question Answering Chatbot! (type 'exit' to quit)")
    while True:
        question = input("You: ")
        if question.lower() == 'exit':
            print("Goodbye!")
            break
        context = input("Context: ")
        if context.lower() == 'exit':
            print("Goodbye!")
            break
        # question_answerer is expected to return a dict with 'answer' and 'score'.
        result = question_answerer(question=question, context=context)
        print(f"Bot: {result['answer']} (confidence: {result['score']:.2f})")
# Save a standalone, runnable copy of the chatbot as a script.
# NOTE(review): as originally pasted, the embedded script had lost its
# indentation and would not have been valid Python; the template below
# restores proper formatting. Writing to 'chatbot.py' overwrites any
# existing file of that name in the working directory.
_CHATBOT_SCRIPT = '''\
from transformers import AutoTokenizer, TFAutoModelForQuestionAnswering, pipeline

# Load tokenizer and model
tokenizer = AutoTokenizer.from_pretrained("my_awesome_qa_model")
model = TFAutoModelForQuestionAnswering.from_pretrained("my_awesome_qa_model")

# Create a pipeline for question answering
question_answerer = pipeline("question-answering", model=model, tokenizer=tokenizer)

# Define the chat function
def chat_with_model():
    print("Welcome to the Question Answering Chatbot! (type 'exit' to quit)")
    while True:
        question = input("You: ")
        if question.lower() == 'exit':
            print("Goodbye!")
            break
        context = input("Context: ")
        if context.lower() == 'exit':
            print("Goodbye!")
            break
        response = question_answerer(question=question, context=context)
        answer = response['answer']
        score = response['score']
        print(f"Bot: {answer} (confidence: {score:.2f})")

# Run the chat function
if __name__ == "__main__":
    chat_with_model()
'''

# Write with an explicit encoding so the output is stable across platforms.
with open('chatbot.py', 'w', encoding='utf-8') as f:
    f.write(_CHATBOT_SCRIPT)
print("Chatbot script 'chatbot.py' has been created.")
if __name__ == "__main__":
chat_with_model()