"""
Created on Sun Oct 01 10:49:43 2023
@author: Loges
"""
import streamlit as st
import sentencepiece  # not used directly; imported so a missing SentencePiece backend (needed by T5Tokenizer) fails fast
from transformers import pipeline, T5Tokenizer, T5ForConditionalGeneration
# Load the fine-tuned T5 model and its tokenizer from the Hugging Face Hub
model = T5ForConditionalGeneration.from_pretrained("Logeswaransr/AI_Chaperone").to("cpu")
tokenizer = T5Tokenizer.from_pretrained("Logeswaransr/AI_Chaperone")
# Wrap them in a text2text-generation pipeline for simple prompt -> response calls
pipe = pipeline('text2text-generation', model=model, tokenizer=tokenizer)
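# Canned greeting messages shown once when a new chat session starts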
greetings = [
    "Hello!",
    "I am AI Chaperone. Your special virtual assistant for your needs.",
    "Feel free to ask me anything. I will do what I can.",
]
st.set_page_config(page_title='AI Chaperone', layout='wide')
# Seed the chat history with the greetings on the first run of a session
if 'messages' not in st.session_state:
    st.session_state.messages = []
    for gr in greetings:
        st.session_state.messages.append({
            'role': 'assistant',
            'content': gr})
st.subheader("AI Chaperone")
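# Replay the stored conversation so the history survives Streamlit reruns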
for message in st.session_state.messages:
    with st.chat_message(message['role']):
        st.markdown(message['content'])
# Handle a new user query from the chat input box
if prompt := st.chat_input("Enter your query"):
    # Echo the user's message and store it in the history
    with st.chat_message("user"):
        st.markdown(prompt)
    st.session_state.messages.append({
        'role': 'user',
        'content': prompt})
    # Generate a reply with the T5 pipeline, then display and store it
    out = pipe(prompt)
    response = out[0]['generated_text']
    # response = f"Analysis: {response}"
    with st.chat_message("assistant"):
        st.markdown(response)
    st.session_state.messages.append({
        'role': 'assistant',
        'content': response})