# -*- coding: utf-8 -*-
"""
Created on Thu Sep 21 22:17:43 2023
@author: Loges
"""
import streamlit as st
import sentencepiece  # required by T5Tokenizer's SentencePiece backend; imported so a missing install fails fast
from transformers import pipeline, T5Tokenizer, T5ForConditionalGeneration

# Load the fine-tuned T5 checkpoint and tokenizer, then wrap them in a text2text pipeline (CPU inference)
model = T5ForConditionalGeneration.from_pretrained("Logeswaransr/T5_MineAI_Prototype").to("cpu")
tokenizer = T5Tokenizer.from_pretrained("Logeswaransr/T5_MineAI_Prototype")
pipe = pipeline('text2text-generation', model=model, tokenizer=tokenizer)
greetings=["Hello! My name is MineAI, A specially trained LLM here to assist you on your Mining Related Queries.","How may I help you?"]
st.set_page_config(page_title='Sample Chatbot', layout='wide')
# Initialise the chat history on first load
if 'messages' not in st.session_state:
    st.session_state.messages = []
st.subheader("Mine AI")
# Replay the conversation so far
# message element format: {'role': 'user', 'content': '<user prompt>'}
for message in st.session_state.messages:
    with st.chat_message(message['role']):
        st.markdown(message['content'])
# On a fresh session, show the greeting messages and record them in the history
if st.session_state.messages == []:
    for gr in greetings:
        with st.chat_message("assistant"):
            st.markdown(gr)
        st.session_state.messages.append({
            'role': 'assistant',
            'content': gr})
# Read a new user query, echo it, run it through the model, and display the answer
if prompt := st.chat_input("Enter your query"):
    with st.chat_message("user"):
        st.markdown(prompt)
    st.session_state.messages.append({
        'role': 'user',
        'content': prompt})

    # Generate the assistant's reply with the T5 pipeline
    out = pipe(prompt)
    response = out[0]['generated_text']
    # response = f"Analysis: {response}"
    with st.chat_message("assistant"):
        st.markdown(response)
    st.session_state.messages.append({
        'role': 'assistant',
        'content': response})