# -*- coding: utf-8 -*-
"""
Created on Thu Sep 21 22:17:43 2023

@author: Loges
"""

import streamlit as st
import sentencepiece  # required by T5Tokenizer's SentencePiece backend
from transformers import pipeline, T5Tokenizer, T5ForConditionalGeneration

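# Load the fine-tuned T5 model and its tokenizer on the CPU.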
model = T5ForConditionalGeneration.from_pretrained("Logeswaransr/T5_MineAI_Prototype").to("cpu")
tokenizer = T5Tokenizer.from_pretrained("Logeswaransr/T5_MineAI_Prototype")

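# Wrap the model and tokenizer in a text2text-generation pipeline for inference.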
pipe = pipeline('text2text-generation', model=model, tokenizer=tokenizer)

greetings = ["Hello! My name is MineAI, a specially trained LLM here to assist you with your mining-related queries.",
             "How may I help you?"]

st.set_page_config(page_title='Sample Chatbot', layout='wide')

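# Chat history persists across Streamlit reruns via session state.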
if 'messages' not in st.session_state:
    st.session_state.messages = []

st.subheader("Mine AI")

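# Replay stored messages so the conversation survives each rerun.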
for message in st.session_state.messages:
    with st.chat_message(message['role']):
        st.markdown(message['content'])
        
## messages element format: {'role':'user', 'content':'<user prompt>'}
        
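# On first load, greet the user and seed the history with the greeting messages.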
if not st.session_state.messages:
    for gr in greetings:
        with st.chat_message("assistant"):
            st.markdown(gr)

        st.session_state.messages.append({
            'role': 'assistant',
            'content': gr})

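# Handle a new user query: display it, run the model, and append both turns to the history.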
if prompt := st.chat_input("Enter your query"):
    with st.chat_message("user"):
        st.markdown(prompt)

    st.session_state.messages.append({
        'role': 'user',
        'content': prompt})

    out = pipe(prompt)
    response = out[0]['generated_text']

    # response = f"Analysis: {response}"

    with st.chat_message("assistant"):
        st.markdown(response)

    st.session_state.messages.append({
        'role': 'assistant',
        'content': response})