#!/usr/bin/env python
# coding: utf-8

import streamlit as st
import google.generativeai as genai  # Gemini SDK; provides the start_chat()/send_message() API used below (see note at the model setup)
from transformers import logging
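# Requirements (assumed): pip install streamlit google-generativeai transformers,
# plus a Google API key exposed to the app via st.secrets (see the model setup below).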

# Show only errors from the Hugging Face Transformers library (suppress warnings)
logging.set_verbosity_error()


st.title('Chat with Me')
# NOTE: the original code loaded a Keras model here with
# load_model('text_classification_model.h5'), but a Keras model has no
# start_chat()/send_message() methods; those belong to the google.generativeai
# (Gemini) SDK that the rest of this script uses. A Gemini chat model is
# assumed instead; the secret name and model name below are placeholders.
genai.configure(api_key=st.secrets['GOOGLE_API_KEY'])
model = genai.GenerativeModel('gemini-pro')

# Chat history
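# st.session_state keeps the chat object alive across Streamlit reruns,
# so the conversation survives each button click.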
if 'chat' not in st.session_state:
    st.session_state.chat = model.start_chat(history=[])

# Input for user question
soru = st.text_input('Sor:')

# Extract the text of the first part of a response (or history entry);
# return an empty string if there are no parts or no text.
def extract_text(response_parts):
    return response_parts[0].text.strip() if response_parts and response_parts[0].text else ""

# Handle "Sor" button click
if st.button('Sor'):
    if soru:
        response = st.session_state.chat.send_message(soru)
        # send_message() already records both the user turn and the model turn
        # in st.session_state.chat.history, so no manual appending is needed.
        st.session_state.last_question = soru
        st.session_state.last_response = extract_text(response.parts)
        st.experimental_rerun()
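# experimental_rerun() above restarts the script immediately, so the freshly
# updated history is rendered by the loop below.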

# Display chat history (entries are Content objects with .role and .parts)
for message in reversed(st.session_state.chat.history):
    text = extract_text(message.parts)
    if message.role == 'user':
        st.markdown(f'<div style="text-align: right; background-color: #2F2F2F; padding: 10px; border-radius: 10px; margin: 10px; width: fit-content;">👤 Sen: {text}</div>', unsafe_allow_html=True)
    elif message.role == 'model':
        st.markdown(f'<div style="text-align: left; background-color: #2E2E2E; padding: 10px; border-radius: 10px; margin: 10px; width: fit-content;">🤖 Bot: {text}</div>', unsafe_allow_html=True)

# Handle "Yeni Sohbet" button click
if st.button('Yeni Sohbet'):
    st.session_state.chat = model.start_chat(history=[])
    st.session_state.last_question = ''
    st.session_state.last_response = ''
    st.experimental_rerun()
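
# To run this app locally (assumed filename app.py):
#   streamlit run app.py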