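"""KRA chatbot front end built with Streamlit.

Keeps the conversation in st.session_state, forwards each user prompt to a
remote /chat REST endpoint (KoAlpaca or a fine-tuned model, selectable in
the sidebar), and streams the reply back word by word.

Run with `streamlit run <this file>` (the file name is not given in the source).
"""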
import streamlit as st
import time
import requests


# Streamed response emulator: yield the reply word by word with a short
# delay so st.write_stream can render it like streaming model output.
def response_generator(response_data):
    for word in response_data.split():
        yield word + " "
        time.sleep(0.05)

st.title("KRA ์ฑ—๋ด‡")


# Sidebar: choose which backend model to query
model_radio = st.sidebar.radio("Select model", ("KoAlpaca", "FineTuned"))
st.sidebar.text("ver 022715")

# Initialize chat history
if "messages" not in st.session_state:
    st.session_state.messages = []

# Display chat messages from history on app rerun
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])    



# React to user input
if prompt := st.chat_input("What is up?"):
    # Display user message in chat message container
    with st.chat_message("user"):
        st.markdown(prompt)
    # Add user message to chat history
    st.session_state.messages.append({"role": "user", "content": prompt})    


    # Build the request payload; requests' json= parameter also sets the
    # Content-Type: application/json header.
    data = {"content": prompt}

    # Pick the backend endpoint based on the model selected in the sidebar.
    url = "http://3.39.53.42:8000/chat" if model_radio == "FineTuned" else "http://3.37.154.147:8000/chat"

    # Call the chatbot REST API and fall back to an error message on failure.
    try:
        server_rsp = requests.post(url, json=data, timeout=30)
        if server_rsp.status_code == 200:
            response = server_rsp.json()["content"]
        else:
            response = "์—๋Ÿฌ๊ฐ€ ๋ฐœ์ƒํ•˜์˜€์Šต๋‹ˆ๋‹ค."  # "An error occurred."
    except requests.RequestException:
        response = "์—๋Ÿฌ๊ฐ€ ๋ฐœ์ƒํ•˜์˜€์Šต๋‹ˆ๋‹ค."  # "An error occurred."

    # Display assistant response in chat message container
    with st.chat_message("assistant"):        
        st.write_stream(response_generator(response))
    # Add assistant response to chat history
    st.session_state.messages.append({"role": "assistant", "content": response})
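
# ---------------------------------------------------------------------------
# Assumed /chat contract (a sketch, not confirmed by any backend source):
# the client POSTs {"content": "<user prompt>"} and expects a 200 response
# whose JSON body also carries a "content" field with the model's reply.
# A minimal FastAPI-style server outline matching that assumption (names
# such as generate_reply are hypothetical) could look like:
#
#     from fastapi import FastAPI
#     from pydantic import BaseModel
#
#     app = FastAPI()
#
#     class ChatRequest(BaseModel):
#         content: str
#
#     @app.post("/chat")
#     def chat(req: ChatRequest):
#         # generate_reply is a placeholder for the actual model inference
#         return {"content": generate_reply(req.content)}
# ---------------------------------------------------------------------------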