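"""Prompts Library: a Streamlit page for managing prompt templates and chatting with them.

Templates are stored in a local prompts.json file and can be added, edited, and
deleted from the UI. Chat completions go through thin provider clients
(Groq, NVIDIA, local Ollama) imported from clients.py.

Run with: streamlit run <this_file.py>
"""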
import json
import streamlit as st
from streamlit_modal import Modal
from clients import OllamaClient, NvidiaClient, GroqClient
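# NOTE: clients.py (not shown here) is assumed to expose client classes sharing a
# common interface: constructed with api_key (and optionally model), and providing
# list_models_names() and api_chat_completion(system, prompt, **options).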

st.set_page_config(
    page_title="Prompts Library",
    layout="wide",
)

# Cache the header of the app to prevent re-rendering on each load
@st.cache_resource
def display_app_header():
    """Display the header of the Streamlit app."""
    st.title("Prompts Library")
    st.subheader("ChatBot with prompt templates")

# Display the header of the app
display_app_header()

# Custom CSS: pin the modal containers (keys starting with "edit-modal") near the top of the page
st.markdown(
    '<style>div[key^="edit-modal"] {top: 25px;}</style>', unsafe_allow_html=True
)


# UI sidebar ##########################################
def ui_llm(provider):
    """Render the sidebar API-key input and model selector for the chosen provider."""
    if api_token := st.sidebar.text_input(
        "Enter your API Key", type="password", key=f"API_{provider}"
    ):
        provider_models = llm_providers[provider](api_key=api_token).list_models_names()
        if provider_models:
            st.sidebar.radio("Select your model", provider_models, key="llm")
        else:
            st.sidebar.error(
                "Ollama is not running, or there is a problem with the selected LLM provider"
            )
    else:
        st.sidebar.warning("You must enter your API key")

st.sidebar.subheader("Models")

# LLM
llm_providers = {
    "Cloud Groq": GroqClient,
    "Cloud Nvidia": NvidiaClient,
    "Local Ollama": OllamaClient,
}
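# The dict keys double as the radio labels in the sidebar; the matching value is
# the client class used to talk to that provider.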
if llm_provider := st.sidebar.radio(
    "Choose your LLM Provider", llm_providers.keys(), key="llm_provider"
):
    ui_llm(llm_provider)

# LLM parameters
st.sidebar.subheader("Parameters")
max_tokens = st.sidebar.number_input("Max tokens", value=1024, key="max_tokens")
temperature = st.sidebar.slider(
    "Temperature", min_value=0.0, max_value=1.0, value=0.5, step=0.1, key="temperature"
)
top_p = st.sidebar.slider(
    "Top P", min_value=0.0, max_value=1.0, value=0.7, step=0.1, key="top_p"
)

# Helper functions ########################################

def edit_form(form_name, title=None, source=None, system=None, user=None):
    """
    Creates a form for editing a prompt template.

    Args:
        form_name: The name of the form.
        title: The title of the prompt template (optional).
        source: The source of the prompt template (optional).
        system: The system example instruction (optional).
        user: The user example instruction (optional).

    Returns:
        None
    """
    with st.form(form_name, clear_on_submit=False, border=True):
        new_title = st.text_input("Name", value=title)
        new_source = st.text_input("Source", value=source)
        new_system = st.text_area("System example instruction", value=system)
        new_user = st.text_area("User example instruction", value=user)
        if st.form_submit_button("Submit"):
            rec = {
                "title": new_title,
                "source": new_source,
                "messages": [
                    {"role": "system", "content": new_system},
                    {"role": "user", "content": new_user},
                ],
            }
            if title is not None:
                delete_prompt(title)
            add_prompt(rec)


def read_prompts_file():
    """
    Loads the prompts from the "prompts.json" file.

    Returns:
        A list of prompt templates.
    """
    with open("prompts.json", "r", encoding="utf-8") as fp:
        return json.load(fp)
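
# Expected prompts.json layout, inferred from the fields read below
# (titles, sources, and contents here are illustrative placeholders):
# [
#     {
#         "title": "Summarizer",
#         "source": "https://example.com",
#         "messages": [
#             {"role": "system", "content": "You are a helpful summarizer."},
#             {"role": "user", "content": "Summarize the following text: ..."}
#         ]
#     }
# ]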


def add_prompt(rec):
    """
    Adds a new prompt to the "prompts.json" file.

    Args:
        rec: The new prompt to add.

    Returns:
        None
    """
    with open("prompts.json", "r", encoding="utf-8") as fp:
        prompts = json.load(fp)
    prompts.append(rec)
    with open("prompts.json", "w", encoding="utf-8") as outfile:
        json.dump(prompts, outfile, indent=4, sort_keys=True)
    st.rerun()


def edit_prompt(title):
    """
    Renders the edit form pre-filled with an existing prompt template.

    Args:
        title: The title of the prompt to edit.

    Returns:
        None
    """
    with open("prompts.json", "r", encoding="utf-8") as fp:
        prompts = json.load(fp)
    rec = next(i for i in prompts if i["title"].strip() == title.strip())
    messages = rec["messages"]
    return edit_form(
        "prompt_edit",
        title=title,
        source=rec["source"],
        system=next(m["content"] for m in messages if m["role"] == "system"),
        user=next(m["content"] for m in messages if m["role"] == "user"),
    )


def delete_prompt(title):
    """
    Removes a prompt template from the "prompts.json" file.

    Args:
        title: The title of the prompt to delete.
    """
    with open("prompts.json", "r", encoding="utf-8") as fp:
        prompts = json.load(fp)
    recs = [i for i in prompts if i["title"].strip() != title.strip()]
    with open("prompts.json", "w", encoding="utf-8") as outfile:
        json.dump(recs, outfile, indent=4, sort_keys=True)


def get_llm_response(system, prompt):
    """
    Generates a response from the selected LLM.

    Args:
        system: The system instruction to send to the model.
        prompt: The user prompt.

    Returns:
        The response from the LLM.
    """
    options = dict(
        max_tokens=st.session_state["max_tokens"],
        top_p=st.session_state["top_p"],
        temperature=st.session_state["temperature"],
    )
    return llm_providers[st.session_state["llm_provider"]](
        api_key=st.session_state[f"API_{st.session_state['llm_provider']}"],
        model=st.session_state["llm"],
    ).api_chat_completion(system, prompt, **options)


def generate(system, prompt):
    """Send the system/user pair to the selected LLM and append the exchange to the chat history."""
    st.session_state.messages.append({"role": "system", "content": system})
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("assistant"):
        response = get_llm_response(system, prompt)
        st.markdown(response)
        # Add assistant response to chat history
        st.session_state.messages.append({"role": "assistant", "content": response})

def clear():
    """Clear the chat history and all other widget state."""
    # Copy the keys first: deleting while iterating over the live view can raise a RuntimeError
    for key in list(st.session_state.keys()):
        del st.session_state[key]

# UI main #####################################################

tab1, tab2 = st.tabs(["Prompts Library", "Chatbot"])
with tab1:
    new_modal = Modal(
        "Add prompt",
        key="edit-modal",
    )
    if st.button("➕ Add a prompt template"):
        new_modal.open()
    if new_modal.is_open():
        with new_modal.container():
            edit_form("prompt_add")
    prompts = read_prompts_file()
    grids = range(1, len(prompts) + 1)
    cols = st.columns([1, 1])
    wcol = 2
    for f, b in zip(prompts, grids):
        col = cols[b % wcol]
        with col:
            with st.expander(f["title"].upper()):
                if st.button(f"✔️ Select prompt {f['title'].upper()} and go to Chatbot tab", type="secondary"):
                        # can do better here
                        st.session_state["init_messages"] = f["messages"]
                        st.session_state.init_system = f["messages"][0]["content"]
                        st.session_state.init_user = f["messages"][1]["content"]
                edit_modal = Modal(
                    f"Edit prompt {f['title'].upper()}",
                    key=f"edit-modal_{f['title']}",
                )
                if st.button(
                    f"✏️ Edit {f['title'].upper()}", type="secondary"
                ):
                    edit_modal.open()
                if edit_modal.is_open():
                    with edit_modal.container():
                        edit_prompt(f["title"])
                st.write(f"Source : {f['source']}")
                st.markdown(f"- System : {f['messages'][0]['content']}")
                st.markdown(f"- User: {f['messages'][1]['content']}")
                st.divider()
                if st.button(f"❌ Delete prompt {f['title'].upper()}", type="primary"):
                    delete_prompt(f["title"])
                    st.rerun()
with tab2:
    if st.button("Clear chatbot history", type="secondary"):
        clear()
    # Initialize chat history
    if "messages" not in st.session_state:
        st.session_state.messages = []
    # Display chat messages from history on app rerun
    for message in st.session_state.messages:
        with st.chat_message(message["role"]):
            st.markdown(message["content"])
    # Prompt inputs, pre-filled when a template was selected in the Prompts Library tab
    if "init_messages" in st.session_state:
        system = st.text_area(":blue[System]", key="init_system")
        prompt = st.text_area(":blue[User]", key="init_user")
    else:
        system = st.text_area(":blue[System]")
        prompt = st.text_area(":blue[User]")
    if st.button("Generate", type="primary"):
        generate(system, prompt)