Commit a741634 by pragneshbarik (parent: 35cbe02)

removed sliding window, as token size increased

Files changed (1): app.py (+2 -3)
app.py CHANGED
@@ -132,7 +132,7 @@ with st.sidebar:
         label="Temperature", min_value=0.0, max_value=1.0, step=0.1, value=0.9)
 
     st.session_state.max_tokens = st.slider(
-        label="New tokens to generate", min_value = 64, max_value=1048, step= 32, value=256
+        label="New tokens to generate", min_value = 64, max_value=2048, step= 32, value=512
     )
 
     st.session_state.repetion_penalty = st.slider(
@@ -203,8 +203,7 @@ if prompt := st.chat_input("Chat with Ikigai Docs..."):
     len_response = (len(prompt.split()) + len(full_response.split())) * 1.25
     st.session_state["tokens_used"] = len_response + st.session_state["tokens_used"]
 
-    if st.session_state["tokens_used"] > 12000 :
-        st.session_state.history = st.session_state.history[-3:]
+
 
     st.session_state.history.append([prompt, full_response])
    st.session_state.history.append(identity_change)
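
For context on the change: before this commit, app.py clipped the chat history to its last three exchanges once the rough token estimate (word count × 1.25) exceeded 12000. A minimal standalone sketch of that sliding-window behaviour follows, assuming history is a plain list of [prompt, response] pairs as in the diff; the function names and the limit/window parameters are illustrative, not part of app.py.

# Sketch of the sliding-window history truncation removed by this commit.
# `history` and `tokens_used` mirror the st.session_state fields in app.py;
# the standalone functions, `limit`, and `window` are illustrative only.

def estimate_tokens(prompt: str, response: str) -> float:
    # Rough heuristic used in app.py: ~1.25 tokens per whitespace-separated word.
    return (len(prompt.split()) + len(response.split())) * 1.25

def append_with_window(history, tokens_used, prompt, response, limit=12000, window=3):
    tokens_used += estimate_tokens(prompt, response)
    if tokens_used > limit:
        # Sliding window: keep only the last `window` exchanges before appending.
        history = history[-window:]
    history.append([prompt, response])
    return history, tokens_used

With max_tokens now allowed to reach 2048, the commit drops this truncation rather than retuning the 12000-token threshold, leaving the full history in place.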