Update app.py
app.py CHANGED
@@ -75,17 +75,7 @@ def run_gpt(
     resp = ""
     for response in stream:
         resp += response.token.text
-
-    resp = openai.ChatCompletion.create(
-        model=MODEL,
-        messages=[
-            {"role": "system", "content": content},
-        ],
-        temperature=0.0,
-        max_tokens=max_tokens,
-        stop=stop_tokens if stop_tokens else None,
-    )["choices"][0]["message"]["content"]
-    '''
+
     if VERBOSE:
         print(LOG_RESPONSE.format(resp))
     return resp

@@ -144,7 +134,7 @@ def call_main(purpose, task, history, directory, action_input):
     print (f'ACTION_INPUT :: {action_input}')

     history += "{}\n".format(line)
-    if
+    if "COMPLETE" in action_name or "COMPLETE" in action_input:
         task = "END"
         return action_name, action_input, history, task
     else:

@@ -311,7 +301,7 @@ NAME_TO_FUNC = {


 def run_action(purpose, task, history, directory, action_name, action_input):
-    if
+    if "COMPLETE" in action_name:
         task="END"
         return action_name, action_input, history, task

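For context on the first hunk: the kept lines build the reply by accumulating streamed tokens into resp, while the deleted lines appear to have been a leftover openai.ChatCompletion call already disabled inside a string literal (note the stray closing ''' that is removed with them). Below is a minimal, self-contained sketch of the streaming pattern the kept code relies on. It assumes the stream comes from huggingface_hub's InferenceClient.text_generation with stream=True and details=True; the client setup, the model id, and the generation parameters are not part of this hunk, so they are placeholders rather than the Space's actual configuration.

from huggingface_hub import InferenceClient

# Placeholder model id for the sketch; app.py defines its own MODEL elsewhere.
client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")

def stream_reply(prompt, max_tokens=512):
    # stream=True + details=True yields one stream response per generated
    # token, each exposing the new text as response.token.text.
    stream = client.text_generation(
        prompt,
        max_new_tokens=max_tokens,
        stream=True,
        details=True,
        return_full_text=False,
    )
    resp = ""
    for response in stream:
        resp += response.token.text  # same accumulation as the kept lines above
    return resp

Calling stream_reply("Hello") returns the concatenated generation once the stream is exhausted; the stop_tokens and VERBOSE handling from run_gpt are omitted from this sketch.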
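The second and third hunks fill in the condition of two early-exit guards: when the chosen action name (or, in call_main, the action input) contains "COMPLETE", the task is flagged "END" and control returns immediately. A minimal sketch of that flow follows, with an illustrative driver loop that is not part of app.py and with the NAME_TO_FUNC dispatch omitted.

def run_action_sketch(purpose, task, history, directory, action_name, action_input):
    # Guard added in this commit: a COMPLETE action ends the run before any
    # handler would be dispatched.
    if "COMPLETE" in action_name:
        task = "END"
        return action_name, action_input, history, task
    history += f"observation: ran {action_name}\n"
    return action_name, action_input, history, task

# Illustrative driver: loops until the guard flips task to "END".
task, history = "MAIN", ""
action_name, action_input = "SEARCH", "example query"
while task != "END":
    action_name, action_input, history, task = run_action_sketch(
        "demo purpose", task, history, "./", action_name, action_input
    )
    action_name = "COMPLETE"  # pretend the model signals completion next turn
print(history)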