Spaces:
Running
Running
acecalisto3
committed on
Commit
•
2467450
1
Parent(s):
ffbc586
Update agent.py
Browse files
agent.py
CHANGED
@@ -49,6 +49,24 @@ def run_gpt(
|
|
49 |
print(LOG_RESPONSE.format(resp))
|
50 |
return resp
|
51 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
52 |
|
53 |
def compress_history(purpose, task, history, directory):
|
54 |
module_summary, _, _ = read_python_module_structure(directory)
|
|
|
49 |
print(LOG_RESPONSE.format(resp))
|
50 |
return resp
|
51 |
|
52 |
+
def generate(prompt, history, temperature):
    """Stream a text completion for *prompt*, yielding the accumulated output.

    Builds sampling options with a fresh random seed, formats the prompt
    together with the conversation *history*, and streams tokens from the
    inference client, yielding the growing output string after each token.

    Args:
        prompt: The user prompt to complete.
        history: Prior conversation turns passed to the prompt formatter.
        temperature: Sampling temperature forwarded to the model.

    Yields:
        str: The output generated so far, extended one token at a time.
    """
    # Fresh seed per call so repeated generations differ.
    rng_seed = random.randint(1, 1111111111111111)
    sampling_options = dict(
        temperature=temperature,
        max_new_tokens=256,
        top_p=0.95,
        repetition_penalty=1.0,
        do_sample=True,
        seed=rng_seed,
    )
    full_prompt = format_prompt_var(f"{prompt}", history)
    token_stream = client.text_generation(
        full_prompt,
        **sampling_options,
        stream=True,
        details=True,
        return_full_text=False,
    )
    accumulated = ""
    for chunk in token_stream:
        accumulated += chunk.token.text
        yield accumulated
|
69 |
+
|
70 |
|
71 |
def compress_history(purpose, task, history, directory):
|
72 |
module_summary, _, _ = read_python_module_structure(directory)
|