Commit 8f01549 · 1 parent: 37584e0

Send confirmation message to the plot

operators/llm_op.py CHANGED (+28 -7)
@@ -8,12 +8,12 @@ import json
 import re
 import time
 
+CHATGPT = False
 MODEL_NAME_OR_PATH = "TheBloke/deepseek-coder-6.7B-instruct-GPTQ"
-# MODEL_NAME_OR_PATH = "hanspeterlyngsoeraaschoujensen/deepseek-math-7b-instruct-GPTQ"
 
 CODE_MODIFIER_TEMPLATE = """
 ### Instruction
-Respond with
+Respond with one block of modified code only in ```python block. No explaination.
 
 ```python
 {code}
@@ -204,9 +204,14 @@ class Operator:
 
             user_message = input["user_message"]
             start_llm = time.time()
-
-
-
+            if CHATGPT:
+                output = self.ask_chatgpt(
+                    CODE_MODIFIER_TEMPLATE.format(code=code, user_message=user_message)
+                )
+            else:
+                output = self.ask_llm(
+                    CODE_MODIFIER_TEMPLATE.format(code=code, user_message=user_message)
+                )
 
             source_code = replace_code_in_source(code, output)
             print("response time:", time.time() - start_llm, flush=True)
@@ -251,10 +256,26 @@ class Operator:
                         pa.array(output["data"]),
                         dora_event["metadata"],
                     )
+
+                    send_output(
+                        "assistant_message",
+                        pa.array([f"sent: {output}"]),
+                        dora_event["metadata"],
+                    )
                 else:
-
+                    send_output(
+                        "assistant_message",
+                        pa.array(
+                            [f"Could not send as topic was not available: {output}"]
+                        ),
+                        dora_event["metadata"],
+                    )
             except:
-
+                send_output(
+                    "assistant_message",
+                    pa.array([f"Could not parse json: {outputs}"]),
+                    dora_event["metadata"],
+                )
             # if data is not iterable, put data in a list
         elif dora_event["type"] == "INPUT" and dora_event["id"] == "assistant":
            user_message = dora_event["value"][0].as_py()
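Note: the new CHATGPT flag routes the same CODE_MODIFIER_TEMPLATE prompt through self.ask_chatgpt instead of the local GPTQ model, but that helper is not part of this diff. Below is a minimal, hypothetical sketch of what such a helper could look like, assuming the openai Python client (v1+) and a placeholder model name; the actual method in operators/llm_op.py may differ.

```python
# Hypothetical sketch only: ask_chatgpt is referenced by the CHATGPT branch but not shown in this commit.
# Assumes the `openai` package (v1+ client) and OPENAI_API_KEY set in the environment.
from openai import OpenAI


def ask_chatgpt(prompt: str) -> str:
    client = OpenAI()  # reads OPENAI_API_KEY from the environment
    response = client.chat.completions.create(
        model="gpt-4-turbo",  # placeholder model name, not taken from this commit
        messages=[{"role": "user", "content": prompt}],
    )
    # Return the raw assistant text; the caller extracts the ```python block from it.
    return response.choices[0].message.content
```

Whichever branch runs, the result is passed to replace_code_in_source as before; the added send_output("assistant_message", ...) calls then report back whether the parsed output was sent, could not be sent because the topic was unavailable, or could not be parsed as JSON.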