|
from dora import DoraStatus |
|
import pylcs |
|
import textwrap |
|
import pandas as pd |
|
import os |
|
import pyarrow as pa |
|
import numpy as np |
|
from ctransformers import AutoModelForCausalLM |
|
|
|
# Bounds of the line window searched by search_most_simlar_line: only lines
# with index in [MIN_NUMBER_LINES, MAX_NUMBER_LINES) of the input text are
# considered as candidates for replacement.
MIN_NUMBER_LINES = 4

MAX_NUMBER_LINES = 21
|
|
|
|
|
def search_most_simlar_line(text, searched_line):
    """Return the line of *text* most similar to *searched_line*.

    Only lines with index in [MIN_NUMBER_LINES, MAX_NUMBER_LINES) are
    considered; similarity is measured with Levenshtein edit distance
    (smaller is more similar). Ties resolve to the earliest line.

    Raises:
        ValueError: if *text* has no lines inside the search window.
    """
    lines = text.split("\n")
    window = lines[MIN_NUMBER_LINES:MAX_NUMBER_LINES]
    if not window:
        # Previously np.argmin raised an opaque "empty sequence" error here;
        # fail with an explicit message instead.
        raise ValueError(
            "text has no lines inside the "
            f"[{MIN_NUMBER_LINES}, {MAX_NUMBER_LINES}) search window"
        )
    # min() with a key replaces the build-a-list-then-np.argmin dance and
    # preserves the original first-minimum tie-breaking.
    return min(window, key=lambda line: pylcs.edit_distance(line, searched_line))
|
|
|
|
|
def strip_indentation(code_block):
    """Return *code_block* with any common leading whitespace removed."""
    return textwrap.dedent(code_block)
|
|
|
|
|
def replace_code_with_indentation(original_code, replacement_code):
    """Prefix *replacement_code* with the leading whitespace of the first
    line of *original_code*.

    If *original_code* is empty, *replacement_code* is returned unchanged.
    """
    lines = original_code.splitlines()
    # Guard clause + truthiness instead of `if len(lines) != 0: ... else: ...`.
    if not lines:
        return replacement_code
    first = lines[0]
    # Leading-whitespace prefix = everything lstrip() would remove.
    indentation = first[: len(first) - len(first.lstrip())]
    return indentation + replacement_code
|
|
|
|
|
def replace_source_code(source_code, gen_replacement):
    """Splice the model-generated line *gen_replacement* into *source_code*.

    The generated text is stripped of markdown code fences and newlines,
    re-indented to match the most similar existing line (found within the
    MIN/MAX_NUMBER_LINES window), and substituted for that line.
    """
    initial = search_most_simlar_line(source_code, gen_replacement)
    print(f"Initial source code: {initial}")
    replacement = strip_indentation(
        gen_replacement.replace("```python\n", "")
        .replace("\n```", "")
        .replace("\n", "")
    )
    intermediate_result = replace_code_with_indentation(initial, replacement)
    print(f"Intermediate result: {intermediate_result}")
    # count=1: the matched line may occur several times in the file; only
    # the first (matched) occurrence should be rewritten, not all of them.
    return source_code.replace(initial, intermediate_result, 1)
|
|
|
|
|
def save_as(content, path):
    """Write *content* to *path* as UTF-8 text.

    The explicit encoding matches the ``encoding="utf8"`` used for every
    read in this module; without it the platform default encoding would be
    used, which can corrupt non-ASCII source on some systems.
    """
    with open(path, "w", encoding="utf8") as file:
        file.write(content)
|
|
|
|
|
class Operator:
    """Dora operator that asks a local Mistral model to patch a source file."""

    def __init__(self):
        # Quantized OpenHermes-2.5 Mistral 7B (GGUF); gpu_layers=50 offloads
        # part of the network to the GPU when one is available.
        self.llm = AutoModelForCausalLM.from_pretrained(
            "TheBloke/OpenHermes-2.5-Mistral-7B-GGUF",
            model_file="openhermes-2.5-mistral-7b.Q4_K_M.gguf",
            model_type="mistral",
            gpu_layers=50,
        )

    def on_event(
        self,
        dora_event,
        send_output,
    ) -> DoraStatus:
        """Handle one dora event.

        On an INPUT event: read the file named in the event payload, ask the
        model for a one-line modification answering the payload's query,
        splice it into the source, and emit the patched source on the
        ``output_file`` output. Other event types are ignored.
        """
        if dora_event["type"] == "INPUT":
            # Named event_input rather than `input` to avoid shadowing the builtin.
            event_input = dora_event["value"][0].as_py()

            with open(event_input["path"], "r", encoding="utf8") as f:
                raw = f.read()
            # Only the first 400 chars of the file are given as context.
            prompt = f"{raw[:400]} \n\n {event_input['query']}. "
            print("received prompt: {}".format(prompt))
            output = self.ask_mistral(
                "You're a python code expert. Respond with only one line of code that modify a constant variable. Keep the uppercase.",
                prompt,
            )
            print("output: {}".format(output))
            source_code = replace_source_code(raw, output)
            send_output(
                "output_file",
                pa.array(
                    [{"raw": source_code, "path": event_input["path"], "gen_output": output}]
                ),
                dora_event["metadata"],
            )
        # Always continue. Previously this return sat inside the `if`, so
        # non-INPUT events returned None despite the declared DoraStatus
        # return type.
        return DoraStatus.CONTINUE

    def ask_mistral(self, system_message, prompt):
        """Run one ChatML-formatted prompt through the local model and return
        the generated text up to the first <|im_end|> marker."""
        # ChatML template expected by OpenHermes; the continuation lines of
        # the f-string are deliberately unindented so no stray whitespace
        # enters the prompt.
        prompt_template = f"""<|im_start|>system
{system_message}<|im_end|>
<|im_start|>user
{prompt}<|im_end|>
<|im_start|>assistant
"""
        outputs = self.llm(
            prompt_template,
        )
        return outputs.split("<|im_end|>")[0]
|
|
|
|
|
if __name__ == "__main__":
    op = Operator()

    # Target the sibling planning_op.py that lives next to this file.
    current_directory = os.path.dirname(__file__)
    # os.path.join instead of string concatenation keeps the path portable.
    path = os.path.join(current_directory, "planning_op.py")
    with open(path, "r", encoding="utf8") as f:
        raw = f.read()

    # Drive the operator once with a synthetic INPUT event; `print` stands in
    # for dora's send_output callback.
    op.on_event(
        {
            "type": "INPUT",
            "id": "tick",
            "value": pa.array(
                [
                    {
                        "raw": raw,
                        "path": path,
                        "query": "Set rotation to 20",
                    }
                ]
            ),
            "metadata": [],
        },
        print,
    )
|
|