Update README.md
Browse files
README.md
CHANGED
@@ -54,9 +54,9 @@ before = f"# This is the assembly code:\n"#prompt
 after = "\n# What is the source code?\n"#prompt
 asm_func = before+asm_func.strip()+after
 tokenizer = AutoTokenizer.from_pretrained(model_path)
-model = AutoModelForCausalLM.from_pretrained(model_path, torch_dtype="auto", device_map="auto")
+model = AutoModelForCausalLM.from_pretrained(model_path, torch_dtype="auto", device_map="auto")
 
-inputs = tokenizer(asm_func, return_tensors="pt")
+inputs = tokenizer(asm_func, return_tensors="pt")
 with torch.no_grad():
     outputs = model.generate(**inputs, max_new_tokens=2048)### max length to 4096, max new tokens should be below the range
 c_func_decompile = tokenizer.decode(outputs[0][len(inputs[0]):-1])