GusPuffy committed on
Commit
6783f61
1 Parent(s): 01f2fb9

Upload folder using huggingface_hub

Files changed (2)
  1. README.md +1 -1
  2. decompile.py +1 -1
README.md CHANGED
@@ -71,7 +71,7 @@ python convert_pyc_to_bytecode.py directory_with_files
 2. Decompile the bytecode strings back to Python source code (This will take some time depending on how many files there are)
 ```
 conda activate pydecompiler-310
-python convert_pyc_to_bytecode.py directory_with_files
+python decompile.py directory_with_files
 ```
 Note that files over 15k token count are skipped.
 Files over 10k will most likely be truncated due to the context limit.
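
The skip/truncation note in the README follows from the 16,384-token context window configured in decompile.py. The sketch below is a hypothetical pre-check, not part of this repository's scripts: it uses llama_cpp's `tokenize` to count tokens in a bytecode dump before sending it to the model. The threshold constants, the `vocab_only` shortcut, and the function names are assumptions for illustration.

```python
# Hypothetical pre-check sketch (not part of this repo): count tokens in a
# bytecode string with the same tokenizer family the decompiler model uses,
# and decide whether it can fit in the 16,384-token context window.
from llama_cpp import Llama

SKIP_LIMIT = 15_000        # per the README: files above this are skipped
TRUNCATE_WARNING = 10_000  # per the README: likely truncated above this

llm = Llama(
    model_path="sentient-simulations-pydecompiler-3.7-6.7b-v0.9-q8_0.gguf",
    n_ctx=16384,
    vocab_only=True,  # assumption: loading only the vocab is enough to tokenize
)

def token_count(bytecode_text: str) -> int:
    # llama_cpp tokenizes bytes, so encode the bytecode string first
    return len(llm.tokenize(bytecode_text.encode("utf-8")))

def should_decompile(bytecode_text: str) -> bool:
    n = token_count(bytecode_text)
    if n > SKIP_LIMIT:
        print(f"skipping: {n} tokens exceeds the {SKIP_LIMIT}-token limit")
        return False
    if n > TRUNCATE_WARNING:
        print(f"warning: {n} tokens, output may be truncated")
    return True
```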
decompile.py CHANGED
@@ -5,7 +5,7 @@ from llama_cpp import Llama
 from util import create_prompt, is_bytecode_empty
 
 llm = Llama(
-    model_path="/home/guspuffy/projects/output-merged/GGUF/sentient-simulations-pydecompiler-3.7-6.7b-v0.9-q8_0.gguf",
+    model_path="sentient-simulations-pydecompiler-3.7-6.7b-v0.9-q8_0.gguf",
     n_gpu_layers=-1,
     last_n_tokens_size=0,
     n_ctx=16384,
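
For context, here is a minimal sketch of how the `Llama` instance configured above could be driven for a single bytecode string. This is not the repository's decompile.py: the signatures of `create_prompt` and `is_bytecode_empty` (imported from util) are assumptions, and `max_tokens`/`temperature` are illustrative values only.

```python
# Hypothetical usage sketch of the decompiler model (assumptions noted inline).
from llama_cpp import Llama
from util import create_prompt, is_bytecode_empty

llm = Llama(
    model_path="sentient-simulations-pydecompiler-3.7-6.7b-v0.9-q8_0.gguf",
    n_gpu_layers=-1,       # offload all layers to the GPU
    last_n_tokens_size=0,  # no repeat-penalty history
    n_ctx=16384,           # context window shared by prompt and completion
)

def decompile_one(bytecode_text: str) -> str | None:
    if is_bytecode_empty(bytecode_text):   # assumption: takes the raw bytecode string
        return None
    prompt = create_prompt(bytecode_text)  # assumption: returns the model prompt
    result = llm(prompt, max_tokens=4096, temperature=0.0)
    return result["choices"][0]["text"]
```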