mrm8488 committed
Commit 3e34905 (0 parents)

Duplicate from mrm8488/santacoder-bash-completion

Files changed (4):
  1. .gitattributes +27 -0
  2. README.md +13 -0
  3. app.py +58 -0
  4. requirements.txt +3 -0
.gitattributes ADDED
@@ -0,0 +1,27 @@
+ *.7z filter=lfs diff=lfs merge=lfs -text
+ *.arrow filter=lfs diff=lfs merge=lfs -text
+ *.bin filter=lfs diff=lfs merge=lfs -text
+ *.bz2 filter=lfs diff=lfs merge=lfs -text
+ *.ftz filter=lfs diff=lfs merge=lfs -text
+ *.gz filter=lfs diff=lfs merge=lfs -text
+ *.h5 filter=lfs diff=lfs merge=lfs -text
+ *.joblib filter=lfs diff=lfs merge=lfs -text
+ *.lfs.* filter=lfs diff=lfs merge=lfs -text
+ *.model filter=lfs diff=lfs merge=lfs -text
+ *.msgpack filter=lfs diff=lfs merge=lfs -text
+ *.onnx filter=lfs diff=lfs merge=lfs -text
+ *.ot filter=lfs diff=lfs merge=lfs -text
+ *.parquet filter=lfs diff=lfs merge=lfs -text
+ *.pb filter=lfs diff=lfs merge=lfs -text
+ *.pt filter=lfs diff=lfs merge=lfs -text
+ *.pth filter=lfs diff=lfs merge=lfs -text
+ *.rar filter=lfs diff=lfs merge=lfs -text
+ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
+ *.tar.* filter=lfs diff=lfs merge=lfs -text
+ *.tflite filter=lfs diff=lfs merge=lfs -text
+ *.tgz filter=lfs diff=lfs merge=lfs -text
+ *.wasm filter=lfs diff=lfs merge=lfs -text
+ *.xz filter=lfs diff=lfs merge=lfs -text
+ *.zip filter=lfs diff=lfs merge=lfs -text
+ *.zstandard filter=lfs diff=lfs merge=lfs -text
+ *tfevents* filter=lfs diff=lfs merge=lfs -text
README.md ADDED
@@ -0,0 +1,13 @@
+ ---
+ title: Santacoder Bash/Shell completion
+ emoji: 🎅🐚
+ colorFrom: blue
+ colorTo: purple
+ sdk: gradio
+ sdk_version: 3.0.4
+ app_file: app.py
+ pinned: false
+ duplicated_from: mrm8488/santacoder-bash-completion
+ ---
+
+ Check out the configuration reference at https://huggingface.co/docs/hub/spaces#reference
app.py ADDED
@@ -0,0 +1,58 @@
+ import gradio as gr
+ from transformers import AutoTokenizer, AutoModelForCausalLM, set_seed, pipeline
+
+ title = "SantaCoder 🎅 bash/shell 🐚 Completion"
+ description = "This is a Space for code generation with [SantaCoder fine-tuned on The Stack bash/shell](https://huggingface.co/mrm8488/santacoder-finetuned-the-stack-bash-4)"
+ EXAMPLE_0 = "#!/bin/bash\n# This script removes files larger than 2MB in the current folder\nfind ."
+ EXAMPLE_1 = "#!/bin/bash\n\n# This script sends an email\nto=\"admin@example.com\"\nsubject=\"Greeting\"\nmsg=\"Welcome to our site\"\n"
+ EXAMPLE_3 = "#!/bin/bash\n# This script converts avi files to mp4\nfor filename in $(ls *.avi); do\n"
+ EXAMPLE_4 = "#!/bin/bash\nsource=$1\ndest=$2\n# copy source to dest\n"
+ EXAMPLE_5 = """#!/bin/bash\n\n# This script checks if the arg passed as first arg is a founder of huggingface\nfounders_array=("julien" "thom" "clem")"""
+
+
+ examples = [[EXAMPLE_0, 14, 0.6, 42], [EXAMPLE_1, 28, 0.6, 42], [EXAMPLE_3, 46, 0.6, 42], [EXAMPLE_4, 35, 0.6, 43], [EXAMPLE_5, 70, 0.6, 43]]
+ tokenizer = AutoTokenizer.from_pretrained("mrm8488/santacoder-finetuned-the-stack-bash-4")
+ model = AutoModelForCausalLM.from_pretrained("mrm8488/santacoder-finetuned-the-stack-bash-4", trust_remote_code=True)
+
+
+ def code_generation(gen_prompt, max_tokens, temperature=0.6, seed=42):
+     set_seed(seed)
+     pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)
+     generated_text = pipe(gen_prompt, do_sample=True, top_p=0.95, temperature=temperature, max_new_tokens=max_tokens)[0]['generated_text']
+     return generated_text
+
+
+ iface = gr.Interface(
+     fn=code_generation,
+     inputs=[
+         gr.Textbox(lines=10, label="Input code"),
+         gr.inputs.Slider(
+             minimum=8,
+             maximum=256,
+             step=1,
+             default=8,
+             label="Number of tokens to generate",
+         ),
+         gr.inputs.Slider(
+             minimum=0,
+             maximum=2,
+             step=0.1,
+             default=0.6,
+             label="Temperature",
+         ),
+         gr.inputs.Slider(
+             minimum=0,
+             maximum=1000,
+             step=1,
+             default=42,
+             label="Random seed to use for the generation"
+         )
+     ],
+     outputs=gr.Textbox(label="Predicted code", lines=10),
+     examples=examples,
+     layout="horizontal",
+     theme="peach",
+     description=description,
+     title=title
+ )
+ iface.launch()
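
For reference, the generation path in app.py can also be exercised outside Gradio. The snippet below is a local smoke-test sketch, not part of this commit; the checkpoint name and sampling arguments are taken from app.py, and unlike code_generation it builds the pipeline once and reuses it rather than re-creating it on every request.

import gradio  # not needed for this sketch, only for the Space itself
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline, set_seed

checkpoint = "mrm8488/santacoder-finetuned-the-stack-bash-4"
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = AutoModelForCausalLM.from_pretrained(checkpoint, trust_remote_code=True)

# Build the text-generation pipeline once (code_generation rebuilds it per call).
pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)

prompt = "#!/bin/bash\n# This script removes files larger than 2MB in the current folder\nfind ."
set_seed(42)
completion = pipe(
    prompt,
    do_sample=True,
    top_p=0.95,
    temperature=0.6,
    max_new_tokens=14,
)[0]["generated_text"]
print(completion)

Hoisting the pipeline construction out of code_generation in the Space itself would avoid that per-request cost while producing the same outputs for a fixed seed.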
requirements.txt ADDED
@@ -0,0 +1,3 @@
+ transformers==4.19.0
+ accelerate==0.10.0
+ torch==1.11.0