mrm8488 committed on
Commit
1d48e95
1 Parent(s): 3e34905

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +10 -12
app.py CHANGED
@@ -1,18 +1,16 @@
1
  import gradio as gr
2
  from transformers import AutoTokenizer, AutoModelForCausalLM, set_seed, pipeline
3
 
4
- title = "SantaCoder 🎅 bash/shell 🐚 Completion"
5
- description = "This is a subspace to make code generation with [SantaCoder fine-tuned on The Stack bash/shell](https://huggingface.co/mrm8488/santacoder-finetuned-the-stack-bash-4)"
6
- EXAMPLE_0 = "#!/bin/bash\n# This script removes files larger than 2MB in the current folder\nfind ."
7
- EXAMPLE_1 = "#!/bin/bash\n\n# This script send an email\nto=”admin@example.com”\nsubject=”Greeting”\nmsg=”Welcome to our site”\n"
8
- EXAMPLE_3 = "#!/bin/bash\n# This script convert avi files to mp4\nfor filename in $(ls *.avi); do\n"
9
- EXAMPLE_4 = "#!/bin/bash\nsource=$1\ndest=$2\n# copy source on dest\n"
10
- EXAMPLE_5 = """#!/bin/bash\n\n# This script check if the arg passed as first arg is a founder of huggingface\nfounders_array=("julien" "thom" "clem")"""
11
-
12
-
13
- examples = [[EXAMPLE_0, 14, 0.6, 42], [EXAMPLE_1, 28, 0.6, 42], [EXAMPLE_3, 46, 0.6, 42], [EXAMPLE_4, 35, 0.6, 43], [EXAMPLE_5, 70, 0.6, 43]]
14
- tokenizer = AutoTokenizer.from_pretrained("mrm8488/santacoder-finetuned-the-stack-bash-4")
15
- model = AutoModelForCausalLM.from_pretrained("mrm8488/santacoder-finetuned-the-stack-bash-4", trust_remote_code=True)
16
 
17
 
18
  def code_generation(gen_prompt, max_tokens, temperature=0.6, seed=42):
1
  import gradio as gr
2
  from transformers import AutoTokenizer, AutoModelForCausalLM, set_seed, pipeline
3
 
4
+ title = "SantaCoder 🎅 Dockerfiles 🐋 Completion"
5
+ description = "This is a subspace to make code generation with [SantaCoder fine-tuned on The Stack Dockerfiles](https://huggingface.co/mrm8488/santacoder-finetuned-the-stack-dockerfiles)"
6
+ EXAMPLE_0 = "# Dockerfile for Express API"
7
+
8
+
9
+ CKPT = "mrm8488/santacoder-finetuned-the-stack-dockerfiles"
10
+
11
+ examples = [[EXAMPLE_0, 55, 0.6, 42]]
12
+ tokenizer = AutoTokenizer.from_pretrained(CKPT)
13
+ model = AutoModelForCausalLM.from_pretrained(CKPT, trust_remote_code=True)
 
 
14
 
15
 
16
  def code_generation(gen_prompt, max_tokens, temperature=0.6, seed=42):