arampacha committed on
Commit f0a45da
1 Parent(s): 23a6073
Files changed (1): app.py  +92 -11
app.py CHANGED
@@ -1,25 +1,106 @@
 import gradio as gr
+
 from transformers import AutoModelForCausalLM, AutoTokenizer
 
-model_name = "flax-community/gpt-neo-125M-apps"
-# define model and tokenizer
-model = AutoModelForCausalLM.from_pretrained(model_name)
-tokenizer = AutoTokenizer.from_pretrained(model_name)
+model_name = "flax-community/gpt-code-clippy-125M-apps-alldata"
+model = AutoModelForCausalLM.from_pretrained(model_name, from_flax=True)
+tokenizer = AutoTokenizer.from_pretrained("EleutherAI/gpt-neo-125M")
 tokenizer.pad_token = tokenizer.eos_token
 
-def generate_solution(prompt, **kwargs):
+
+def format_input(question, starter_code=""):
+    answer_type = "\nUse Call-Based format\n" if starter_code else "\nUse Standard Input format\n"
+    return f"\nQUESTION:\n{question}\n{starter_code}\n{answer_type}\nANSWER:\n"
+
+
+def format_outputs(text):
+    formatted_text = f'''
+    <head>
+    <link rel="stylesheet"
+    href="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/10.0.3/styles/default.min.css">
+    <script src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/10.0.3/highlight.min.js"></script>
+    <script>hljs.initHighlightingOnLoad();</script>
+    </head>
+    <body>
+    <pre><code class="python">{text}</code></pre>
+    </body>
+    '''
+    return formatted_text
+
+
+def generate_solution(question, starter_code="", temperature=1., num_beams=1):
+    prompt = format_input(question, starter_code)
     input_ids = tokenizer(prompt, return_tensors="pt").input_ids
     start = len(input_ids[0])
-    output = model.generate(input_ids, pad_token_id=tokenizer.pad_token_id, **kwargs)
-    return tokenizer.decode(output[0][start:])
+    output = model.generate(
+        input_ids,
+        max_length=start + 200,
+        do_sample=True,
+        top_p=0.95,
+        pad_token_id=tokenizer.pad_token_id,
+        early_stopping=True,
+        temperature=temperature,
+        num_beams=int(num_beams),
+        no_repeat_ngram_size=None,
+        repetition_penalty=None,
+        num_return_sequences=None,
+    )
+
+    return format_outputs(tokenizer.decode(output[0][start:]).strip())
+
+
+_EXAMPLES = [
+    [
+        """
+Given a 2D list of size `m * n`. Your task is to find the sum of the minimum value in each row.
+For example:
+```python
+[
+  [1, 2, 3, 4, 5],       # minimum value of row is 1
+  [5, 6, 7, 8, 9],       # minimum value of row is 5
+  [20, 21, 34, 56, 100]  # minimum value of row is 20
+]
+```
+So, the function should return `26` because the sum of the minimums is `1 + 5 + 20 = 26`.
+""",
+        "",
+        0.8,
+    ],
+    [
+        """
+# Personalized greeting
+
+Create a function that gives a personalized greeting. This function takes two parameters: `name` and `owner`.
+""",
+        """
+Use conditionals to return the proper message:
+
+case | return
+--- | ---
+name equals owner | 'Hello boss'
+otherwise | 'Hello guest'
+def greet(name, owner):
+""",
+        0.8,
+    ]
+]
+
 
 inputs = [
-    gr.inputs.Textbox(placeholder="Define a problem here ...", lines=5)
+    gr.inputs.Textbox(placeholder="Define a problem here...", lines=7),
+    gr.inputs.Textbox(placeholder="Provide optional starter code...", lines=3),
+    gr.inputs.Slider(0.5, 1.5, 0.1, default=0.8, label="Temperature"),
+    gr.inputs.Slider(1, 4, 1, default=1, label="Beam size")
+]
+
+outputs = [
+    gr.outputs.HTML(label="Solution")
 ]
 
 gr.Interface(
     generate_solution,
     inputs=inputs,
-    outputs="text",
-    title="Coding problem solver",
-).launch()
+    outputs=outputs,
+    title="Code Clippy: Problem Solver",
+    examples=_EXAMPLES,
+).launch(share=False)
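
For reference, here is a minimal sketch of how the APPS-style prompt introduced in this commit can be exercised outside the Gradio UI. It reuses the model name, prompt template, and decoding settings from the diff above; the question text, the 0.8 temperature, and the 200-token continuation budget are illustrative values, and loading the Flax checkpoint with from_flax=True assumes flax is installed alongside torch.

# Sketch: build the commit's APPS-style prompt and generate a solution directly.
from transformers import AutoModelForCausalLM, AutoTokenizer

model_name = "flax-community/gpt-code-clippy-125M-apps-alldata"
model = AutoModelForCausalLM.from_pretrained(model_name, from_flax=True)  # converts the Flax weights to PyTorch
tokenizer = AutoTokenizer.from_pretrained("EleutherAI/gpt-neo-125M")
tokenizer.pad_token = tokenizer.eos_token

# Illustrative problem statement; empty starter code selects "Standard Input format".
question = "Write a function that returns the sum of the minimum value of each row of a 2D list."
starter_code = ""
answer_type = "\nUse Call-Based format\n" if starter_code else "\nUse Standard Input format\n"
prompt = f"\nQUESTION:\n{question}\n{starter_code}\n{answer_type}\nANSWER:\n"

input_ids = tokenizer(prompt, return_tensors="pt").input_ids
start = len(input_ids[0])
output = model.generate(
    input_ids,
    max_length=start + 200,            # same 200-token budget as the app
    do_sample=True,
    top_p=0.95,
    temperature=0.8,                   # exposed as a slider in the app
    num_beams=1,
    pad_token_id=tokenizer.pad_token_id,
)
# Decode only the generated continuation, mirroring generate_solution.
print(tokenizer.decode(output[0][start:]).strip())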