naruto7 committed on
Commit a9a0b5c
1 Parent(s): 270abe0

Update app.py

Files changed (1)
  1. app.py +2 -40
app.py CHANGED
@@ -3,9 +3,6 @@ import gradio as gr
 from rich.console import Console
 from rich.syntax import Syntax
 from transformers import AutoModelForCausalLM, AutoTokenizer
-import requests
-import json
-import webbrowser
 
 # model_name = "flax-community/gpt-code-clippy-1.3B-apps-alldata"
 model_name = "flax-community/gpt-code-clippy-125M-apps-alldata"
@@ -18,19 +15,9 @@ console = Console(record=True)
 
 def format_input(question, starter_code=""):
     answer_type = (
-        "\
-Use Call-Based format\
-" if starter_code else "\
-Use Standard Input format\
-"
+        "\nUse Call-Based format\n" if starter_code else "\nUse Standard Input format\n"
     )
-    return f"\
-QUESTION:\
-{question}\
-{starter_code}\
-{answer_type}\
-ANSWER:\
-"
+    return f"\nQUESTION:\n{question}\n{starter_code}\n{answer_type}\nANSWER:\n"
 
 
 def format_outputs(text):
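A note on the hunk above: in the removed version the trailing backslashes are line continuations inside ordinary string literals, so the old prompts contained no newlines at all; the replacement one-liners embed explicit \n. A quick, self-contained check of what the new helper returns (the example question is made up):

def format_input(question, starter_code=""):
    answer_type = (
        "\nUse Call-Based format\n" if starter_code else "\nUse Standard Input format\n"
    )
    return f"\nQUESTION:\n{question}\n{starter_code}\n{answer_type}\nANSWER:\n"

print(format_input("Read two integers from stdin and print their sum."))
# Prints:
#
# QUESTION:
# Read two integers from stdin and print their sum.
#
#
# Use Standard Input format
#
# ANSWER: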
@@ -107,34 +94,9 @@ inputs = [
     gr.inputs.Textbox(placeholder="Provide optional starter code...", lines=3),
     gr.inputs.Slider(0.5, 1.5, 0.1, default=0.8, label="Temperature"),
     gr.inputs.Slider(1, 4, 1, default=1, label="Beam size"),
-    gr.inputs.Textbox(lines=1, label="Your GitHub API token")
 ]
 
 outputs = [gr.outputs.HTML(label="Solution")]
-print(outputs)
-
-# adding carbon support
-
-GITHUB_API="https://api.github.com"
-API_TOKEN=gr.inputs.Textbox(label="Your GitHub API token")
-#form a request URL
-url=GITHUB_API+"/gists"
-
-#print headers,parameters,payload
-headers={'Authorization':'token %s'%API_TOKEN}
-params={'scope':'gist'}
-payload={outputs}
-
-
-
-res=requests.post(url,headers=headers,params=params,data=json.dumps(payload))
-
-
-col = st.beta_columns([2, 4])
-if col.button("Create a 'carbon' copy"):
-    carbon_url='https://carbon.now.sh/'+res.text.split(',')[0].split('/')[-1][:-1]
-    webbrowser.open_new(carbon_url)
-
 
 gr.Interface(
     generate_solution,
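Removing the gist/carbon block is the right call: as written it could not run. st.beta_columns is Streamlit API in a Gradio app (and st is never imported), API_TOKEN is bound to a gr.inputs.Textbox component rather than to the token the user typed, payload={outputs} is a set literal wrapping an unhashable list rather than the JSON body the Gist API expects, and webbrowser.open_new would open a browser on the server, not for the user. If the feature is ever wanted back, a minimal sketch along these lines could work; the function name and gist description are hypothetical:

import json

import requests


def create_carbon_link(api_token, solution_code):
    """Create a private gist with the solution and return a carbon.now.sh link to it."""
    payload = {
        "description": "gpt-code-clippy solution",  # assumed description
        "public": False,
        "files": {"solution.py": {"content": solution_code}},
    }
    res = requests.post(
        "https://api.github.com/gists",
        headers={"Authorization": f"token {api_token}"},
        data=json.dumps(payload),
    )
    res.raise_for_status()
    # carbon.now.sh can render a gist straight from its id.
    return f"https://carbon.now.sh/{res.json()['id']}"

The returned link could then be appended to the HTML produced by generate_solution instead of being opened with webbrowser on the Space's server.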
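The diff is truncated inside the final gr.Interface(...) call. For orientation, the wiring after this commit is presumably along these lines; the keyword arguments shown are assumptions, only the positional ones appear in the diff:

gr.Interface(
    generate_solution,   # fn(question, starter_code, temperature, beam_size) -> HTML
    inputs,              # the four gr.inputs.* components defined above
    outputs,             # [gr.outputs.HTML(label="Solution")]
    title="GPT Code Clippy",  # assumed
).launch()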