lgaleana committed on
Commit
8d878e6
1 Parent(s): 7a5c823

Fix code gen

Browse files
Files changed (2) hide show
  1. app.py +2 -2
  2. components.py +27 -26
app.py CHANGED
@@ -9,11 +9,11 @@ with gr.Blocks() as demo:
9
  gr.Markdown(
10
  """
11
  # Toolkit
12
- Assemble tasks to build an E2E application.
13
  <br>There are 2 types of tasks.
14
  <br>
15
  <br>**AI Task**: Ask ChatGPT to do something for you. Eg, summarize a text.
16
- <br>**Code Task**: ChatGPT will create a python function that will be executed on the fly. Eg, get the text from an url.
17
  <br>
18
  <br>Max 10 tasks allowed (for now).
19
  """
 
9
  gr.Markdown(
10
  """
11
  # Toolkit
12
+ Assemble tasks to build an E2E application. Give instructions with text.
13
  <br>There are 2 types of tasks.
14
  <br>
15
  <br>**AI Task**: Ask ChatGPT to do something for you. Eg, summarize a text.
16
+ <br>**Code Task**: ChatGPT will create a python function to do something for you. Eg, get the text from a website.
17
  <br>
18
  <br>Max 10 tasks allowed (for now).
19
  """
components.py CHANGED
@@ -1,5 +1,4 @@
1
  from abc import ABC, abstractmethod
2
- from concurrent.futures import ThreadPoolExecutor
3
  from typing import Any, Dict, List, Union
4
 
5
  import gradio as gr
@@ -158,22 +157,23 @@ class CodeTask(TaskComponent):
158
 
159
  @staticmethod
160
  def generate_code(code_prompt: str):
 
 
161
  raw_prompt_output = ""
162
- packages = ""
163
- function = ""
164
  error_message = gr.HighlightedText.update(None, visible=False)
165
  accordion = gr.Accordion.update()
166
 
167
  if not code_prompt:
168
  return (
169
  raw_prompt_output,
170
- packages,
171
- function,
172
  error_message,
173
  accordion,
174
  )
175
 
176
  print(f"Generating code.")
 
177
  try:
178
  raw_prompt_output = ai.llm.next(
179
  [
@@ -191,30 +191,31 @@ class CodeTask(TaskComponent):
191
  temperature=0,
192
  )
193
 
194
- def llm_call(prompt):
195
- return ai.llm.next([{"role": "user", "content": prompt}], temperature=0)
196
-
197
- with ThreadPoolExecutor(max_workers=2) as executor:
198
- packages, function = tuple(
199
- executor.map(
200
- llm_call,
201
- [
202
- f"""
203
- The following text should have a python function with some imports that need to be installed:
204
  {raw_prompt_output}
205
 
206
- Extract all the python packages that need to be installed with pip and nothing else.
207
- Print them as a single python list that can be used with eval().
208
- """,
209
- f"""
210
- The following text should have a python function and some imports:
211
- {raw_prompt_output}
212
 
213
- Exclusively extract the function and the imports, nothing else, so that it can be used with exec().
 
 
 
 
 
 
214
  """,
215
- ],
216
- )
 
217
  )
 
218
  except Exception as e:
219
  error_message = gr.HighlightedText.update(
220
  value=[(str(e), "ERROR")], visible=True
@@ -222,8 +223,8 @@ class CodeTask(TaskComponent):
222
  accordion = gr.Accordion.update(open=True)
223
  return (
224
  raw_prompt_output,
225
- packages,
226
- function.replace("```python", "").replace("```", ""),
227
  error_message,
228
  accordion,
229
  )
 
1
  from abc import ABC, abstractmethod
 
2
  from typing import Any, Dict, List, Union
3
 
4
  import gradio as gr
 
157
 
158
  @staticmethod
159
  def generate_code(code_prompt: str):
160
+ import json
161
+
162
  raw_prompt_output = ""
 
 
163
  error_message = gr.HighlightedText.update(None, visible=False)
164
  accordion = gr.Accordion.update()
165
 
166
  if not code_prompt:
167
  return (
168
  raw_prompt_output,
169
+ "",
170
+ "",
171
  error_message,
172
  accordion,
173
  )
174
 
175
  print(f"Generating code.")
176
+ parsed_output = {"packages": "", "script": ""}
177
  try:
178
  raw_prompt_output = ai.llm.next(
179
  [
 
191
  temperature=0,
192
  )
193
 
194
+ parsed_output = json.loads(
195
+ ai.llm.next(
196
+ [
197
+ {
198
+ "role": "user",
199
+ "content": f"""
200
+ The following text should have a python function with some imports that might need to be installed:
 
 
 
201
  {raw_prompt_output}
202
 
203
+ Extract all the python packages that need to be installed with pip.
204
+ Also extract the function and the imports as a single python script.
 
 
 
 
205
 
206
+ Write a JSON as follows:
207
+ ```
208
+ {{
209
+ "packages": Python list of packages to be parsed with eval(). If no packages, the list should be empty.
210
+ "script": Python script to be executed with exec(). Include only the function and the imports.
211
+ }}
212
+ ```
213
  """,
214
+ }
215
+ ],
216
+ temperature=0,
217
  )
218
+ )
219
  except Exception as e:
220
  error_message = gr.HighlightedText.update(
221
  value=[(str(e), "ERROR")], visible=True
 
223
  accordion = gr.Accordion.update(open=True)
224
  return (
225
  raw_prompt_output,
226
+ parsed_output["packages"],
227
+ parsed_output["script"],
228
  error_message,
229
  accordion,
230
  )