ML610 committed on
Commit
2350bde
1 Parent(s): cca5ead

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +6 -3
app.py CHANGED
@@ -33,7 +33,7 @@ def generate(
33
  ):
34
  """run model inference, will return a Generator if streaming is true"""
35
 
36
- return next(llm(format_prompt(user_prompt), **asdict(generation_config)))["text"]
37
 
38
  config = AutoConfig.from_pretrained(
39
  "teknium/Replit-v2-CodeInstruct-3B", context_length=2048
@@ -70,8 +70,11 @@ examples = [example_1, example_2]
70
 
71
def generate_code(user_input):
    """Run the model on *user_input* and return the generated text.

    Thin wrapper used as the Gradio callback: delegates to ``generate``
    and echoes the completion to stdout for debugging.
    """
    completion = generate(llm, generation_config, user_input)
    print(completion)
    return completion
 
 
 
75
 
76
  UI = gr.Interface(
77
  fn=generate_code,
 
33
  ):
34
  """run model inference, will return a Generator if streaming is true"""
35
 
36
+ return llm(format_prompt(user_prompt), **asdict(generation_config))
37
 
38
  config = AutoConfig.from_pretrained(
39
  "teknium/Replit-v2-CodeInstruct-3B", context_length=2048
 
70
 
71
def generate_code(user_input):
    """Run the model on *user_input* and return the full generated text.

    ``generate`` yields the completion as a stream of text chunks; collect
    them into one string. Uses ``str.join`` (single O(n) pass) instead of
    the original ``code = code + word`` loop, which is quadratic in the
    number of chunks.
    """
    response = generate(llm, generation_config, user_input)
    # NOTE(review): diff indentation is ambiguous — this assumes the original
    # printed once after the loop (consistent with the single final return).
    code = "".join(response)
    print(code)  # echo the completion to stdout for debugging
    return code
78
 
79
  UI = gr.Interface(
80
  fn=generate_code,