ML610 committed on
Commit
821e06e
1 Parent(s): 6fc6d01

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +1 -1
app.py CHANGED
@@ -33,7 +33,7 @@ def generate(
33
  ):
34
  """run model inference, will return a Generator if streaming is true"""
35
 
36
- return llm(format_prompt(user_prompt), **{item: value for item, value in generation_config.items()})
37
 
38
  config = AutoConfig.from_pretrained(
39
  "teknium/Replit-v2-CodeInstruct-3B", context_length=2048
 
33
  ):
34
  """run model inference, will return a Generator if streaming is true"""
35
 
36
+ return llm(format_prompt(user_prompt), **{key: generation_config[key] for key in generation_config.keys()})
37
 
38
  config = AutoConfig.from_pretrained(
39
  "teknium/Replit-v2-CodeInstruct-3B", context_length=2048