ML610 committed on
Commit
147c094
1 Parent(s): 821e06e

Update app.py

Files changed (1)
  1. app.py +1 -1
app.py CHANGED
@@ -33,7 +33,7 @@ def generate(
 ):
     """run model inference, will return a Generator if streaming is true"""
 
-    return llm(format_prompt(user_prompt), **{key: generation_config[key] for key in generation_config.keys()})
+    return llm(format_prompt(user_prompt), **asdict(generation_config))
 
 config = AutoConfig.from_pretrained(
     "teknium/Replit-v2-CodeInstruct-3B", context_length=2048