terryyz committed on
Commit f8c3014
1 Parent(s): 6854c08

Update app.py

Files changed (1):
  1. app.py +7 -7
app.py CHANGED
@@ -166,22 +166,22 @@ def generate(
     )
 
     if method == "Base":
-        output = stream(basemodel, prompt, generate_kwargs)
+        model = basemodel
     elif method == "Prefix":
-        output = model_map[library + " Prefix"]
+        model = model_map[library + " Prefix"]
     elif method == "Evo Prefix" and library in ["SQLModel", "SfePy", "MegEngine"]:
-        output = model_map["Main Evo Prefix"]
+        model = model_map["Main Evo Prefix"]
     elif method == "FFT" and library in ["SQLModel", "SfePy", "MegEngine"]:
-        output = model_map[library + " FFT"]
+        model = model_map[library + " FFT"]
     elif method == "Evo FFT" and library in ["SQLModel", "SfePy", "MegEngine"]:
-        output = model_map["Main Evo FFT"]
+        model = model_map["Main Evo FFT"]
     elif method == "Full Data FFT" and library in ["SQLModel", "SfePy", "MegEngine"]:
-        output = model_map["Main FD FFT"]
+        model = model_map["Main FD FFT"]
     elif method == "Evo Prefix" and library in ["LangChain", "LlamaIndex", "DSPy"]:
         model = model_map["CS Evo Prefix"]
     else:
         output = ""
-    model.to(device)
+    # model.to(device)
     input_ids = tokenizer(code, return_tensors="pt").to(device)
     # generated_ids = model.generate(**input_ids, **generate_kwargs)
     generated_ids = model.generate(**input_ids)
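For context, the change switches the branch bodies from assigning an `output` string to assigning a `model` object, so a single tokenize-and-generate path at the end of the function serves every branch. Below is a minimal, self-contained sketch of that pattern; the checkpoint name, the `model_map` contents, and the example prompt are placeholders for illustration, not the Space's actual configuration.

# Sketch of the "resolve a model first, then run one shared generation path" pattern.
# Checkpoint names and model_map entries are placeholders, not the Space's real setup.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

device = "cuda" if torch.cuda.is_available() else "cpu"

tokenizer = AutoTokenizer.from_pretrained("bigcode/starcoderbase-1b")  # placeholder checkpoint
basemodel = AutoModelForCausalLM.from_pretrained("bigcode/starcoderbase-1b").to(device)

# Placeholder map; in app.py the values would be the per-library tuned variants.
model_map = {"SQLModel Prefix": basemodel, "Main Evo Prefix": basemodel}

def pick_model(method: str, library: str):
    # Mirrors the commit's branching: return a model object rather than an output string.
    if method == "Base":
        return basemodel
    if method == "Prefix":
        return model_map[library + " Prefix"]
    return basemodel  # fallback, analogous to the final else branch

code = "from sqlmodel import SQLModel\n"  # example prompt (placeholder)
model = pick_model("Base", "SQLModel")
input_ids = tokenizer(code, return_tensors="pt").to(device)
generated_ids = model.generate(**input_ids, max_new_tokens=64)
print(tokenizer.decode(generated_ids[0], skip_special_tokens=True))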