nazneen committed
Commit 2a946bb • 1 Parent(s): c1bd56c

Update app.py

Files changed (1)
  1. app.py +1 -1
app.py CHANGED
@@ -15,7 +15,7 @@ theme = gr.themes.Monochrome(
 TOKEN = os.getenv("USER_TOKEN")
 tokenizer = AutoTokenizer.from_pretrained("tiiuae/falcon-7b-instruct")
 instruct_pipeline_falcon = pipeline(model="tiiuae/falcon-7b-instruct", tokenizer = tokenizer, torch_dtype=torch.bfloat16, trust_remote_code=True, device_map="auto", device=0)
-instruct_pipeline_llama = pipeline(model="HuggingFaceH4/llama-7b-ift-ds-save-test4", torch_dtype=torch.bfloat16, trust_remote_code=True, device_map="auto", use_auth_token=TOKEN)
+instruct_pipeline_llama = pipeline(model="project-baize/baize-v2-7b", torch_dtype=torch.bfloat16, trust_remote_code=True, device_map="auto")
 
 def generate(query, temperature, top_p, top_k, max_new_tokens):
     return [instruct_pipeline_falcon(query, temperature=temperature, top_p=top_p, top_k=top_k, max_new_tokens=max_new_tokens)[0]["generated_text"],
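For reference, below is a minimal sketch of how the two pipelines are presumably wired together after this change. The pipeline setup mirrors the diff above (the second pipeline now loads the public project-baize/baize-v2-7b checkpoint, so use_auth_token is dropped); the second element of generate()'s return list is an assumption, since the hunk is truncated after the falcon call, and device=0 is omitted here because device_map="auto" already handles placement.

```python
# Minimal sketch, assuming generate() returns one completion per pipeline.
# The baize element of the return list is an assumed completion of the
# truncated return statement in the hunk above.
import os

import torch
from transformers import AutoTokenizer, pipeline

TOKEN = os.getenv("USER_TOKEN")  # no longer required to load the public baize checkpoint

tokenizer = AutoTokenizer.from_pretrained("tiiuae/falcon-7b-instruct")

# Falcon pipeline as in the diff (the app additionally passes device=0).
instruct_pipeline_falcon = pipeline(
    model="tiiuae/falcon-7b-instruct",
    tokenizer=tokenizer,
    torch_dtype=torch.bfloat16,
    trust_remote_code=True,
    device_map="auto",
)

# After this commit the second pipeline loads project-baize/baize-v2-7b,
# which is public, so use_auth_token=TOKEN is no longer passed.
instruct_pipeline_llama = pipeline(
    model="project-baize/baize-v2-7b",
    torch_dtype=torch.bfloat16,
    trust_remote_code=True,
    device_map="auto",
)


def generate(query, temperature, top_p, top_k, max_new_tokens):
    # One generated_text per model, falcon first, then baize.
    return [
        instruct_pipeline_falcon(
            query,
            temperature=temperature,
            top_p=top_p,
            top_k=top_k,
            max_new_tokens=max_new_tokens,
        )[0]["generated_text"],
        instruct_pipeline_llama(
            query,
            temperature=temperature,
            top_p=top_p,
            top_k=top_k,
            max_new_tokens=max_new_tokens,
        )[0]["generated_text"],
    ]
```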