Update app.py
app.py
CHANGED
@@ -15,7 +15,7 @@ theme = gr.themes.Monochrome(
 TOKEN = os.getenv("USER_TOKEN")
 tokenizer = AutoTokenizer.from_pretrained("tiiuae/falcon-7b-instruct")
 instruct_pipeline_falcon = pipeline(model="tiiuae/falcon-7b-instruct", tokenizer = tokenizer, torch_dtype=torch.bfloat16, trust_remote_code=True, device_map="auto", device=0)
-instruct_pipeline_llama = pipeline(model="
+instruct_pipeline_llama = pipeline(model="project-baize/baize-v2-7b", torch_dtype=torch.bfloat16, trust_remote_code=True, device_map="auto")
 
 def generate(query, temperature, top_p, top_k, max_new_tokens):
     return [instruct_pipeline_falcon(query, temperature=temperature, top_p=top_p, top_k=top_k, max_new_tokens=max_new_tokens)[0]["generated_text"],
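The generate function shown (truncated) appears to run the same prompt through both pipelines and return the two completions for side-by-side comparison. Below is a minimal sketch of how the rest of the app might be wired up with Gradio. Only the pipeline setup and the first return value appear in the diff; the second return value, the Gradio layout, slider ranges, defaults, and output labels are assumptions, and device=0 is omitted since device_map="auto" already handles placement.

import os
import torch
import gradio as gr
from transformers import AutoTokenizer, pipeline

# Theme referenced in the hunk header context of the diff.
theme = gr.themes.Monochrome()

TOKEN = os.getenv("USER_TOKEN")

tokenizer = AutoTokenizer.from_pretrained("tiiuae/falcon-7b-instruct")
instruct_pipeline_falcon = pipeline(
    model="tiiuae/falcon-7b-instruct",
    tokenizer=tokenizer,
    torch_dtype=torch.bfloat16,
    trust_remote_code=True,
    device_map="auto",
)
instruct_pipeline_llama = pipeline(
    model="project-baize/baize-v2-7b",
    torch_dtype=torch.bfloat16,
    trust_remote_code=True,
    device_map="auto",
)

def generate(query, temperature, top_p, top_k, max_new_tokens):
    # Run the same prompt through both models so their outputs can be compared.
    falcon_out = instruct_pipeline_falcon(
        query, temperature=temperature, top_p=top_p, top_k=top_k,
        max_new_tokens=max_new_tokens,
    )[0]["generated_text"]
    llama_out = instruct_pipeline_llama(
        query, temperature=temperature, top_p=top_p, top_k=top_k,
        max_new_tokens=max_new_tokens,
    )[0]["generated_text"]
    # The diff only shows the falcon output being returned; returning both is an assumption.
    return [falcon_out, llama_out]

# Assumed interface layout: one prompt box, sampling controls, two output boxes.
demo = gr.Interface(
    fn=generate,
    inputs=[
        gr.Textbox(label="Prompt"),
        gr.Slider(0.1, 2.0, value=0.7, label="Temperature"),
        gr.Slider(0.1, 1.0, value=0.9, label="Top-p"),
        gr.Slider(1, 100, value=50, step=1, label="Top-k"),
        gr.Slider(16, 512, value=256, step=16, label="Max new tokens"),
    ],
    outputs=[
        gr.Textbox(label="falcon-7b-instruct"),
        gr.Textbox(label="baize-v2-7b"),
    ],
    theme=theme,
)

if __name__ == "__main__":
    demo.launch()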