nazneen committed
Commit 3c85871 • 1 Parent(s): f3311fa

Update app.py

Files changed (1)
  1. app.py +3 -1
app.py CHANGED
@@ -2,6 +2,7 @@ import gradio as gr
 import torch
 import os
 from transformers import pipeline
+from transformers import AutoTokenizer
 
 theme = gr.themes.Monochrome(
     primary_hue="indigo",
@@ -12,7 +13,8 @@ theme = gr.themes.Monochrome(
 )
 
 TOKEN = os.getenv("USER_TOKEN")
-instruct_pipeline_falcon = pipeline(model="tiiuae/falcon-7b-instruct", torch_dtype=torch.bfloat16, trust_remote_code=True, device_map="auto")
+tokenizer = AutoTokenizer.from_pretrained(model_name)
+instruct_pipeline_falcon = pipeline(model="tiiuae/falcon-7b-instruct", tokenizer = tokenizer, torch_dtype=torch.bfloat16, trust_remote_code=True, device_map="auto")
 instruct_pipeline_llama = pipeline(model="HuggingFaceH4/llama-7b-ift-ds-save-test4", torch_dtype=torch.bfloat16, trust_remote_code=True, device_map="auto", use_auth_token=TOKEN)
 #instruct_pipeline_12b = pipeline(model="databricks/dolly-v2-12b", torch_dtype=torch.bfloat16, trust_remote_code=True, device_map="auto")
 
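Note that `model_name` is not defined in either hunk shown above; unless it is assigned elsewhere in app.py, the new `AutoTokenizer.from_pretrained(model_name)` line would raise a NameError at startup. A minimal sketch of what the change presumably intends, assuming `model_name` is meant to be the same "tiiuae/falcon-7b-instruct" checkpoint passed to the pipeline:

    import torch
    from transformers import AutoTokenizer, pipeline

    # Assumption: model_name refers to the Falcon checkpoint used by the pipeline below.
    model_name = "tiiuae/falcon-7b-instruct"
    tokenizer = AutoTokenizer.from_pretrained(model_name)

    # Pass the pre-loaded tokenizer explicitly instead of letting pipeline() resolve it
    # from the hub on its own.
    instruct_pipeline_falcon = pipeline(
        model=model_name,
        tokenizer=tokenizer,
        torch_dtype=torch.bfloat16,
        trust_remote_code=True,
        device_map="auto",
    )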