ybelkada committed
Commit 7d67d43
1 Parent(s): 038d8e2

Update README.md (#1)


- Update README.md (499f0f00b6f38e9aa763e572df752a8a1f92e97e)

Files changed (1)
  1. README.md +8 -8
README.md CHANGED
@@ -74,8 +74,8 @@ Find below some example scripts on how to use the model in `transformers`:
 
 from transformers import AutoTokenizer, SwitchTransformersConditionalGeneration
 
-tokenizer = AutoTokenizer.from_pretrained("google/switch-base-8")
-model = SwitchTransformersConditionalGeneration.from_pretrained("google/switch-base-8")
+tokenizer = AutoTokenizer.from_pretrained("google/switch-base-256")
+model = SwitchTransformersConditionalGeneration.from_pretrained("google/switch-base-256")
 
 input_text = "A <extra_id_0> walks into a bar a orders a <extra_id_1> with <extra_id_2> pinch of <extra_id_3>."
 input_ids = tokenizer(input_text, return_tensors="pt").input_ids
@@ -96,8 +96,8 @@ print(tokenizer.decode(outputs[0]))
 # pip install accelerate
 from transformers import AutoTokenizer, SwitchTransformersConditionalGeneration
 
-tokenizer = AutoTokenizer.from_pretrained("google/switch-base-8")
-model = SwitchTransformersConditionalGeneration.from_pretrained("google/switch-base-8", device_map="auto")
+tokenizer = AutoTokenizer.from_pretrained("google/switch-base-256")
+model = SwitchTransformersConditionalGeneration.from_pretrained("google/switch-base-256", device_map="auto")
 
 input_text = "A <extra_id_0> walks into a bar a orders a <extra_id_1> with <extra_id_2> pinch of <extra_id_3>."
 input_ids = tokenizer(input_text, return_tensors="pt").input_ids.to(0)
@@ -120,8 +120,8 @@ print(tokenizer.decode(outputs[0]))
 # pip install accelerate
 from transformers import AutoTokenizer, SwitchTransformersConditionalGeneration
 
-tokenizer = AutoTokenizer.from_pretrained("google/switch-base-8")
-model = SwitchTransformersConditionalGeneration.from_pretrained("google/switch-base-8", device_map="auto", torch_dtype=torch.float16)
+tokenizer = AutoTokenizer.from_pretrained("google/switch-base-256")
+model = SwitchTransformersConditionalGeneration.from_pretrained("google/switch-base-256", device_map="auto", torch_dtype=torch.float16)
 
 input_text = "A <extra_id_0> walks into a bar a orders a <extra_id_1> with <extra_id_2> pinch of <extra_id_3>."
 input_ids = tokenizer(input_text, return_tensors="pt").input_ids.to(0)
@@ -142,8 +142,8 @@ print(tokenizer.decode(outputs[0]))
 # pip install bitsandbytes accelerate
 from transformers import AutoTokenizer, SwitchTransformersConditionalGeneration
 
-tokenizer = AutoTokenizer.from_pretrained("google/switch-base-8")
-model = SwitchTransformersConditionalGeneration.from_pretrained("google/switch-base-8", device_map="auto")
+tokenizer = AutoTokenizer.from_pretrained("google/switch-base-256")
+model = SwitchTransformersConditionalGeneration.from_pretrained("google/switch-base-256", device_map="auto")
 
 input_text = "A <extra_id_0> walks into a bar a orders a <extra_id_1> with <extra_id_2> pinch of <extra_id_3>."
 input_ids = tokenizer(input_text, return_tensors="pt").input_ids.to(0)
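
For reference, below is a minimal sketch of how the first (CPU) snippet reads after this change. The loading and tokenization lines come from the hunk above; the generate and decode lines are assumed from the `print(tokenizer.decode(outputs[0]))` context quoted in the later hunk headers, not shown in full here.

# Sketch of the updated CPU example for google/switch-base-256
from transformers import AutoTokenizer, SwitchTransformersConditionalGeneration

tokenizer = AutoTokenizer.from_pretrained("google/switch-base-256")
model = SwitchTransformersConditionalGeneration.from_pretrained("google/switch-base-256")

input_text = "A <extra_id_0> walks into a bar a orders a <extra_id_1> with <extra_id_2> pinch of <extra_id_3>."
input_ids = tokenizer(input_text, return_tensors="pt").input_ids

# Assumed completion: generate from the sentinel-masked prompt and decode,
# matching the README context referenced by the hunk headers.
outputs = model.generate(input_ids)
print(tokenizer.decode(outputs[0]))

Note that the fp16 variant shown in the third hunk passes torch_dtype=torch.float16, so that snippet additionally needs an `import torch` line above the model call.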