Text2Text Generation
Transformers
PyTorch
English
switch_transformers
Files changed (1)
  1. README.md +8 -8
README.md CHANGED
@@ -71,10 +71,10 @@ Find below some example scripts on how to use the model in `transformers` - bear
 
 ```python
 # pip install accelerate
-from transformers import AutoTokenizer, SwitchTransformersConditionalGeneration
+from transformers import AutoTokenizer, SwitchTransformersForConditionalGeneration
 
 tokenizer = AutoTokenizer.from_pretrained("google/switch-c-2048")
-model = SwitchTransformersConditionalGeneration.from_pretrained("google/switch-c-2048", device_map="auto", offload_folder=<OFFLOAD_FOLDER>)
+model = SwitchTransformersForConditionalGeneration.from_pretrained("google/switch-c-2048", device_map="auto", offload_folder=<OFFLOAD_FOLDER>)
 
 input_text = "A <extra_id_0> walks into a bar a orders a <extra_id_1> with <extra_id_2> pinch of <extra_id_3>."
 input_ids = tokenizer(input_text, return_tensors="pt").input_ids
@@ -93,10 +93,10 @@ print(tokenizer.decode(outputs[0]))
 
 ```python
 # pip install accelerate
-from transformers import AutoTokenizer, SwitchTransformersConditionalGeneration
+from transformers import AutoTokenizer, SwitchTransformersForConditionalGeneration
 
 tokenizer = AutoTokenizer.from_pretrained("google/switch-c-2048")
-model = SwitchTransformersConditionalGeneration.from_pretrained("google/switch-c-2048", device_map="auto", offload_folder=<OFFLOAD_FOLDER>)
+model = SwitchTransformersForConditionalGeneration.from_pretrained("google/switch-c-2048", device_map="auto", offload_folder=<OFFLOAD_FOLDER>)
 
 input_text = "A <extra_id_0> walks into a bar a orders a <extra_id_1> with <extra_id_2> pinch of <extra_id_3>."
 input_ids = tokenizer(input_text, return_tensors="pt").input_ids.to(0)
@@ -117,10 +117,10 @@ print(tokenizer.decode(outputs[0]))
 
 ```python
 # pip install accelerate
-from transformers import AutoTokenizer, SwitchTransformersConditionalGeneration
+from transformers import AutoTokenizer, SwitchTransformersForConditionalGeneration
 
 tokenizer = AutoTokenizer.from_pretrained("google/switch-c-2048")
-model = SwitchTransformersConditionalGeneration.from_pretrained("google/switch-c-2048", device_map="auto", torch_dtype=torch.bfloat16, offload_folder=<OFFLOAD_FOLDER>)
+model = SwitchTransformersForConditionalGeneration.from_pretrained("google/switch-c-2048", device_map="auto", torch_dtype=torch.bfloat16, offload_folder=<OFFLOAD_FOLDER>)
 
 input_text = "A <extra_id_0> walks into a bar a orders a <extra_id_1> with <extra_id_2> pinch of <extra_id_3>."
 input_ids = tokenizer(input_text, return_tensors="pt").input_ids.to(0)
@@ -139,10 +139,10 @@ print(tokenizer.decode(outputs[0]))
 
 ```python
 # pip install bitsandbytes accelerate
-from transformers import AutoTokenizer, SwitchTransformersConditionalGeneration
+from transformers import AutoTokenizer, SwitchTransformersForConditionalGeneration
 
 tokenizer = AutoTokenizer.from_pretrained("google/switch-c-2048")
-model = SwitchTransformersConditionalGeneration.from_pretrained("google/switch-c-2048", device_map="auto", offload_folder=<OFFLOAD_FOLDER>)
+model = SwitchTransformersForConditionalGeneration.from_pretrained("google/switch-c-2048", device_map="auto", offload_folder=<OFFLOAD_FOLDER>)
 
 input_text = "A <extra_id_0> walks into a bar a orders a <extra_id_1> with <extra_id_2> pinch of <extra_id_3>."
 input_ids = tokenizer(input_text, return_tensors="pt").input_ids.to(0)
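
For reference, a minimal end-to-end sketch of the first corrected example is shown below. The class name `SwitchTransformersForConditionalGeneration` and the `from_pretrained` arguments come from the diff itself; the trailing `generate`/`decode` lines are assumed from the `print(tokenizer.decode(outputs[0]))` context visible in the hunk headers, and the offload path is an illustrative placeholder standing in for `<OFFLOAD_FOLDER>`.

```python
# pip install accelerate
# Sketch of the corrected disk-offload example; generate/decode steps are
# assumed from the hunk-header context, not shown verbatim in this diff.
from transformers import AutoTokenizer, SwitchTransformersForConditionalGeneration

tokenizer = AutoTokenizer.from_pretrained("google/switch-c-2048")
model = SwitchTransformersForConditionalGeneration.from_pretrained(
    "google/switch-c-2048",
    device_map="auto",
    offload_folder="./offload",  # placeholder for <OFFLOAD_FOLDER>: a local dir with enough free disk
)

input_text = "A <extra_id_0> walks into a bar a orders a <extra_id_1> with <extra_id_2> pinch of <extra_id_3>."
input_ids = tokenizer(input_text, return_tensors="pt").input_ids

outputs = model.generate(input_ids)
print(tokenizer.decode(outputs[0]))
```

The same one-line class-name fix applies identically to the other three variants in this diff (GPU with `.to(0)`, `torch_dtype=torch.bfloat16`, and the bitsandbytes setup).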