Add Colab notebook badge linking to a usage example
Browse files
README.md
CHANGED
@@ -89,6 +89,10 @@ pipeline_tag: text2text-generation
|
|
89 |
|
90 |
# flan-t5-base-instruct: dolly_hhrlhf
|
91 |
|
|
|
|
|
|
|
|
|
92 |
This model is a fine-tuned version of [google/flan-t5-base](https://huggingface.co/google/flan-t5-base) on the pszemraj/dolly_hhrlhf-text2text dataset.
|
93 |
|
94 |
## Model description
|
@@ -107,12 +111,11 @@ assistant = pipeline(
|
|
107 |
"text2text-generation",
|
108 |
model_name,
|
109 |
device=0 if torch.cuda.is_available() else -1,
|
110 |
-
torch_dtype=torch.float32, # force fp32 (**experimental** see below)
|
111 |
)
|
112 |
cfg = GenerationConfig.from_pretrained(model_name)
|
113 |
|
114 |
# pass an 'instruction' as the prompt to the pipeline
|
115 |
-
prompt = "
|
116 |
result = assistant(prompt, generation_config=cfg)[0]["generated_text"]
|
117 |
print(result)
|
118 |
```
|
|
|
89 |
|
90 |
# flan-t5-base-instruct: dolly_hhrlhf
|
91 |
|
92 |
+
<a href="https://colab.research.google.com/gist/pszemraj/6ca2b0adc89f6a001a9ba7bcd4300e85/flan-t5-base-instruct-example.ipynb">
|
93 |
+
<img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"/>
|
94 |
+
</a>
|
95 |
+
|
96 |
This model is a fine-tuned version of [google/flan-t5-base](https://huggingface.co/google/flan-t5-base) on the pszemraj/dolly_hhrlhf-text2text dataset.
|
97 |
|
98 |
## Model description
|
|
|
111 |
"text2text-generation",
|
112 |
model_name,
|
113 |
device=0 if torch.cuda.is_available() else -1,
|
|
|
114 |
)
|
115 |
cfg = GenerationConfig.from_pretrained(model_name)
|
116 |
|
117 |
# pass an 'instruction' as the prompt to the pipeline
|
118 |
+
prompt = "Write a guide on how to become a ninja while working a 9-5 job."
|
119 |
result = assistant(prompt, generation_config=cfg)[0]["generated_text"]
|
120 |
print(result)
|
121 |
```
|