---
license: apache-2.0
library_name: "transformers.js"
base_model: Locutusque/TinyMistral-248M
---
|
|
|
INT8 ONNX version of [Locutusque/TinyMistral-248M](https://huggingface.co/Locutusque/TinyMistral-248M) to use with [Transformers.js](https://huggingface.co/docs/transformers.js).
|
|
|
### Example usage

#### Pipeline API
|
```js
import { pipeline } from '@xenova/transformers';

const generator = await pipeline('text-generation', 'Felladrin/onnx-int8-TinyMistral-248M');
const output = await generator('Once upon a time,', { add_special_tokens: true, max_new_tokens: 60, repetition_penalty: 1.2});
console.log(output);
// 'Once upon a time, the world was in turmoil. The United States had been on an unprecedented hiatus since 1970 and it seemed that America’s role as a global powerhouse would be at risk if we were to continue with our current political system.\n\nThe US has become'
```
|
|
|
#### Auto Classes

```js
import { AutoModelForCausalLM, AutoTokenizer } from '@xenova/transformers';

const model_path = 'Felladrin/onnx-int8-TinyMistral-248M';
const model = await AutoModelForCausalLM.from_pretrained(model_path);
const tokenizer = await AutoTokenizer.from_pretrained(model_path);

const prompt = 'Once upon a time,';
const { input_ids } = tokenizer(prompt);
const tokens = await model.generate(input_ids, { max_new_tokens: 60, repetition_penalty: 1.2});
console.log(tokenizer.decode(tokens[0], { skip_special_tokens: true }));
// 'Once upon a time, the world was in turmoil. The United States had been on an unprecedented hiatus since 1970 and it seemed that America’s role as a global powerhouse would be at risk if we were to continue with our current political system.\n\nThe US has become'
```