Sarah Ciston committed
Commit · 1cff79d · Parent(s): de9037a
try nemo model
README.md CHANGED
@@ -6,8 +6,9 @@ colorTo: blue
 sdk: static
 pinned: false
 models:
+- mistralai/Mistral-Nemo-Instruct-2407
 # - Xenova/distilgpt2
-- HuggingFaceH4/zephyr-7b-beta
+# - HuggingFaceH4/zephyr-7b-beta
 # - openai-community/gpt2
 # - meta-llama/Meta-Llama-3-70B-Instruct
 # - Xenova/detr-resnet-50
sketch.js CHANGED
@@ -1,6 +1,8 @@
 // connect to API via module
 
-import {
+// import { AutoTokenizer, env } from 'https://cdn.jsdelivr.net/npm/@xenova/transformers';
+import { pipeline, env } from 'https://cdn.jsdelivr.net/npm/@xenova/transformers';
+// import { pipeline, env } from 'https://cdn.jsdelivr.net/npm/@xenova/transformers@2.10.1';
 // import { HfInference } from 'https://cdn.jsdelivr.net/npm/@huggingface/inference@2.7.0/+esm';
 // const inference = new HfInference();
 
@@ -158,7 +160,7 @@ new p5(function (p5) {
 async function runModel(PREPROMPT, PROMPT){
 // // Chat completion API
 
-let MODELNAME = '
+let MODELNAME = 'mistralai/Mistral-Nemo-Instruct-2407'
 
 // 'meta-llama/Meta-Llama-3-70B-Instruct'
 // 'openai-community/gpt2'
@@ -184,8 +186,14 @@ async function runModel(PREPROMPT, PROMPT){
 // let out = await pipe(inputText)
 
 let out = await pipe({
-messages: [{
-
+messages: [{
+role: "system",
+content: PREPROMPT
+},{
+role: "user",
+content: PROMPT
+}],
+max_new_tokens: 100
 });
 
 console.log(out)
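For context, a minimal sketch (not the committed sketch.js) of how the same PREPROMPT/PROMPT pair can be run through a transformers.js text-generation pipeline. The model name, the flattened prompt string, and the runModelSketch helper are illustrative assumptions; the plain string-input call is used here because the chat-style messages object passed to pipe() in the commit depends on the transformers.js version and on the chosen model shipping a chat template.

// Minimal sketch under stated assumptions; model name and prompt
// formatting are placeholders, not the committed code.
import { pipeline, env } from 'https://cdn.jsdelivr.net/npm/@xenova/transformers';

env.allowLocalModels = false; // fetch model files from the Hugging Face Hub

async function runModelSketch(PREPROMPT, PROMPT) {
  // A small checkpoint known to ship browser-ready ONNX weights.
  const MODELNAME = 'Xenova/distilgpt2';
  const pipe = await pipeline('text-generation', MODELNAME);

  // Flatten the system and user prompts into one string for the
  // plain text-generation call.
  const inputText = `${PREPROMPT}\n${PROMPT}`;
  const out = await pipe(inputText, { max_new_tokens: 100 });

  console.log(out); // e.g. [{ generated_text: '...' }]
  return out[0].generated_text;
}

Note that mistralai/Mistral-Nemo-Instruct-2407 may not provide the ONNX weights transformers.js needs to run in the browser, which is why the placeholder above uses a Xenova checkpoint. Either way, logging the raw output first, as the commit already does with console.log(out), makes it easy to see which shape the pipeline returns before picking the reply out of it.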