Sarah Ciston
committed on
Commit
·
1455cf4
1
Parent(s):
614d0f9
remove env var for inference mode
Browse files
sketch.js
CHANGED
@@ -8,7 +8,7 @@ const inference = new HfInference();
|
|
8 |
|
9 |
|
10 |
// Since we will download the model from the Hugging Face Hub, we can skip the local model check
|
11 |
-
env.allowLocalModels = false;
|
12 |
|
13 |
let promptButton, buttonButton, promptInput, maskInputA, maskInputB, maskInputC, modOutput
|
14 |
// const detector = await pipeline('text-generation', 'meta-llama/Meta-Llama-3-8B', 'Xenova/LaMini-Flan-T5-783M');
|
@@ -39,7 +39,7 @@ const out = await inference.chatCompletion({
|
|
39 |
// });
|
40 |
console.log(out)
|
41 |
|
42 |
-
var result = await out.choices[0].message
|
43 |
// var result = await out[0].generated_text
|
44 |
console.log(result);
|
45 |
|
|
|
8 |
|
9 |
|
10 |
// Since we will download the model from the Hugging Face Hub, we can skip the local model check
|
11 |
+
// env.allowLocalModels = false;
|
12 |
|
13 |
let promptButton, buttonButton, promptInput, maskInputA, maskInputB, maskInputC, modOutput
|
14 |
// const detector = await pipeline('text-generation', 'meta-llama/Meta-Llama-3-8B', 'Xenova/LaMini-Flan-T5-783M');
|
|
|
39 |
// });
|
40 |
console.log(out)
|
41 |
|
42 |
+
var result = await out.choices[0].message.content
|
43 |
// var result = await out[0].generated_text
|
44 |
console.log(result);
|
45 |
|