PeterPinetree committed
Commit e773545 · 1 Parent(s): bf9105e

Update index.html

Files changed (1)
index.html +4 -4
index.html CHANGED
@@ -286,8 +286,8 @@
     if (mySeq !== loadSeq) return;
 
     setStatus("Warming up…");
-    const enc = await tokenizer(" ", { add_special_tokens:false });
-    await model(enc.input_ids, { attention_mask: enc.attention_mask });
+    const enc = await tokenizer(" ", { add_special_tokens: false, return_attention_mask: true });
+    await model({ input_ids: enc.input_ids, attention_mask: enc.attention_mask });
     if (mySeq !== loadSeq) return;
     setStatus("Ready");
   }
@@ -295,9 +295,9 @@
   /* ---------- Next-token logic ---------- */
   async function greedyNext(text, topK=10){
     if (!tokenizer || !model) { setErr("Model not loaded yet — check the status bar."); return {rows:[],dt:0}; }
-    const enc = await tokenizer(text || " ", { add_special_tokens:false });
+    const enc = await tokenizer(text || " ", { add_special_tokens: false, return_attention_mask: true });
     const t0 = performance.now();
-    const out = await model(enc.input_ids, { attention_mask: enc.attention_mask });
+    const out = await model({ input_ids: enc.input_ids, attention_mask: enc.attention_mask });
     const dt = (performance.now() - t0) | 0;
 
     const last = out.logits[out.logits.length - 1];
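For context, the updated lines bundle input_ids and attention_mask into a single inputs object instead of passing them positionally, and request the attention mask explicitly when encoding. A minimal sketch of the same calling pattern, assuming the page uses Transformers.js in a module script; the import path and model id below are placeholders for illustration and are not taken from this commit:

  import { AutoTokenizer, AutoModelForCausalLM } from "@huggingface/transformers";

  // Hypothetical model id; the Space's actual model is not shown in this diff.
  const tokenizer = await AutoTokenizer.from_pretrained("Xenova/gpt2");
  const model = await AutoModelForCausalLM.from_pretrained("Xenova/gpt2");

  // Encode without special tokens, as in the updated warm-up and greedyNext code.
  const enc = await tokenizer("Hello", { add_special_tokens: false, return_attention_mask: true });

  // Pass the whole encoding as one object, matching the new call shape in this commit.
  const out = await model({ input_ids: enc.input_ids, attention_mask: enc.attention_mask });

  // Logits at the last position score the next token, mirroring the page's use of out.logits.
  const last = out.logits[out.logits.length - 1];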