Update index.html
index.html  (+11 -18)

@@ -112,26 +112,26 @@
   const res = await fetch(path);
   if (!res.ok) throw new Error(`Failed to fetch ${path}: ${res.status}`);
   const code = await res.text();
-  const blob = new Blob([code], { type: "text/javascript" });
+  const blob = new Blob([code], { type: "text/javascript" });
   const url = URL.createObjectURL(blob);
   try {
-    return await import(url);
+    return await import(url);
   } finally {
     URL.revokeObjectURL(url);
   }
 }
-
+
 // Use your local vendored file
 const T = await importVendoredModule("./assets/vendor/transformers-3.0.0.js");
-
+
 // Pull named exports from the ESM module
 const { env, AutoTokenizer, AutoModelForCausalLM } = T;
-
+
 // (Optional) expose for other inline modules that expect window.HF
 window.HF = { env, AutoTokenizer, AutoModelForCausalLM };
-
+
 // ----- Backend (no cross-origin isolation) -----
-env.localModelPath = null
+env.localModelPath = ""; // <— CHANGED from null to empty string
 env.allowRemoteModels = false; // toggle to true only when using distilgpt2
 env.useBrowserCache = true;
 env.backends.onnx.webgpu = { enabled: false };

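Note: the hunk above shows only the body of the loader that line 125 calls. For orientation, a self-contained sketch of the whole helper follows; the async-function wrapper and the single path parameter are assumptions (the declaration line sits outside the hunk), everything else is taken from the diff.

async function importVendoredModule(path) {
  // Fetch the vendored ESM source ourselves so a missing file fails with a clear error.
  const res = await fetch(path);
  if (!res.ok) throw new Error(`Failed to fetch ${path}: ${res.status}`);
  const code = await res.text();
  // Re-serve the source as a same-origin blob: URL and load it with a dynamic import().
  const blob = new Blob([code], { type: "text/javascript" });
  const url = URL.createObjectURL(blob);
  try {
    return await import(url);
  } finally {
    // Once import() has resolved, the temporary object URL is no longer needed.
    URL.revokeObjectURL(url);
  }
}
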
@@ -284,7 +284,7 @@
   const p=index.get(t); if(!p) continue;
   const [x,y]=toCanvas(p.x,p.y);
   ctx.beginPath(); ctx.moveTo(bx,by); ctx.lineTo(x,y); ctx.stroke();
-  ctx.fillStyle="#9bd7ff"; ctx.fillRect(x-2,y-2,4,4);
+  ctx.fillStyle="#9bd7ff"; ctx.fillRect(x-2,y-2,4-0,4);
 }
 ctx.fillStyle="#ffd166"; ctx.beginPath(); ctx.arc(bx,by,5,0,Math.PI*2); ctx.fill();
 ctx.fillStyle="#e6f1ff"; ctx.font="12px ui-monospace, SFMono-Regular, Menlo, Consolas, monospace";

@@ -301,31 +301,24 @@
   resetProgress("Tokenizer");
   setStatus("Tokenizer: starting…");
   const base = MODELS.qwen.local_href;
-
-  // Build absolute file URLs next to your ONNX
   const tjsonURL = new URL("tokenizer.json", base).href;
   const tcfgURL = new URL("tokenizer_config.json", base).href;
-  const smapURL = new URL("special_tokens_map.json", base).href;
-
-  // Preload exactly like Python does
+  const smapURL = new URL("special_tokens_map.json", base).href;
   const [tokJSON, tokCfgJSON, smapJSON] = await Promise.all([
     fetch(tjsonURL).then(r => { if(!r.ok) throw new Error("tokenizer.json missing"); return r.json(); }),
     fetch(tcfgURL).then(r => { if(!r.ok) throw new Error("tokenizer_config.json missing"); return r.json(); }),
     fetch(smapURL).then(r => r.ok ? r.json() : null),
   ]);
-
-  // Provide a file map so transformers.js never tries to normalize a repo ID
   const fileMap = new Map();
   fileMap.set("tokenizer.json", new Blob([JSON.stringify(tokJSON)], { type: "application/json" }));
   fileMap.set("tokenizer_config.json", new Blob([JSON.stringify(tokCfgJSON)], { type: "application/json" }));
   if (smapJSON) {
     fileMap.set("special_tokens_map.json", new Blob([JSON.stringify(smapJSON)], { type: "application/json" }));
   }
-
   tokenizer = await AutoTokenizer.from_pretrained("", {
     files: fileMap,
     progress_callback: onProgress,
-    local_files_only: true,
+    local_files_only: true,
   });
 }

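The hunk above fetches each tokenizer file by hand and passes the bytes to the library keyed by filename, so nothing ever has to be resolved as a Hugging Face repo ID. The same pattern, factored into a reusable helper, might look like the sketch below; the helper name is illustrative (it does not exist in the diff), and the required/optional split mirrors the three fetches above.

// Fetch sibling JSON files relative to `base` and return a Map of
// filename -> Blob; a missing required file throws immediately.
async function preloadTokenizerFiles(base, required, optional = []) {
  const toBlob = (obj) => new Blob([JSON.stringify(obj)], { type: "application/json" });
  const fileMap = new Map();
  for (const name of required) {
    const res = await fetch(new URL(name, base).href);
    if (!res.ok) throw new Error(`${name} missing (${res.status})`);
    fileMap.set(name, toBlob(await res.json()));
  }
  for (const name of optional) {
    const res = await fetch(new URL(name, base).href);
    if (res.ok) fileMap.set(name, toBlob(await res.json()));
  }
  return fileMap;
}

// Usage in the spirit of the hunk above:
// const fileMap = await preloadTokenizerFiles(MODELS.qwen.local_href,
//   ["tokenizer.json", "tokenizer_config.json"], ["special_tokens_map.json"]);
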
@@ -362,7 +355,7 @@
     file_name: MODELS.qwen.onnx_file,
     progress_callback: onProgress,
     config: QWEN3_CONFIG_FIX,
-    local_files_only: true,
+    local_files_only: true,
   });
 } else {
   model = await AutoModelForCausalLM.from_pretrained(MODELS.distilgpt2.remote_repo, {

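The substantive change in this commit is on line 134 of the first hunk: env.localModelPath goes from null to the empty string while allowRemoteModels stays false. As a rough illustration of the failure mode a null base can cause when it is string-joined with a relative file name (this is a guess at the motivation, not a description of transformers.js internals, and model_q4.onnx is a made-up file name):

const fileName = "model_q4.onnx";   // hypothetical file name
console.log(`${null}/${fileName}`); // "null/model_q4.onnx", a bogus request path
console.log(`${""}/${fileName}`);   // "/model_q4.onnx", an ordinary path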