radames committed on
Commit
f9fe51a
1 Parent(s): 159f74b

Upload 6 files

Browse files
Files changed (4) hide show
  1. build/m.js +27 -34
  2. build/m_bg.wasm +2 -2
  3. index.html +56 -48
  4. phiWorker.js +15 -2
build/m.js CHANGED
@@ -129,14 +129,6 @@ function handleError(f, args) {
129
  */
130
  export class Model {
131
 
132
- static __wrap(ptr) {
133
- ptr = ptr >>> 0;
134
- const obj = Object.create(Model.prototype);
135
- obj.__wbg_ptr = ptr;
136
-
137
- return obj;
138
- }
139
-
140
  __destroy_into_raw() {
141
  const ptr = this.__wbg_ptr;
142
  this.__wbg_ptr = 0;
@@ -170,7 +162,8 @@ export class Model {
170
  if (r2) {
171
  throw takeObject(r1);
172
  }
173
- return Model.__wrap(r0);
 
174
  } finally {
175
  wasm.__wbindgen_add_to_stack_pointer(16);
176
  }
@@ -302,10 +295,10 @@ function __wbg_get_imports() {
302
  imports.wbg.__wbindgen_object_drop_ref = function(arg0) {
303
  takeObject(arg0);
304
  };
305
- imports.wbg.__wbg_log_082f7c89404ec861 = function(arg0, arg1) {
306
  console.log(getStringFromWasm0(arg0, arg1));
307
  };
308
- imports.wbg.__wbg_crypto_c48a774b022d20ac = function(arg0) {
309
  const ret = getObject(arg0).crypto;
310
  return addHeapObject(ret);
311
  };
@@ -314,15 +307,15 @@ function __wbg_get_imports() {
314
  const ret = typeof(val) === 'object' && val !== null;
315
  return ret;
316
  };
317
- imports.wbg.__wbg_process_298734cf255a885d = function(arg0) {
318
  const ret = getObject(arg0).process;
319
  return addHeapObject(ret);
320
  };
321
- imports.wbg.__wbg_versions_e2e78e134e3e5d01 = function(arg0) {
322
  const ret = getObject(arg0).versions;
323
  return addHeapObject(ret);
324
  };
325
- imports.wbg.__wbg_node_1cd7a5d853dbea79 = function(arg0) {
326
  const ret = getObject(arg0).node;
327
  return addHeapObject(ret);
328
  };
@@ -330,11 +323,11 @@ function __wbg_get_imports() {
330
  const ret = typeof(getObject(arg0)) === 'string';
331
  return ret;
332
  };
333
- imports.wbg.__wbg_msCrypto_bcb970640f50a1e8 = function(arg0) {
334
  const ret = getObject(arg0).msCrypto;
335
  return addHeapObject(ret);
336
  };
337
- imports.wbg.__wbg_require_8f08ceecec0f4fee = function() { return handleError(function () {
338
  const ret = module.require;
339
  return addHeapObject(ret);
340
  }, arguments) };
@@ -346,17 +339,17 @@ function __wbg_get_imports() {
346
  const ret = getStringFromWasm0(arg0, arg1);
347
  return addHeapObject(ret);
348
  };
349
- imports.wbg.__wbg_getRandomValues_37fa2ca9e4e07fab = function() { return handleError(function (arg0, arg1) {
350
- getObject(arg0).getRandomValues(getObject(arg1));
351
- }, arguments) };
352
- imports.wbg.__wbg_randomFillSync_dc1e9a60c158336d = function() { return handleError(function (arg0, arg1) {
353
  getObject(arg0).randomFillSync(takeObject(arg1));
354
  }, arguments) };
355
- imports.wbg.__wbg_newnoargs_581967eacc0e2604 = function(arg0, arg1) {
 
 
 
356
  const ret = new Function(getStringFromWasm0(arg0, arg1));
357
  return addHeapObject(ret);
358
  };
359
- imports.wbg.__wbg_call_cb65541d95d71282 = function() { return handleError(function (arg0, arg1) {
360
  const ret = getObject(arg0).call(getObject(arg1));
361
  return addHeapObject(ret);
362
  }, arguments) };
@@ -364,19 +357,19 @@ function __wbg_get_imports() {
364
  const ret = getObject(arg0);
365
  return addHeapObject(ret);
366
  };
367
- imports.wbg.__wbg_self_1ff1d729e9aae938 = function() { return handleError(function () {
368
  const ret = self.self;
369
  return addHeapObject(ret);
370
  }, arguments) };
371
- imports.wbg.__wbg_window_5f4faef6c12b79ec = function() { return handleError(function () {
372
  const ret = window.window;
373
  return addHeapObject(ret);
374
  }, arguments) };
375
- imports.wbg.__wbg_globalThis_1d39714405582d3c = function() { return handleError(function () {
376
  const ret = globalThis.globalThis;
377
  return addHeapObject(ret);
378
  }, arguments) };
379
- imports.wbg.__wbg_global_651f05c6a0944d1c = function() { return handleError(function () {
380
  const ret = global.global;
381
  return addHeapObject(ret);
382
  }, arguments) };
@@ -384,34 +377,34 @@ function __wbg_get_imports() {
384
  const ret = getObject(arg0) === undefined;
385
  return ret;
386
  };
387
- imports.wbg.__wbg_call_01734de55d61e11d = function() { return handleError(function (arg0, arg1, arg2) {
388
  const ret = getObject(arg0).call(getObject(arg1), getObject(arg2));
389
  return addHeapObject(ret);
390
  }, arguments) };
391
- imports.wbg.__wbg_now_9c5990bda04c7e53 = function() {
392
  const ret = Date.now();
393
  return ret;
394
  };
395
- imports.wbg.__wbg_buffer_085ec1f694018c4f = function(arg0) {
396
  const ret = getObject(arg0).buffer;
397
  return addHeapObject(ret);
398
  };
399
- imports.wbg.__wbg_newwithbyteoffsetandlength_6da8e527659b86aa = function(arg0, arg1, arg2) {
400
  const ret = new Uint8Array(getObject(arg0), arg1 >>> 0, arg2 >>> 0);
401
  return addHeapObject(ret);
402
  };
403
- imports.wbg.__wbg_new_8125e318e6245eed = function(arg0) {
404
  const ret = new Uint8Array(getObject(arg0));
405
  return addHeapObject(ret);
406
  };
407
- imports.wbg.__wbg_set_5cf90238115182c3 = function(arg0, arg1, arg2) {
408
  getObject(arg0).set(getObject(arg1), arg2 >>> 0);
409
  };
410
- imports.wbg.__wbg_newwithlength_e5d69174d6984cd7 = function(arg0) {
411
  const ret = new Uint8Array(arg0 >>> 0);
412
  return addHeapObject(ret);
413
  };
414
- imports.wbg.__wbg_subarray_13db269f57aa838d = function(arg0, arg1, arg2) {
415
  const ret = getObject(arg0).subarray(arg1 >>> 0, arg2 >>> 0);
416
  return addHeapObject(ret);
417
  };
 
129
  */
130
  export class Model {
131
 
 
 
 
 
 
 
 
 
132
  __destroy_into_raw() {
133
  const ptr = this.__wbg_ptr;
134
  this.__wbg_ptr = 0;
 
162
  if (r2) {
163
  throw takeObject(r1);
164
  }
165
+ this.__wbg_ptr = r0 >>> 0;
166
+ return this;
167
  } finally {
168
  wasm.__wbindgen_add_to_stack_pointer(16);
169
  }
 
295
  imports.wbg.__wbindgen_object_drop_ref = function(arg0) {
296
  takeObject(arg0);
297
  };
298
+ imports.wbg.__wbg_log_aa756f7b1647d2ab = function(arg0, arg1) {
299
  console.log(getStringFromWasm0(arg0, arg1));
300
  };
301
+ imports.wbg.__wbg_crypto_58f13aa23ffcb166 = function(arg0) {
302
  const ret = getObject(arg0).crypto;
303
  return addHeapObject(ret);
304
  };
 
307
  const ret = typeof(val) === 'object' && val !== null;
308
  return ret;
309
  };
310
+ imports.wbg.__wbg_process_5b786e71d465a513 = function(arg0) {
311
  const ret = getObject(arg0).process;
312
  return addHeapObject(ret);
313
  };
314
+ imports.wbg.__wbg_versions_c2ab80650590b6a2 = function(arg0) {
315
  const ret = getObject(arg0).versions;
316
  return addHeapObject(ret);
317
  };
318
+ imports.wbg.__wbg_node_523d7bd03ef69fba = function(arg0) {
319
  const ret = getObject(arg0).node;
320
  return addHeapObject(ret);
321
  };
 
323
  const ret = typeof(getObject(arg0)) === 'string';
324
  return ret;
325
  };
326
+ imports.wbg.__wbg_msCrypto_abcb1295e768d1f2 = function(arg0) {
327
  const ret = getObject(arg0).msCrypto;
328
  return addHeapObject(ret);
329
  };
330
+ imports.wbg.__wbg_require_2784e593a4674877 = function() { return handleError(function () {
331
  const ret = module.require;
332
  return addHeapObject(ret);
333
  }, arguments) };
 
339
  const ret = getStringFromWasm0(arg0, arg1);
340
  return addHeapObject(ret);
341
  };
342
+ imports.wbg.__wbg_randomFillSync_a0d98aa11c81fe89 = function() { return handleError(function (arg0, arg1) {
 
 
 
343
  getObject(arg0).randomFillSync(takeObject(arg1));
344
  }, arguments) };
345
+ imports.wbg.__wbg_getRandomValues_504510b5564925af = function() { return handleError(function (arg0, arg1) {
346
+ getObject(arg0).getRandomValues(getObject(arg1));
347
+ }, arguments) };
348
+ imports.wbg.__wbg_newnoargs_ccdcae30fd002262 = function(arg0, arg1) {
349
  const ret = new Function(getStringFromWasm0(arg0, arg1));
350
  return addHeapObject(ret);
351
  };
352
+ imports.wbg.__wbg_call_669127b9d730c650 = function() { return handleError(function (arg0, arg1) {
353
  const ret = getObject(arg0).call(getObject(arg1));
354
  return addHeapObject(ret);
355
  }, arguments) };
 
357
  const ret = getObject(arg0);
358
  return addHeapObject(ret);
359
  };
360
+ imports.wbg.__wbg_self_3fad056edded10bd = function() { return handleError(function () {
361
  const ret = self.self;
362
  return addHeapObject(ret);
363
  }, arguments) };
364
+ imports.wbg.__wbg_window_a4f46c98a61d4089 = function() { return handleError(function () {
365
  const ret = window.window;
366
  return addHeapObject(ret);
367
  }, arguments) };
368
+ imports.wbg.__wbg_globalThis_17eff828815f7d84 = function() { return handleError(function () {
369
  const ret = globalThis.globalThis;
370
  return addHeapObject(ret);
371
  }, arguments) };
372
+ imports.wbg.__wbg_global_46f939f6541643c5 = function() { return handleError(function () {
373
  const ret = global.global;
374
  return addHeapObject(ret);
375
  }, arguments) };
 
377
  const ret = getObject(arg0) === undefined;
378
  return ret;
379
  };
380
+ imports.wbg.__wbg_call_53fc3abd42e24ec8 = function() { return handleError(function (arg0, arg1, arg2) {
381
  const ret = getObject(arg0).call(getObject(arg1), getObject(arg2));
382
  return addHeapObject(ret);
383
  }, arguments) };
384
+ imports.wbg.__wbg_now_4579335d3581594c = function() {
385
  const ret = Date.now();
386
  return ret;
387
  };
388
+ imports.wbg.__wbg_buffer_344d9b41efe96da7 = function(arg0) {
389
  const ret = getObject(arg0).buffer;
390
  return addHeapObject(ret);
391
  };
392
+ imports.wbg.__wbg_newwithbyteoffsetandlength_2dc04d99088b15e3 = function(arg0, arg1, arg2) {
393
  const ret = new Uint8Array(getObject(arg0), arg1 >>> 0, arg2 >>> 0);
394
  return addHeapObject(ret);
395
  };
396
+ imports.wbg.__wbg_new_d8a000788389a31e = function(arg0) {
397
  const ret = new Uint8Array(getObject(arg0));
398
  return addHeapObject(ret);
399
  };
400
+ imports.wbg.__wbg_set_dcfd613a3420f908 = function(arg0, arg1, arg2) {
401
  getObject(arg0).set(getObject(arg1), arg2 >>> 0);
402
  };
403
+ imports.wbg.__wbg_newwithlength_13b5319ab422dcf6 = function(arg0) {
404
  const ret = new Uint8Array(arg0 >>> 0);
405
  return addHeapObject(ret);
406
  };
407
+ imports.wbg.__wbg_subarray_6ca5cfa7fbb9abbe = function(arg0, arg1, arg2) {
408
  const ret = getObject(arg0).subarray(arg1 >>> 0, arg2 >>> 0);
409
  return addHeapObject(ret);
410
  };
build/m_bg.wasm CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:31acdbb8b18f216db0b64e628bd398f63260b35b6dfb3d1e23d01d757b8b9163
3
- size 4544545
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a62b31d8872b8c4c1f0f98dad8f2375d05ad120060731eb02ca21f827d188ac5
3
+ size 4566571
index.html CHANGED
@@ -1,7 +1,7 @@
1
  <html>
2
  <head>
3
  <meta content="text/html;charset=utf-8" http-equiv="Content-Type" />
4
- <title>Candle Phi 1.5 Rust/WASM</title>
5
  </head>
6
  <body></body>
7
  </html>
@@ -38,10 +38,10 @@
38
  import snarkdown from "https://cdn.skypack.dev/snarkdown";
39
  import hljs from "https://cdn.skypack.dev/highlight.js";
40
  // models base url
41
- const MODELS = {
42
- phi_1_5_quantized: {
43
- base_url:
44
- "https://huggingface.co/lmz/candle-quantized-phi/resolve/main/",
45
  model: "model-q4k.gguf",
46
  tokenizer: "tokenizer.json",
47
  config: "phi-1_5.json",
@@ -49,9 +49,9 @@
49
  seq_len: 2048,
50
  size: "800 MB",
51
  },
52
- phi_1_5_quantized_2: {
53
- base_url:
54
- "https://huggingface.co/lmz/candle-quantized-phi/resolve/main/",
55
  model: "model-q80.gguf",
56
  tokenizer: "tokenizer.json",
57
  config: "phi-1_5.json",
@@ -59,9 +59,19 @@
59
  seq_len: 2048,
60
  size: "1.51 GB",
61
  },
62
- puffin_phi_v2_quantized: {
63
- base_url:
64
- "https://huggingface.co/lmz/candle-quantized-phi/resolve/main/",
 
 
 
 
 
 
 
 
 
 
65
  model: "model-puffin-phi-v2-q4k.gguf",
66
  tokenizer: "tokenizer-puffin-phi-v2.json",
67
  config: "puffin-phi-v2.json",
@@ -69,9 +79,9 @@
69
  seq_len: 2048,
70
  size: "798 MB",
71
  },
72
- puffin_phi_v2_quantized_2: {
73
- base_url:
74
- "https://huggingface.co/lmz/candle-quantized-phi/resolve/main/",
75
  model: "model-puffin-phi-v2-q80.gguf",
76
  tokenizer: "tokenizer-puffin-phi-v2.json",
77
  config: "puffin-phi-v2.json",
@@ -106,8 +116,8 @@ Let’s think step by step.`,
106
  },
107
  {
108
  title: "Question answering",
109
- prompt: `What is the capital of France?
110
- Answer:`,
111
  },
112
  {
113
  title: "Chat mode",
@@ -148,7 +158,7 @@ Very polite review:`,
148
  const getValue = (id) => document.querySelector(`#${id}`).value;
149
  const modelID = getValue("model");
150
  const model = MODELS[modelID];
151
- const weightsURL = model.base_url + model.model;
152
  const tokenizerURL = model.base_url + model.tokenizer;
153
  const configURL = model.base_url + model.config;
154
 
@@ -246,6 +256,13 @@ Very polite review:`,
246
  option.innerText = `${id} (${model.size})`;
247
  modelSelect.appendChild(option);
248
  }
 
 
 
 
 
 
 
249
 
250
  for (const [i, { title, prompt }] of TEMPLATES.entries()) {
251
  const div = document.createElement("div");
@@ -257,7 +274,7 @@ Very polite review:`,
257
  input.value = prompt;
258
  const label = document.createElement("label");
259
  label.htmlFor = `templates-${i}`;
260
- label.classList.add("cursor-pointer", "px-1");
261
  label.innerText = title;
262
  div.appendChild(input);
263
  div.appendChild(label);
@@ -272,6 +289,14 @@ Very polite review:`,
272
  prompt.style.height = prompt.scrollHeight + "px";
273
  });
274
  modelSelect.addEventListener("change", (e) => {
 
 
 
 
 
 
 
 
275
  const model = MODELS[e.target.value];
276
  document.querySelector("#max-seq").max = model.seq_len;
277
  document.querySelector("#max-seq").nextElementSibling.value = 200;
@@ -320,42 +345,25 @@ Very polite review:`,
320
  <main class="grid grid-cols-1 gap-8 relative">
321
  <span class="absolute text-5xl -ml-[1em]"> 🕯️ </span>
322
  <div>
323
- <h1 class="text-5xl font-bold">Candle Phi 1.5</h1>
324
  <h2 class="text-2xl font-bold">Rust/WASM Demo</h2>
325
  <p class="max-w-lg">
326
  The
327
- <a
328
- href="https://huggingface.co/microsoft/phi-1_5"
329
- class="link"
330
- target="_blank"
331
- >Phi-1.5</a
332
- >
333
- model achieves state-of-the-art performance with only 1.3 billion
334
- parameters, compared to models with up to 10 billion. You can try the
335
- quantized version of the model here. Additional prompt examples are
336
  available in the
337
- <a
338
- href="https://arxiv.org/pdf/2309.05463.pdf#page=8"
339
- class="link"
340
- target="_blank"
341
- >
342
- technical report </a
343
- >.
344
  </p>
345
  <p class="max-w-lg">
346
  You can also try
347
- <a
348
- href="https://huggingface.co/teknium/Puffin-Phi-v2"
349
- class="link"
350
- target="_blank"
351
- >Puffin-Phi V2
352
  </a>
353
- quantized version model, a fine-tuned version of Phi-1.5 on the
354
- <a
355
- href="https://huggingface.co/datasets/LDJnr/Puffin"
356
- class="link"
357
- target="_blank"
358
- >Puffin dataset
359
  </a>
360
  </p>
361
  </div>
@@ -390,8 +398,8 @@ Very polite review:`,
390
  oninput="this.style.height = 0;this.style.height = this.scrollHeight + 'px'"
391
  placeholder="Add your prompt here..."
392
  >
393
- Write a detailed analogy between mathematics and a lighthouse.
394
- Answer:</textarea
395
  >
396
  <button id="clear-btn">
397
  <svg
 
1
  <html>
2
  <head>
3
  <meta content="text/html;charset=utf-8" http-equiv="Content-Type" />
4
+ <title>Candle Phi 1.5 / Phi 2.0 Rust/WASM</title>
5
  </head>
6
  <body></body>
7
  </html>
 
38
  import snarkdown from "https://cdn.skypack.dev/snarkdown";
39
  import hljs from "https://cdn.skypack.dev/highlight.js";
40
  // models base url
41
+ const MODELS = {
42
+ phi_1_5_q4k: {
43
+ base_url:
44
+ "https://huggingface.co/lmz/candle-quantized-phi/resolve/main/",
45
  model: "model-q4k.gguf",
46
  tokenizer: "tokenizer.json",
47
  config: "phi-1_5.json",
 
49
  seq_len: 2048,
50
  size: "800 MB",
51
  },
52
+ phi_1_5_q80: {
53
+ base_url:
54
+ "https://huggingface.co/lmz/candle-quantized-phi/resolve/main/",
55
  model: "model-q80.gguf",
56
  tokenizer: "tokenizer.json",
57
  config: "phi-1_5.json",
 
59
  seq_len: 2048,
60
  size: "1.51 GB",
61
  },
62
+ phi_2_0_q4k: {
63
+ base_url:
64
+ "https://huggingface.co/radames/phi-2-quantized/resolve/main/",
65
+ model: ["model-v2-q4k.gguf_aa.part", "model-v2-q4k.gguf_ab.part", "model-v2-q4k.gguf_ac.part"],
66
+ tokenizer: "tokenizer.json",
67
+ config: "config.json",
68
+ quantized: true,
69
+ seq_len: 2048,
70
+ size: "1.57GB",
71
+ },
72
+ puffin_phi_v2_q4k: {
73
+ base_url:
74
+ "https://huggingface.co/lmz/candle-quantized-phi/resolve/main/",
75
  model: "model-puffin-phi-v2-q4k.gguf",
76
  tokenizer: "tokenizer-puffin-phi-v2.json",
77
  config: "puffin-phi-v2.json",
 
79
  seq_len: 2048,
80
  size: "798 MB",
81
  },
82
+ puffin_phi_v2_q80: {
83
+ base_url:
84
+ "https://huggingface.co/lmz/candle-quantized-phi/resolve/main/",
85
  model: "model-puffin-phi-v2-q80.gguf",
86
  tokenizer: "tokenizer-puffin-phi-v2.json",
87
  config: "puffin-phi-v2.json",
 
116
  },
117
  {
118
  title: "Question answering",
119
+ prompt: `Instruct: What is the capital of France?
120
+ Output:`,
121
  },
122
  {
123
  title: "Chat mode",
 
158
  const getValue = (id) => document.querySelector(`#${id}`).value;
159
  const modelID = getValue("model");
160
  const model = MODELS[modelID];
161
+ const weightsURL = model.model instanceof Array ? model.model.map((m) => model.base_url + m) : model.base_url + model.model;
162
  const tokenizerURL = model.base_url + model.tokenizer;
163
  const configURL = model.base_url + model.config;
164
 
 
256
  option.innerText = `${id} (${model.size})`;
257
  modelSelect.appendChild(option);
258
  }
259
+ const query = new URLSearchParams(window.location.search);
260
+ const modelID = query.get("model");
261
+ if (modelID) {
262
+ modelSelect.value = modelID;
263
+ } else {
264
+ modelSelect.value = "phi_1_5_q4k";
265
+ }
266
 
267
  for (const [i, { title, prompt }] of TEMPLATES.entries()) {
268
  const div = document.createElement("div");
 
274
  input.value = prompt;
275
  const label = document.createElement("label");
276
  label.htmlFor = `templates-${i}`;
277
+ label.classList.add("cursor-pointer");
278
  label.innerText = title;
279
  div.appendChild(input);
280
  div.appendChild(label);
 
289
  prompt.style.height = prompt.scrollHeight + "px";
290
  });
291
  modelSelect.addEventListener("change", (e) => {
292
+ const query = new URLSearchParams(window.location.search);
293
+ query.set("model", e.target.value);
294
+ window.history.replaceState(
295
+ { },
296
+ "",
297
+ `${window.location.pathname}?${query}`
298
+ );
299
+ window.parent.postMessage({queryString: "?" + query }, "*")
300
  const model = MODELS[e.target.value];
301
  document.querySelector("#max-seq").max = model.seq_len;
302
  document.querySelector("#max-seq").nextElementSibling.value = 200;
 
345
  <main class="grid grid-cols-1 gap-8 relative">
346
  <span class="absolute text-5xl -ml-[1em]"> 🕯️ </span>
347
  <div>
348
+ <h1 class="text-5xl font-bold">Candle Phi 1.5 / Phi 2.0 </h1>
349
  <h2 class="text-2xl font-bold">Rust/WASM Demo</h2>
350
  <p class="max-w-lg">
351
  The
352
+ <a href="https://huggingface.co/microsoft/phi-1_5" class="link" target="_blank">Phi-1.5</a> and
353
+ <a href="https://huggingface.co/microsoft/phi-2" class="link" target="_blank">Phi-2</a> models achieve
354
+ state-of-the-art performance with only 1.3 billion and 2.7 billion parameters, compared to larger models with up
355
+ to 13
356
+ billion parameters. Here you can try the quantized versions. Additional prompt examples are
 
 
 
 
357
  available in the
358
+ <a href="https://arxiv.org/pdf/2309.05463.pdf#page=8" class="link" target="_blank">
359
+ technical report </a>.
 
 
 
 
 
360
  </p>
361
  <p class="max-w-lg">
362
  You can also try
363
+ <a href="https://huggingface.co/teknium/Puffin-Phi-v2" class="link" target="_blank">Puffin-Phi V2
 
 
 
 
364
  </a>
365
+ quantized version, a fine-tuned version of Phi-1.5 on the
366
+ <a href="https://huggingface.co/datasets/LDJnr/Puffin" class="link" target="_blank">Puffin dataset
 
 
 
 
367
  </a>
368
  </p>
369
  </div>
 
398
  oninput="this.style.height = 0;this.style.height = this.scrollHeight + 'px'"
399
  placeholder="Add your prompt here..."
400
  >
401
+ Instruct: Write a detailed analogy between mathematics and a lighthouse.
402
+ Output:</textarea
403
  >
404
  <button id="clear-btn">
405
  <svg
phiWorker.js CHANGED
@@ -12,6 +12,20 @@ async function fetchArrayBuffer(url) {
12
  cache.put(url, res.clone());
13
  return new Uint8Array(await res.arrayBuffer());
14
  }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
15
  class Phi {
16
  static instance = {};
17
 
@@ -27,10 +41,9 @@ class Phi {
27
  await init();
28
 
29
  self.postMessage({ status: "loading", message: "Loading Model" });
30
-
31
  const [weightsArrayU8, tokenizerArrayU8, configArrayU8] =
32
  await Promise.all([
33
- fetchArrayBuffer(weightsURL),
34
  fetchArrayBuffer(tokenizerURL),
35
  fetchArrayBuffer(configURL),
36
  ]);
 
12
  cache.put(url, res.clone());
13
  return new Uint8Array(await res.arrayBuffer());
14
  }
15
+ async function concatenateArrayBuffers(urls) {
16
+ const arrayBuffers = await Promise.all(urls.map(url => fetchArrayBuffer(url)));
17
+
18
+ let totalLength = arrayBuffers.reduce((acc, arrayBuffer) => acc + arrayBuffer.byteLength, 0);
19
+ let concatenatedBuffer = new Uint8Array(totalLength);
20
+
21
+ let offset = 0;
22
+ arrayBuffers.forEach(buffer => {
23
+ concatenatedBuffer.set(new Uint8Array(buffer), offset);
24
+ offset += buffer.byteLength;
25
+ });
26
+ return concatenatedBuffer;
27
+ }
28
+
29
  class Phi {
30
  static instance = {};
31
 
 
41
  await init();
42
 
43
  self.postMessage({ status: "loading", message: "Loading Model" });
 
44
  const [weightsArrayU8, tokenizerArrayU8, configArrayU8] =
45
  await Promise.all([
46
+ weightsURL instanceof Array ? concatenateArrayBuffers(weightsURL) : fetchArrayBuffer(weightsURL),
47
  fetchArrayBuffer(tokenizerURL),
48
  fetchArrayBuffer(configURL),
49
  ]);