radames (HF staff) committed
Commit 051c520
1 Parent(s): 7042210

Upload 6 files

Files changed (6):
  1. build/m.d.ts +3 -2
  2. build/m.js +6 -3
  3. build/m_bg.wasm +2 -2
  4. build/m_bg.wasm.d.ts +1 -1
  5. index.html +88 -26
  6. phiWorker.js +16 -5
build/m.d.ts CHANGED
@@ -7,9 +7,10 @@ export class Model {
   /**
   * @param {Uint8Array} weights
   * @param {Uint8Array} tokenizer
+  * @param {Uint8Array} config
   * @param {boolean} quantized
   */
-  constructor(weights: Uint8Array, tokenizer: Uint8Array, quantized: boolean);
+  constructor(weights: Uint8Array, tokenizer: Uint8Array, config: Uint8Array, quantized: boolean);
   /**
   * @param {string} prompt
   * @param {number} temp
@@ -31,7 +32,7 @@ export type InitInput = RequestInfo | URL | Response | BufferSource | WebAssembl
 export interface InitOutput {
   readonly memory: WebAssembly.Memory;
   readonly __wbg_model_free: (a: number) => void;
-  readonly model_load: (a: number, b: number, c: number, d: number, e: number, f: number) => void;
+  readonly model_load: (a: number, b: number, c: number, d: number, e: number, f: number, g: number, h: number) => void;
   readonly model_init_with_prompt: (a: number, b: number, c: number, d: number, e: number, f: number, g: number, h: number, i: number) => void;
   readonly model_next_token: (a: number, b: number) => void;
   readonly main: (a: number, b: number) => number;
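
The net effect of this typing change is that `Model` now takes the model's config JSON as a third byte buffer. A minimal usage sketch (file names and base URL are taken from the `index.html` hunks below; the `fetchBytes` helper and the default `init` export are assumptions based on the usual wasm-bindgen output):

```js
// Sketch only: assumes build/m.js exposes the usual wasm-bindgen
// `default` init function alongside the generated `Model` class.
import init, { Model } from "./build/m.js";

// Hypothetical helper: download a file and return it as a Uint8Array.
async function fetchBytes(url) {
  const res = await fetch(url);
  return new Uint8Array(await res.arrayBuffer());
}

async function loadPhi() {
  await init(); // instantiate the wasm module before constructing Model

  const base =
    "https://huggingface.co/lmz/candle-quantized-phi/resolve/refs%2Fpr%2F4/";
  const [weights, tokenizer, config] = await Promise.all([
    fetchBytes(base + "model-q4k.gguf"),
    fetchBytes(base + "tokenizer.json"),
    fetchBytes(base + "phi-1_5.json"),
  ]);

  // New four-argument constructor: weights, tokenizer, config, quantized.
  return new Model(weights, tokenizer, config, true);
}
```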
build/m.js CHANGED
@@ -151,16 +151,19 @@ export class Model {
     /**
     * @param {Uint8Array} weights
     * @param {Uint8Array} tokenizer
+    * @param {Uint8Array} config
     * @param {boolean} quantized
     */
-    constructor(weights, tokenizer, quantized) {
+    constructor(weights, tokenizer, config, quantized) {
         try {
             const retptr = wasm.__wbindgen_add_to_stack_pointer(-16);
             const ptr0 = passArray8ToWasm0(weights, wasm.__wbindgen_malloc);
             const len0 = WASM_VECTOR_LEN;
             const ptr1 = passArray8ToWasm0(tokenizer, wasm.__wbindgen_malloc);
             const len1 = WASM_VECTOR_LEN;
-            wasm.model_load(retptr, ptr0, len0, ptr1, len1, quantized);
+            const ptr2 = passArray8ToWasm0(config, wasm.__wbindgen_malloc);
+            const len2 = WASM_VECTOR_LEN;
+            wasm.model_load(retptr, ptr0, len0, ptr1, len1, ptr2, len2, quantized);
            var r0 = getInt32Memory0()[retptr / 4 + 0];
            var r1 = getInt32Memory0()[retptr / 4 + 1];
            var r2 = getInt32Memory0()[retptr / 4 + 2];
@@ -299,7 +302,7 @@ function __wbg_get_imports() {
     imports.wbg.__wbindgen_object_drop_ref = function(arg0) {
         takeObject(arg0);
     };
-    imports.wbg.__wbg_log_ff7e0b5e6573cdff = function(arg0, arg1) {
+    imports.wbg.__wbg_log_082f7c89404ec861 = function(arg0, arg1) {
         console.log(getStringFromWasm0(arg0, arg1));
     };
     imports.wbg.__wbg_crypto_c48a774b022d20ac = function(arg0) {
build/m_bg.wasm CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:4f38c8d88da95e96b33813a5e6f0479028ec4c550ea4a055082d2c5ff129605f
-size 4368129
+oid sha256:31acdbb8b18f216db0b64e628bd398f63260b35b6dfb3d1e23d01d757b8b9163
+size 4544545
build/m_bg.wasm.d.ts CHANGED
@@ -2,7 +2,7 @@
 /* eslint-disable */
 export const memory: WebAssembly.Memory;
 export function __wbg_model_free(a: number): void;
-export function model_load(a: number, b: number, c: number, d: number, e: number, f: number): void;
+export function model_load(a: number, b: number, c: number, d: number, e: number, f: number, g: number, h: number): void;
 export function model_init_with_prompt(a: number, b: number, c: number, d: number, e: number, f: number, g: number, h: number, i: number): void;
 export function model_next_token(a: number, b: number): void;
 export function main(a: number, b: number): number;
index.html CHANGED
@@ -13,7 +13,8 @@
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
     <link
      rel="stylesheet"
-      href="https://cdn.jsdelivr.net/gh/highlightjs/cdn-release@11.8.0/build/styles/default.min.css" />
+      href="https://cdn.jsdelivr.net/gh/highlightjs/cdn-release@11.8.0/build/styles/default.min.css"
+    />
     <style>
      @import url("https://fonts.googleapis.com/css2?family=Source+Code+Pro:wght@200;300;400&family=Source+Sans+3:wght@100;200;300;400;500;600;700;800;900&display=swap");
      html,
@@ -36,22 +37,41 @@
     <script type="module">
      import snarkdown from "https://cdn.skypack.dev/snarkdown";
      import hljs from "https://cdn.skypack.dev/highlight.js";
-
-      const TOKENIZER_URL =
-        "https://huggingface.co/microsoft/phi-1_5/raw/main/tokenizer.json";
      // models base url
      const MODELS = {
        phi_1_5_quantized: {
          base_url:
-            "https://huggingface.co/lmz/candle-quantized-phi/resolve/main/",
+            "https://huggingface.co/lmz/candle-quantized-phi/resolve/refs%2Fpr%2F4/",
          model: "model-q4k.gguf",
+          tokenizer: "tokenizer.json",
+          config: "phi-1_5.json",
          quantized: true,
          seq_len: 2048,
        },
        phi_1_5_quantized_2: {
          base_url:
-            "https://huggingface.co/lmz/candle-quantized-phi/resolve/main/",
+            "https://huggingface.co/lmz/candle-quantized-phi/resolve/refs%2Fpr%2F4/",
          model: "model-q80.gguf",
+          tokenizer: "tokenizer.json",
+          config: "phi-1_5.json",
+          quantized: true,
+          seq_len: 2048,
+        },
+        puffin_phi_v2_quantized: {
+          base_url:
+            "https://huggingface.co/lmz/candle-quantized-phi/resolve/refs%2Fpr%2F4/",
+          model: "model-puffin-phi-v2-q4k.gguf",
+          tokenizer: "tokenizer-puffin-phi-v2.json",
+          config: "puffin-phi-v2.json",
+          quantized: true,
+          seq_len: 2048,
+        },
+        puffin_phi_v2_quantized_2: {
+          base_url:
+            "https://huggingface.co/lmz/candle-quantized-phi/resolve/refs%2Fpr%2F4/",
+          model: "model-puffin-phi-v2-q80.gguf",
+          tokenizer: "tokenizer-puffin-phi-v2.json",
+          config: "puffin-phi-v2.json",
          quantized: true,
          seq_len: 2048,
        },
@@ -65,6 +85,8 @@
      const modelID = getValue("model");
      const model = MODELS[modelID];
      const weightsURL = model.base_url + model.model;
+      const tokenizerURL = model.base_url + model.tokenizer;
+      const configURL = model.base_url + model.config;

      const prompt = getValue("prompt").trim();
      const temperature = getValue("temperature");
@@ -107,7 +129,8 @@
      phiWorker.postMessage({
        weightsURL,
        modelID,
-        tokenizerURL: TOKENIZER_URL,
+        tokenizerURL,
+        configURL,
        quantized: model.quantized,
        prompt,
        temp: temperature,
@@ -217,10 +240,27 @@
          <a
            href="https://arxiv.org/pdf/2309.05463.pdf#page=8"
            class="link"
-            target="_blank">
+            target="_blank"
+          >
            technical report </a
          >.
        </p>
+        <p class="max-w-lg">
+          You can also try the
+          <a
+            href="https://huggingface.co/teknium/Puffin-Phi-v2"
+            class="link"
+            target="_blank"
+            >Puffin-Phi V2
+          </a>
+          quantized model, a version of Phi-1.5 fine-tuned on the
+          <a
+            href="https://huggingface.co/datasets/LDJnr/Puffin"
+            class="link"
+            target="_blank"
+            >Puffin dataset
+          </a>
+        </p>
      </div>
      <div>
        <p class="text-xs italic max-w-lg">
@@ -234,26 +274,34 @@
        <label for="model" class="font-medium">Models Options: </label>
        <select
          id="model"
-          class="border-2 border-gray-500 rounded-md font-light">
+          class="border-2 border-gray-500 rounded-md font-light"
+        >
          <option value="phi_1_5_quantized" selected>
            phi 1.5 quantized q4k (800 MB)
          </option>
          <option value="phi_1_5_quantized_2">
            phi 1.5 quantized q80 (1.51 GB)
          </option>
-          <!-- <option value="phi_1_5">phi 1.5 (2.84 GB)</option> -->
+          <option value="puffin_phi_v2_quantized">
+            Puffin-Phi V2 quantized q4k (798 MB)
+          </option>
+          <option value="puffin_phi_v2_quantized_2">
+            Puffin-Phi V2 quantized q80 (1.50 GB)
+          </option>
        </select>
      </div>
      <form
        id="form"
-        class="flex text-normal px-1 py-1 border border-gray-700 rounded-md items-center">
+        class="flex text-normal px-1 py-1 border border-gray-700 rounded-md items-center"
+      >
        <input type="submit" hidden />
        <textarea
          type="text"
          id="prompt"
          class="font-light w-full px-3 py-2 mx-1 resize-none outline-none"
          oninput="this.style.height = 0;this.style.height = this.scrollHeight + 'px'"
-          placeholder="Add your prompt here...">
+          placeholder="Add your prompt here..."
+        >
 Write a detailed analogy between mathematics and a lighthouse.
 Answer:</textarea
        >
@@ -262,18 +310,21 @@ Answer:</textarea
          fill="none"
          xmlns="http://www.w3.org/2000/svg"
          width="40"
-          viewBox="0 0 70 40">
+          viewBox="0 0 70 40"
+        >
          <path opacity=".5" d="M39 .2v40.2" stroke="#1F2937" />
          <path
            d="M1.5 11.5 19 29.1m0-17.6L1.5 29.1"
            opacity=".5"
            stroke="#1F2937"
-            stroke-width="2" />
+            stroke-width="2"
+          />
        </svg>
      </button>
      <button
        id="run"
-        class="bg-gray-700 hover:bg-gray-800 text-white font-normal py-2 w-16 rounded disabled:bg-gray-300 disabled:cursor-not-allowed">
+        class="bg-gray-700 hover:bg-gray-800 text-white font-normal py-2 w-16 rounded disabled:bg-gray-300 disabled:cursor-not-allowed"
+      >
        Run
      </button>
    </form>
@@ -292,9 +343,11 @@ Answer:</textarea
        max="2048"
        step="1"
        value="200"
-        oninput="this.nextElementSibling.value = Number(this.value)" />
+        oninput="this.nextElementSibling.value = Number(this.value)"
+      />
      <output
-        class="text-xs w-[50px] text-center font-light px-1 py-1 border border-gray-700 rounded-md">
+        class="text-xs w-[50px] text-center font-light px-1 py-1 border border-gray-700 rounded-md"
+      >
        200</output
      >
      <label class="text-sm font-medium" for="temperature"
@@ -308,9 +361,11 @@ Answer:</textarea
        max="2"
        step="0.01"
        value="0.00"
-        oninput="this.nextElementSibling.value = Number(this.value).toFixed(2)" />
+        oninput="this.nextElementSibling.value = Number(this.value).toFixed(2)"
+      />
      <output
-        class="text-xs w-[50px] text-center font-light px-1 py-1 border border-gray-700 rounded-md">
+        class="text-xs w-[50px] text-center font-light px-1 py-1 border border-gray-700 rounded-md"
+      >
        0.00</output
      >
      <label class="text-sm font-medium" for="top-p">Top-p</label>
@@ -322,9 +377,11 @@ Answer:</textarea
        max="1"
        step="0.01"
        value="1.00"
-        oninput="this.nextElementSibling.value = Number(this.value).toFixed(2)" />
+        oninput="this.nextElementSibling.value = Number(this.value).toFixed(2)"
+      />
      <output
-        class="text-xs w-[50px] text-center font-light px-1 py-1 border border-gray-700 rounded-md">
+        class="text-xs w-[50px] text-center font-light px-1 py-1 border border-gray-700 rounded-md"
+      >
        1.00</output
      >

@@ -340,7 +397,8 @@ Answer:</textarea
        max="2"
        step="0.01"
        value="1.10"
-        oninput="this.nextElementSibling.value = Number(this.value).toFixed(2)" />
+        oninput="this.nextElementSibling.value = Number(this.value).toFixed(2)"
+      />
      <output
        class="text-xs w-[50px] text-center font-light px-1 py-1 border border-gray-700 rounded-md"
        >1.10</output
@@ -351,11 +409,13 @@ Answer:</textarea
        id="seed"
        name="seed"
        value="299792458"
-        class="font-light border border-gray-700 text-right rounded-md p-2" />
+        class="font-light border border-gray-700 text-right rounded-md p-2"
+      />
      <button
        id="run"
        onclick="document.querySelector('#seed').value = Math.floor(Math.random() * Number.MAX_SAFE_INTEGER)"
-        class="bg-gray-700 hover:bg-gray-800 text-white font-normal py-1 w-[50px] rounded disabled:bg-gray-300 disabled:cursor-not-allowed text-sm">
+        class="bg-gray-700 hover:bg-gray-800 text-white font-normal py-1 w-[50px] rounded disabled:bg-gray-300 disabled:cursor-not-allowed text-sm"
+      >
        Rand
      </button>
    </div>
@@ -364,11 +424,13 @@ Answer:</textarea
    <div>
      <h3 class="font-medium">Generation:</h3>
      <div
-        class="min-h-[250px] bg-slate-100 text-gray-500 p-4 rounded-md flex flex-col gap-2">
+        class="min-h-[250px] bg-slate-100 text-gray-500 p-4 rounded-md flex flex-col gap-2"
+      >
        <div
          id="output-counter"
          hidden
-          class="ml-auto font-semibold grid-rows-1 text-sm"></div>
+          class="ml-auto font-semibold grid-rows-1 text-sm"
+        ></div>
        <p hidden id="output-generation" class="grid-rows-2"></p>
        <span id="output-status" class="m-auto font-light"
          >No output yet</span
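
With this change, each `MODELS` entry carries its own `tokenizer` and `config` file names next to the weights, so registering another checkpoint only requires one more object with the same fields. A hypothetical entry (the repository path is a placeholder):

```js
// Hypothetical additional entry; field names mirror those the page reads:
// base_url, model, tokenizer, config, quantized, seq_len.
const MODELS = {
  my_phi_variant_q4k: {
    base_url: "https://huggingface.co/<user>/<repo>/resolve/main/", // placeholder repo
    model: "model-q4k.gguf",     // weights file, appended to base_url
    tokenizer: "tokenizer.json", // tokenizer file for this checkpoint
    config: "config.json",       // model config JSON (new requirement)
    quantized: true,
    seq_len: 2048,
  },
};
```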
phiWorker.js CHANGED
@@ -15,21 +15,30 @@ async function fetchArrayBuffer(url) {
 class Phi {
   static instance = {};

-  static async getInstance(weightsURL, modelID, tokenizerURL, quantized) {
+  static async getInstance(
+    weightsURL,
+    modelID,
+    tokenizerURL,
+    configURL,
+    quantized
+  ) {
     // load individual modelID only once
     if (!this.instance[modelID]) {
       await init();

       self.postMessage({ status: "loading", message: "Loading Model" });

-      const [weightsArrayU8, tokenizerArrayU8] = await Promise.all([
-        fetchArrayBuffer(weightsURL),
-        fetchArrayBuffer(tokenizerURL),
-      ]);
+      const [weightsArrayU8, tokenizerArrayU8, configArrayU8] =
+        await Promise.all([
+          fetchArrayBuffer(weightsURL),
+          fetchArrayBuffer(tokenizerURL),
+          fetchArrayBuffer(configURL),
+        ]);

       this.instance[modelID] = new Model(
         weightsArrayU8,
         tokenizerArrayU8,
+        configArrayU8,
         quantized
       );
     }
@@ -52,6 +61,7 @@ async function generate(data) {
     weightsURL,
     modelID,
     tokenizerURL,
+    configURL,
     quantized,
     prompt,
     temp,
@@ -66,6 +76,7 @@ async function generate(data) {
       weightsURL,
       modelID,
       tokenizerURL,
+      configURL,
       quantized
     );
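
Taken together with the `index.html` changes, the message posted to the worker now carries `configURL` alongside the other URLs. A condensed sketch of the main-thread side (only the fields visible in this diff are spelled out; creating the worker as a module worker is an assumption):

```js
// Sketch: spawn phiWorker.js and send one generation request.
const phiWorker = new Worker("./phiWorker.js", { type: "module" });

const base =
  "https://huggingface.co/lmz/candle-quantized-phi/resolve/refs%2Fpr%2F4/";
phiWorker.postMessage({
  weightsURL: base + "model-q4k.gguf",
  modelID: "phi_1_5_quantized",
  tokenizerURL: base + "tokenizer.json",
  configURL: base + "phi-1_5.json", // new field, forwarded to Phi.getInstance()
  quantized: true,
  prompt: "Write a detailed analogy between mathematics and a lighthouse.\nAnswer:",
  temp: 0.0,
  // ...remaining sampling parameters (top-p, repeat penalty, seed, etc.) omitted here.
});

phiWorker.addEventListener("message", (event) => {
  // The worker reports progress, e.g. { status: "loading", message: "Loading Model" }.
  console.log(event.data);
});
```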