Update index.js
index.js (changed)
@@ -77,12 +77,10 @@ export async function imageTextToText(
   let ortSessionA, ortSessionB, ortSessionC, ortSessionD, ortSessionE;
 
   const prompt_head_len = new Tensor("int64", new BigInt64Array([5n]), [1]);
-  logger.tensor("prompt_head_len", prompt_head_len);
 
   let position_ids;
   let num_decode = 0;
   let history_len = new Tensor("int64", new BigInt64Array([0n]), [1]);
-  logger.tensor("history_len", history_len);
 
   var pos_factor_v = BigInt(1 - IMAGE_EMBED_SIZE + WIDTH_FACTOR);
 
@@ -111,8 +109,7 @@ export async function imageTextToText(
   );
 
   let pos_factor = new Tensor("float16", new Uint16Array([0]), [1]);
-
-
+
   logger.groupCollapsed("[TOKENIZATION] Processing prompt...");
   const tokenizer = await AutoTokenizer.from_pretrained(BASE_MODEL);
   const prompt = `\n<|im_start|>user\n<|vision_start|><|vision_end|>${query}<|im_end|>\n<|im_start|>assistant\n`;
@@ -137,8 +134,6 @@
 
   const dummy = new ort.Tensor("int32", new Int32Array([0]), []);
 
-  if (!ortSessionB) {
-  }
   let { hidden_states } = await ortSessionB.run({
     input_ids: input_ids,
     ids_len: ids_len,
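The third hunk drops an empty `if (!ortSessionB) {}` guard that sat directly in front of `ortSessionB.run(...)`, so session B is evidently expected to already exist at that point. For context, here is a minimal sketch of what a lazy-initialization guard of that kind typically looks like with onnxruntime-web; the model path and execution provider are assumptions for illustration only, not taken from this repository:

// Hypothetical sketch only: lazily create session B before its first use.
// "model_B.onnx" and the "wasm" execution provider are assumed names, not from the diff.
if (!ortSessionB) {
  ortSessionB = await ort.InferenceSession.create("model_B.onnx", {
    executionProviders: ["wasm"],
  });
}

Since the guard in the original code had an empty body, removing it does not change behavior; it was dead code left over from whichever setup path actually creates the five sessions.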