NyxKrage committed on
Commit 0b4ceff
1 Parent(s): 8599799

Update index.html

Files changed (1)
  1. index.html +35 -32
index.html CHANGED
@@ -142,40 +142,43 @@
     }

     async function calculateSizes(format) {
-        const model_config = await modelConfig(document.getElementById("modelsearch").value)
-        const context = parseInt(document.getElementById("contextsize").value)
-        let bsz = 512
-        let fp8_cache = false
-        let bpw = 0
-        if (format === "gguf") {
-            bsz = parseInt(document.getElementById("batchsize").value)
-            bpw = gguf_quants[document.getElementById("quantsize").innerText]
-
-        } else if (format == "exl2") {
-            fp8_cache = document.getElementById("fp8cache").checked
-            bpw = Number.parseFloat(document.getElementById("bpw").value)
-        }
-
-        const model_size = modelSize(model_config, bpw)
-        const context_size = contextSize(context, model_config, bsz, fp8_cache)
-        const total_size = ((model_size + context_size) / 1e+9)
-        document.getElementById("resultmodel").innerText = (model_size / 1e+9).toFixed(2)
-        document.getElementById("resultcontext").innerText = (context_size / 1e+9).toFixed(2)
-        const result_total_el = document.getElementById("resulttotal");
-        result_total_el.innerText = total_size.toFixed(2)
-
-        const gpu = document.getElementById("gpusearch").value
-        if (gpu !== "") {
-            const vram = parseFloat(gpu.split("-")[1].replace("GB", "").trim())
-            if (vram - total_size > 0.5) {
-                result_total_el.style.backgroundColor = "#bef264"
-            } else if (vram - total_size > 0) {
-                result_total_el.style.backgroundColor = "#facc15"
-            } else {
-                result_total_el.style.backgroundColor = "#ef4444"
+        try {
+            const model_config = await modelConfig(document.getElementById("modelsearch").value)
+            const context = parseInt(document.getElementById("contextsize").value)
+            let bsz = 512
+            let fp8_cache = false
+            let bpw = 0
+            if (format === "gguf") {
+                bsz = parseInt(document.getElementById("batchsize").value)
+                bpw = gguf_quants[document.getElementById("quantsize").innerText]
+
+            } else if (format == "exl2") {
+                fp8_cache = document.getElementById("fp8cache").checked
+                bpw = Number.parseFloat(document.getElementById("bpw").value)
             }
+
+            const model_size = modelSize(model_config, bpw)
+            const context_size = contextSize(context, model_config, bsz, fp8_cache)
+            const total_size = ((model_size + context_size) / 1e+9)
+            document.getElementById("resultmodel").innerText = (model_size / 1e+9).toFixed(2)
+            document.getElementById("resultcontext").innerText = (context_size / 1e+9).toFixed(2)
+            const result_total_el = document.getElementById("resulttotal");
+            result_total_el.innerText = total_size.toFixed(2)
+
+            const gpu = document.getElementById("gpusearch").value
+            if (gpu !== "") {
+                const vram = parseFloat(gpu.split("-")[1].replace("GB", "").trim())
+                if (vram - total_size > 0.5) {
+                    result_total_el.style.backgroundColor = "#bef264"
+                } else if (vram - total_size > 0) {
+                    result_total_el.style.backgroundColor = "#facc15"
+                } else {
+                    result_total_el.style.backgroundColor = "#ef4444"
+                }
+            }
+        } catch(e) {
+            alert(e);
         }
-
     }
     </script>
     <link href="./styles.css" rel="stylesheet">
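
Note (not part of the commit): the change wraps the body of calculateSizes in try/catch so that failures, such as a model-config lookup that throws, are reported to the user via alert(e) instead of leaving the results silently stale. For reference, the headroom-to-color mapping that the patched code applies to the total estimate can be read as the standalone sketch below; the helper name and the example numbers are illustrative only and do not come from the repository:

    // Sketch of the color logic used above: headroom = GPU VRAM minus the
    // estimated total (model + context), both in gigabytes.
    function headroomColor(vramGb, totalGb) {
        const headroom = vramGb - totalGb
        if (headroom > 0.5) return "#bef264"   // fits with more than 0.5 GB to spare: green
        if (headroom > 0) return "#facc15"     // fits, but with less than 0.5 GB to spare: yellow
        return "#ef4444"                       // does not fit: red
    }

    // Example: a 24 GB card against a 23.6 GB estimate lands in the yellow band.
    console.log(headroomColor(24, 23.6))       // "#facc15"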