:boom: [Fix] ChatCompletionsRequester: select endpoint and model
networks/llm_requester.js (CHANGED)

@@ -18,15 +18,20 @@ export class ChatCompletionsRequester {
         openai_endpoint = null
     ) {
         this.prompt = prompt;
-        this.
+        this.openai_endpoint =
+            openai_endpoint || this.extract_endpoint_and_model()[0];
+        this.model = model || this.extract_endpoint_and_model()[1];
         this.temperature =
             temperature !== null ? temperature : get_selected_temperature();
-
-        this.openai_endpoint =
-            openai_endpoint || get_endpoint_by_model(this.model);
         this.backend_request_endpoint = "/chat/completions";
         this.controller = new AbortController();
     }
+    extract_endpoint_and_model() {
+        let model_id_with_endpoint = get_selected_llm_model();
+        this.openai_endpoint = model_id_with_endpoint.split("|")[0];
+        this.model = model_id_with_endpoint.split("|")[1];
+        return [this.openai_endpoint, this.model];
+    }
     construct_openai_request_headers() {
         this.backend_request_headers = {
             "Content-Type": "application/json",
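For context, a minimal sketch of the parsing that the new extract_endpoint_and_model() relies on: the value returned by get_selected_llm_model() is assumed to be a single "{endpoint}|{model_id}" string; the endpoint URL and model id below are made-up example values, not taken from the repository.

    // Hypothetical selected value in "{endpoint}|{model_id}" form (example only).
    const selected = "https://api.example.com/v1|gpt-4o-mini";
    // Same split("|") logic as extract_endpoint_and_model(), written standalone.
    const [endpoint, model] = selected.split("|");
    // endpoint -> "https://api.example.com/v1"
    // model    -> "gpt-4o-mini"

Note that extract_endpoint_and_model() also assigns this.openai_endpoint and this.model internally, so the second call in the constructor re-parses the stored selection; as written in the diff, passing an explicit openai_endpoint without a model ends up overwriting that endpoint with the parsed one.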
storages/endpoint_storage.js (CHANGED)

@@ -188,7 +188,7 @@ class EndpointStorage {
             ).hostname
                 .split(".")[0]
                 .split("-")[0];
-            let model_id_with_endpoint = `${row.endpoint}
+            let model_id_with_endpoint = `${row.endpoint}|${model_id}`;
 
             model_name = `${model_id} (${endpoint_hostname})`;
             const option = new Option(
@@ -208,7 +208,7 @@ class EndpointStorage {
 
     set_default_model() {
         let storage_default_model = localStorage.getItem("default_model");
-        // format of storage_default_model is `{endpoint}
+        // format of storage_default_model is `{endpoint}|{model_id}`
         // if storage_default_model is null, or not in the available_models_select,
         // set as the first one of available_models_select
         let select = $("#available-models-select");
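Taken together with the requester change above, the intent appears to be that the option values in #available-models-select and the "default_model" entry in localStorage carry the same "{endpoint}|{model_id}" string, which extract_endpoint_and_model() can split back apart. A rough sketch of that round trip; row.endpoint, model_id, model_id_with_endpoint, and the "default_model" key come from the diff, while the concrete values are examples.

    // Hypothetical inputs; in the real code these come from the endpoint table row.
    const row = { endpoint: "https://api.example.com/v1" };
    const model_id = "gpt-4o-mini";

    // Value format introduced by this commit: "{endpoint}|{model_id}".
    const model_id_with_endpoint = `${row.endpoint}|${model_id}`;
    localStorage.setItem("default_model", model_id_with_endpoint);

    // Reading it back recovers both halves, mirroring extract_endpoint_and_model().
    const [endpoint, model] = localStorage.getItem("default_model").split("|");
    // endpoint -> "https://api.example.com/v1", model -> "gpt-4o-mini"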