:boom: [Fix] auto-switch endpoint for the selected model
Browse files- networks/endpoint_storage.js +13 -6
- networks/llm_requester.js +22 -6
networks/endpoint_storage.js
CHANGED
@@ -9,6 +9,7 @@ class EndpointStorage {
|
|
9 |
constructor() {
|
10 |
this.init_database();
|
11 |
this.create_endpoint_and_api_key_items();
|
|
|
12 |
}
|
13 |
init_database() {
|
14 |
this.db = new Dexie("endpoints");
|
@@ -82,7 +83,7 @@ class EndpointStorage {
|
|
82 |
}
|
83 |
bind_endpoint_and_api_key_buttons(endpoint_and_api_key_item) {
|
84 |
let self = this;
|
85 |
-
console.log("endpoint_and_api_key_item:", endpoint_and_api_key_item);
|
86 |
let endpoint_submit_button = endpoint_and_api_key_item.find(
|
87 |
".submit-endpoint-button"
|
88 |
);
|
@@ -132,23 +133,29 @@ class EndpointStorage {
|
|
132 |
console.log("fetch available models for endpoint:", endpoint);
|
133 |
let available_models_requester = new AvailableModelsRequester(endpoint);
|
134 |
await available_models_requester.get();
|
135 |
-
|
136 |
-
available_models.forEach((value, index) => {
|
137 |
const option = new Option(value, value);
|
138 |
select.append(option);
|
139 |
});
|
140 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
141 |
// set default model
|
142 |
let default_model = "";
|
143 |
let storage_default_model = localStorage.getItem("default_model");
|
144 |
console.log("storage_default_model:", storage_default_model);
|
145 |
if (
|
146 |
storage_default_model &&
|
147 |
-
|
148 |
) {
|
149 |
default_model = storage_default_model;
|
150 |
-
} else if (
|
151 |
-
default_model =
|
152 |
localStorage.setItem("default_model", default_model);
|
153 |
} else {
|
154 |
default_model = "";
|
|
|
9 |
constructor() {
|
10 |
this.init_database();
|
11 |
this.create_endpoint_and_api_key_items();
|
12 |
+
this.fill_available_models_select("user-customized");
|
13 |
}
|
14 |
init_database() {
|
15 |
this.db = new Dexie("endpoints");
|
|
|
83 |
}
|
84 |
bind_endpoint_and_api_key_buttons(endpoint_and_api_key_item) {
|
85 |
let self = this;
|
86 |
+
// console.log("endpoint_and_api_key_item:", endpoint_and_api_key_item);
|
87 |
let endpoint_submit_button = endpoint_and_api_key_item.find(
|
88 |
".submit-endpoint-button"
|
89 |
);
|
|
|
133 |
console.log("fetch available models for endpoint:", endpoint);
|
134 |
let available_models_requester = new AvailableModelsRequester(endpoint);
|
135 |
await available_models_requester.get();
|
136 |
+
available_models[endpoint].forEach((value, index) => {
|
|
|
137 |
const option = new Option(value, value);
|
138 |
select.append(option);
|
139 |
});
|
140 |
|
141 |
+
let flatten_available_models = [];
|
142 |
+
Object.entries(available_models).forEach(([key, value]) => {
|
143 |
+
flatten_available_models.push(...value);
|
144 |
+
});
|
145 |
+
flatten_available_models = [...new Set(flatten_available_models)];
|
146 |
+
// console.log("flatten_available_models:", flatten_available_models);
|
147 |
+
|
148 |
// set default model
|
149 |
let default_model = "";
|
150 |
let storage_default_model = localStorage.getItem("default_model");
|
151 |
console.log("storage_default_model:", storage_default_model);
|
152 |
if (
|
153 |
storage_default_model &&
|
154 |
+
flatten_available_models.includes(storage_default_model)
|
155 |
) {
|
156 |
default_model = storage_default_model;
|
157 |
+
} else if (flatten_available_models) {
|
158 |
+
default_model = flatten_available_models[0];
|
159 |
localStorage.setItem("default_model", default_model);
|
160 |
} else {
|
161 |
default_model = "";
|
networks/llm_requester.js
CHANGED
@@ -32,7 +32,7 @@ export class ChatCompletionsRequester {
|
|
32 |
temperature !== null ? temperature : get_selected_temperature();
|
33 |
|
34 |
this.openai_endpoint =
|
35 |
-
openai_endpoint ||
|
36 |
this.backend_request_endpoint = "/chat/completions";
|
37 |
this.controller = new AbortController();
|
38 |
}
|
@@ -98,7 +98,7 @@ export class ChatCompletionsRequester {
|
|
98 |
}
|
99 |
}
|
100 |
|
101 |
-
export var available_models = ["notes"];
|
102 |
export class AvailableModelsRequester {
|
103 |
constructor(openai_endpoint) {
|
104 |
this.openai_endpoint = openai_endpoint;
|
@@ -136,13 +136,18 @@ export class AvailableModelsRequester {
|
|
136 |
.then((response) => response.json())
|
137 |
.then((response_json) => {
|
138 |
let data = response_json.data;
|
|
|
|
|
|
|
139 |
data.forEach((item) => {
|
140 |
-
if (
|
141 |
-
available_models.
|
|
|
|
|
|
|
|
|
142 |
}
|
143 |
});
|
144 |
-
available_models.sort();
|
145 |
-
available_models = [...new Set(available_models)];
|
146 |
console.log("available_models:", available_models);
|
147 |
})
|
148 |
.catch((error) => {
|
@@ -153,3 +158,14 @@ export class AvailableModelsRequester {
|
|
153 |
this.controller.abort();
|
154 |
}
|
155 |
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
32 |
temperature !== null ? temperature : get_selected_temperature();
|
33 |
|
34 |
this.openai_endpoint =
|
35 |
+
openai_endpoint || get_endpoint_by_model(this.model);
|
36 |
this.backend_request_endpoint = "/chat/completions";
|
37 |
this.controller = new AbortController();
|
38 |
}
|
|
|
98 |
}
|
99 |
}
|
100 |
|
101 |
+
export var available_models = { "user-customized": ["notes"] };
|
102 |
export class AvailableModelsRequester {
|
103 |
constructor(openai_endpoint) {
|
104 |
this.openai_endpoint = openai_endpoint;
|
|
|
136 |
.then((response) => response.json())
|
137 |
.then((response_json) => {
|
138 |
let data = response_json.data;
|
139 |
+
if (!(this.openai_endpoint in available_models)) {
|
140 |
+
available_models[this.openai_endpoint] = [];
|
141 |
+
}
|
142 |
data.forEach((item) => {
|
143 |
+
if (
|
144 |
+
!available_models[this.openai_endpoint].includes(
|
145 |
+
item.id
|
146 |
+
)
|
147 |
+
) {
|
148 |
+
available_models[this.openai_endpoint].push(item.id);
|
149 |
}
|
150 |
});
|
|
|
|
|
151 |
console.log("available_models:", available_models);
|
152 |
})
|
153 |
.catch((error) => {
|
|
|
158 |
this.controller.abort();
|
159 |
}
|
160 |
}
|
161 |
+
|
162 |
+
/**
 * Look up which endpoint serves the given model.
 *
 * Scans `available_models` (a map of endpoint -> array of model ids,
 * declared at module level) and returns the FIRST endpoint whose model
 * list contains `model`, or "" when no endpoint offers it.
 *
 * Fix: the previous implementation used `Object.entries(...).forEach(...)`
 * with a `return` inside the callback. That `return` only exits the
 * callback — the loop keeps running, so a later matching endpoint would
 * overwrite the result (last match won instead of the intended first
 * match), and all remaining entries were iterated needlessly. A plain
 * `for...of` loop gives a genuine early return.
 *
 * @param {string} model - model id to search for (e.g. "gpt-4o-mini")
 * @returns {string} matching endpoint key, or "" if not found
 */
export function get_endpoint_by_model(model) {
    for (const [endpoint, models] of Object.entries(available_models)) {
        if (models.includes(model)) {
            return endpoint;
        }
    }
    return "";
}
|