Hansimov committed
Commit: eb28af4
Parent: 2e70541

:gem: [WIP] Store endpoint and api key in localStorage

apps/llm_mixer/index.html CHANGED
@@ -73,13 +73,31 @@
     </button>
   </div>
 </div>
-<div class="mt-3">
-    <button id="prev-user-input" class="btn btn-dark">
-        <span class="fa fa-arrow-left"></span>
-    </button>
-    <button id="next-user-input" class="btn btn-dark">
-        <span class="fa fa-arrow-right"></span>
-    </button>
+<div class="row mt-2 no-gutters">
+    <div class="col-auto pl-3 pr-0">
+        <button id="prev-user-input" class="btn btn-dark">
+            <span class="fa fa-arrow-left"></span>
+        </button>
+        <button id="next-user-input" class="btn btn-dark">
+            <span class="fa fa-arrow-right"></span>
+        </button>
+    </div>
+    <form class="col px-1">
+        <input
+            id="openai-endpoint"
+            class="form-control"
+            rows="1"
+            placeholder="Endpoint ..."
+        ></input>
+    </form>
+    <form class="col px-1">
+        <input
+            id="openai-api-key"
+            class="form-control"
+            rows="1"
+            placeholder="API Key ..."
+        ></input>
+    </form>
 </div>
 </div>
 </div>
apps/llm_mixer/js/llm_models_loader.js CHANGED
@@ -34,3 +34,32 @@ export async function setup_temperature_on_select(default_option = null) {
     }
     console.log(`Default temperature: ${select.val()}`);
 }
+
+export function setup_endpoint_and_key() {
+    if (localStorage.getItem("openai_endpoint")) {
+        $("#openai-endpoint").val(localStorage.getItem("openai_endpoint"));
+        console.log("GET: OpenAI Endpoint!");
+    } else {
+        console.log("NULL: OpenAI Endpoint!");
+        $("#openai-endpoint").on("submit", function (event) {
+            event.preventDefault();
+            localStorage.setItem(
+                "openai_endpoint",
+                $("#openai-endpoint").val()
+            );
+            console.log("SET: OpenAI Endpoint!");
+        });
+    }
+
+    if (localStorage.getItem("openai_api_key")) {
+        $("#openai-api-key").val(localStorage.getItem("openai_api_key"));
+        console.log("GET: OpenAI API Key!");
+    } else {
+        console.log("NULL: OpenAI API Key!");
+        $("#openai-api-key").on("submit", function (event) {
+            event.preventDefault();
+            localStorage.setItem("openai_api_key", $("#openai-api-key").val());
+            console.log("SET: OpenAI API Key!");
+        });
+    }
+}
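
Note on the new setup_endpoint_and_key(): the "submit" handlers above are bound to the <input> elements, but submit events are dispatched on the enclosing <form>, so as written they may never fire. Below is a minimal sketch of the likely intent (save the value when Enter is pressed in the field); only the selectors and storage keys come from this commit, the helper name and structure are illustrative.

// Sketch only: bind to the input's parent <form>, where "submit" actually fires.
function bind_storage_on_submit(input_selector, storage_key) {
    $(input_selector)
        .closest("form")
        .on("submit", function (event) {
            event.preventDefault(); // keep the page from reloading
            localStorage.setItem(storage_key, $(input_selector).val());
            console.log(`SET: ${storage_key}`);
        });
}
// bind_storage_on_submit("#openai-endpoint", "openai_endpoint");
// bind_storage_on_submit("#openai-api-key", "openai_api_key");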
apps/llm_mixer/js/llm_requester.js CHANGED
@@ -2,7 +2,7 @@ import {
     jsonize_stream_data,
     stringify_stream_bytes,
 } from "./stream_jsonizer.js";
-import * as secrets from "./secrets.js";
+// import * as secrets from "./secrets.js";
 import {
     update_message,
     create_messager,
@@ -23,8 +23,8 @@ export class ChatCompletionsRequester {
         this.model = model || get_selected_llm_model() || "gpt-turbo-3.5";
         this.temperature =
             temperature !== null ? temperature : get_selected_temperature();
-        this.endpoint = endpoint || secrets.openai_endpoint;
-        this.cors_proxy = cors_proxy || secrets.cors_proxy;
+        this.endpoint = endpoint || localStorage.getItem("openai_endpoint");
+        this.cors_proxy = cors_proxy || "https://cors-anywhere.herokuapp.com/";
         this.request_endpoint = this.cors_proxy + this.endpoint;
         this.controller = new AbortController();
     }
@@ -34,7 +34,7 @@ export class ChatCompletionsRequester {
     construct_request_headers() {
         this.request_headers = {
             "Content-Type": "application/json",
-            Authorization: `Bearer ${secrets.openai_api_key}`,
+            Authorization: `Bearer ${localStorage.getItem("openai_api_key")}`,
         };
     }
     construct_request_body() {
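
Note on the localStorage fallbacks: localStorage.getItem() returns null for keys that were never set, so before the user saves values through the new inputs the Authorization header evaluates to "Bearer null" and request_endpoint ends in the string "null". A minimal guard sketch (the warning text is illustrative, not part of the commit):

// Sketch only: warn when the stored values are missing instead of sending "null".
const stored_endpoint = localStorage.getItem("openai_endpoint");
const stored_api_key = localStorage.getItem("openai_api_key");
if (!stored_endpoint || !stored_api_key) {
    console.warn("OpenAI endpoint or API key not set yet; fill the new inputs first.");
}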
apps/llm_mixer/js/main.js CHANGED
@@ -1,6 +1,7 @@
 import {
     setup_available_models_on_select,
     setup_temperature_on_select,
+    setup_endpoint_and_key,
 } from "./llm_models_loader.js";
 import { bind_chat_buttons } from "./buttons_binder.js";
 var user_input_history = [];
@@ -23,6 +24,7 @@ function auto_resize_user_input() {
 }
 
 function setup_interactive_components() {
+    setup_endpoint_and_key();
     setup_available_models_on_select();
     setup_temperature_on_select();
     bind_chat_buttons();
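
With this commit the endpoint and API key live in the browser's localStorage instead of secrets.js, so they can be inspected or cleared from the devtools console, for example:

// Read back the values saved by setup_endpoint_and_key()
localStorage.getItem("openai_endpoint");
localStorage.getItem("openai_api_key");
// Remove one to exercise the NULL branch again on the next page load
localStorage.removeItem("openai_api_key");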