Hansimov committed on
Commit
045ac52
1 Parent(s): d35920f

:recycle: [Refactor] ChatCompletionRequester: post

Browse files
Files changed (2) hide show
  1. networks/llm_requester.js +22 -22
  2. server.js +0 -2
networks/llm_requester.js CHANGED
@@ -74,29 +74,29 @@ export class ChatCompletionsRequester {
74
  create_messager("user", this.prompt);
75
  create_messager("assistant", "", this.model, this.temperature);
76
  }
77
- post() {
78
  this.construct_request_params();
79
- return fetch(this.backend_request_endpoint, this.backend_request_params)
80
- .then((response) => response.body)
81
- .then((rb) => {
82
- const reader = rb.getReader();
83
- let buffer = "";
84
- return reader.read().then(function process({ done, value }) {
85
- if (done) {
86
- return;
87
- }
88
- buffer += stringify_stream_bytes(value);
89
- let boundary = buffer.lastIndexOf("\n");
90
- if (boundary !== -1) {
91
- let input = buffer.substring(0, boundary);
92
- buffer = buffer.substring(boundary + 1);
93
- let json_chunks = jsonize_stream_data(input);
94
- update_message(json_chunks);
95
- }
96
- return reader.read().then(process);
97
- });
98
- })
99
- .catch((error) => console.error("Error:", error));
100
  }
101
  stop() {
102
  this.controller.abort();
 
74
  create_messager("user", this.prompt);
75
  create_messager("assistant", "", this.model, this.temperature);
76
  }
77
+ async post() {
78
  this.construct_request_params();
79
+ const response = await fetch(
80
+ this.backend_request_endpoint,
81
+ this.backend_request_params
82
+ );
83
+ const reader = response.body.getReader();
84
+ let buffer = "";
85
+ return reader.read().then(function process({ done, value }) {
86
+ if (done) {
87
+ return;
88
+ }
89
+ buffer += stringify_stream_bytes(value);
90
+ let boundary = buffer.lastIndexOf("\n");
91
+ if (boundary !== -1) {
92
+ let input = buffer.substring(0, boundary);
93
+ buffer = buffer.substring(boundary + 1);
94
+ let json_chunks = jsonize_stream_data(input);
95
+ console.log(json_chunks);
96
+ update_message(json_chunks);
97
+ }
98
+ return reader.read().then(process);
99
+ });
100
  }
101
  stop() {
102
  this.controller.abort();
server.js CHANGED
@@ -28,7 +28,6 @@ app.post("/chat/completions", async (req, res) => {
28
  headers: openai_request_headers,
29
  responseType: "stream",
30
  });
31
-
32
  response.data.pipe(res);
33
  } catch (error) {
34
  console.error(error);
@@ -50,7 +49,6 @@ app.post("/models", async (req, res) => {
50
  headers: openai_request_headers,
51
  });
52
  res.json(response.data);
53
-
54
  } catch (error) {
55
  console.error(error);
56
  res.status(500).json({ error: "Failed to request OpenAI Endpoint" });
 
28
  headers: openai_request_headers,
29
  responseType: "stream",
30
  });
 
31
  response.data.pipe(res);
32
  } catch (error) {
33
  console.error(error);
 
49
  headers: openai_request_headers,
50
  });
51
  res.json(response.data);
 
52
  } catch (error) {
53
  console.error(error);
54
  res.status(500).json({ error: "Failed to request OpenAI Endpoint" });