icebear0828 Claude Opus 4.6 committed on
Commit
ebe8094
·
1 Parent(s): a931669

feat: support developer role and array content in OpenAI protocol

Browse files

- Add "developer" to role enum (treated as system/instructions)
- Accept array content parts (extract text, ignore non-text)
- Flatten array content before passing to session manager

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>

src/routes/chat.ts CHANGED
@@ -113,7 +113,12 @@ export function createChatRoutes(
113
  cookieJar,
114
  {
115
  codexRequest,
116
- sessionMessages: req.messages,
 
 
 
 
 
117
  model: codexRequest.model,
118
  isStreaming: req.stream,
119
  },
 
113
  cookieJar,
114
  {
115
  codexRequest,
116
+ sessionMessages: req.messages.map((m) => ({
117
+ role: m.role,
118
+ content: typeof m.content === "string"
119
+ ? m.content
120
+ : m.content.filter((p) => p.type === "text" && p.text).map((p) => p.text!).join("\n"),
121
+ })),
122
  model: codexRequest.model,
123
  isStreaming: req.stream,
124
  },
src/translation/openai-to-codex.ts CHANGED
@@ -2,7 +2,7 @@
2
  * Translate OpenAI Chat Completions request β†’ Codex Responses API request.
3
  */
4
 
5
- import type { ChatCompletionRequest } from "../types/openai.js";
6
  import type {
7
  CodexResponsesRequest,
8
  CodexInputItem,
@@ -11,11 +11,20 @@ import { resolveModelId, getModelInfo } from "../routes/models.js";
11
  import { getConfig } from "../config.js";
12
  import { buildInstructions } from "./shared-utils.js";
13
 
 
 
 
 
 
 
 
 
 
14
  /**
15
  * Convert a ChatCompletionRequest to a CodexResponsesRequest.
16
  *
17
  * Mapping:
18
- * - system messages β†’ instructions field
19
  * - user/assistant messages β†’ input array
20
  * - model β†’ resolved model ID
21
  * - reasoning_effort β†’ reasoning.effort
@@ -24,20 +33,22 @@ export function translateToCodexRequest(
24
  req: ChatCompletionRequest,
25
  previousResponseId?: string | null,
26
  ): CodexResponsesRequest {
27
- // Collect system messages as instructions
28
- const systemMessages = req.messages.filter((m) => m.role === "system");
 
 
29
  const userInstructions =
30
- systemMessages.map((m) => m.content).join("\n\n") ||
31
  "You are a helpful assistant.";
32
  const instructions = buildInstructions(userInstructions);
33
 
34
  // Build input items from non-system messages
35
  const input: CodexInputItem[] = [];
36
  for (const msg of req.messages) {
37
- if (msg.role === "system") continue;
38
  input.push({
39
  role: msg.role as "user" | "assistant",
40
- content: msg.content,
41
  });
42
  }
43
 
 
2
  * Translate OpenAI Chat Completions request β†’ Codex Responses API request.
3
  */
4
 
5
+ import type { ChatCompletionRequest, ChatMessage } from "../types/openai.js";
6
  import type {
7
  CodexResponsesRequest,
8
  CodexInputItem,
 
11
  import { getConfig } from "../config.js";
12
  import { buildInstructions } from "./shared-utils.js";
13
 
14
+ /** Extract plain text from content (string or array of content parts). */
15
+ function extractText(content: ChatMessage["content"]): string {
16
+ if (typeof content === "string") return content;
17
+ return content
18
+ .filter((p) => p.type === "text" && p.text)
19
+ .map((p) => p.text!)
20
+ .join("\n");
21
+ }
22
+
23
  /**
24
  * Convert a ChatCompletionRequest to a CodexResponsesRequest.
25
  *
26
  * Mapping:
27
+ * - system/developer messages β†’ instructions field
28
  * - user/assistant messages β†’ input array
29
  * - model β†’ resolved model ID
30
  * - reasoning_effort β†’ reasoning.effort
 
33
  req: ChatCompletionRequest,
34
  previousResponseId?: string | null,
35
  ): CodexResponsesRequest {
36
+ // Collect system/developer messages as instructions
37
+ const systemMessages = req.messages.filter(
38
+ (m) => m.role === "system" || m.role === "developer",
39
+ );
40
  const userInstructions =
41
+ systemMessages.map((m) => extractText(m.content)).join("\n\n") ||
42
  "You are a helpful assistant.";
43
  const instructions = buildInstructions(userInstructions);
44
 
45
  // Build input items from non-system messages
46
  const input: CodexInputItem[] = [];
47
  for (const msg of req.messages) {
48
+ if (msg.role === "system" || msg.role === "developer") continue;
49
  input.push({
50
  role: msg.role as "user" | "assistant",
51
+ content: extractText(msg.content),
52
  });
53
  }
54
 
src/types/openai.ts CHANGED
@@ -5,9 +5,14 @@ import { z } from "zod";
5
 
6
  // --- Request ---
7
 
 
 
 
 
 
8
  export const ChatMessageSchema = z.object({
9
- role: z.enum(["system", "user", "assistant"]),
10
- content: z.string(),
11
  name: z.string().optional(),
12
  });
13
 
 
5
 
6
  // --- Request ---
7
 
8
+ const ContentPartSchema = z.object({
9
+ type: z.string(),
10
+ text: z.string().optional(),
11
+ }).passthrough();
12
+
13
  export const ChatMessageSchema = z.object({
14
+ role: z.enum(["system", "developer", "user", "assistant"]),
15
+ content: z.union([z.string(), z.array(ContentPartSchema)]),
16
  name: z.string().optional(),
17
  });
18