// Copyright Envoy AI Gateway Authors
// SPDX-License-Identifier: Apache-2.0
// The full text of the Apache license is available in the LICENSE file at
// the root of the repo.

package translator

import (
	"encoding/json"
	"fmt"
	"testing"

	"github.com/google/go-cmp/cmp"
	"github.com/google/go-cmp/cmp/cmpopts"
	openaigo "github.com/openai/openai-go/v2"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
	"google.golang.org/genai"
	"k8s.io/utils/ptr"

	"github.com/envoyproxy/ai-gateway/internal/apischema/openai"
	"github.com/envoyproxy/ai-gateway/internal/internalapi"
)

// TestOpenAIMessagesToGeminiContents verifies that a full OpenAI message
// history (developer, system, user, assistant with tool calls, and tool
// results) is translated into the expected Gemini contents and system
// instruction.
func TestOpenAIMessagesToGeminiContents(t *testing.T) {
	tests := []struct {
		name                      string
		messages                  []openai.ChatCompletionMessageParamUnion
		expectedErrorMsg          string
		expectedContents          []genai.Content
		expectedSystemInstruction *genai.Content
	}{
		{
			name: "happy-path",
			messages: []openai.ChatCompletionMessageParamUnion{
				{
					OfDeveloper: &openai.ChatCompletionDeveloperMessageParam{
						Role:    openai.ChatMessageRoleDeveloper,
						Content: openai.ContentUnion{Value: "This is a developer message"},
					},
				},
				{
					OfSystem: &openai.ChatCompletionSystemMessageParam{
						Role:    openai.ChatMessageRoleSystem,
						Content: openai.ContentUnion{Value: "This is a system message"},
					},
				},
				{
					OfUser: &openai.ChatCompletionUserMessageParam{
						Role:    openai.ChatMessageRoleUser,
						Content: openai.StringOrUserRoleContentUnion{Value: "This is a user message"},
					},
				},
				{
					OfAssistant: &openai.ChatCompletionAssistantMessageParam{
						Role:    openai.ChatMessageRoleAssistant,
						Audio:   openai.ChatCompletionAssistantMessageParamAudio{},
						Content: openai.StringOrAssistantRoleContentUnion{Value: "This is a assistant message"},
						ToolCalls: []openai.ChatCompletionMessageToolCallParam{
							{
								ID: ptr.To("tool_call_1"),
								Function: openai.ChatCompletionMessageToolCallFunctionParam{
									Name:      "example_tool",
									Arguments: "{\"param1\":\"value1\"}",
								},
								Type: openai.ChatCompletionMessageToolCallTypeFunction,
							},
						},
					},
				},
				{
					OfTool: &openai.ChatCompletionToolMessageParam{
						ToolCallID: "tool_call_1",
						Content:    openai.ContentUnion{Value: "This is a message from the example_tool"},
					},
				},
			},
			expectedContents: []genai.Content{
				{
					Parts: []*genai.Part{
						{Text: "This is a user message"},
					},
					Role: genai.RoleUser,
				},
				{
					Role: genai.RoleModel,
					Parts: []*genai.Part{
						{
							FunctionCall: &genai.FunctionCall{
								Name: "example_tool",
								Args: map[string]any{
									"param1": "value1",
								},
							},
						},
						{Text: "This is a assistant message"},
					},
				},
				{
					Role: genai.RoleUser,
					Parts: []*genai.Part{
						{
							FunctionResponse: &genai.FunctionResponse{
								Name: "example_tool",
								Response: map[string]any{
									"output": "This is a message from the example_tool",
								},
							},
						},
					},
				},
			},
			expectedSystemInstruction: &genai.Content{
				Parts: []*genai.Part{
					{Text: "This is a developer message"},
					{Text: "This is a system message"},
				},
			},
		},
	}

	for _, tc := range tests {
		t.Run(tc.name, func(t *testing.T) {
			contents, systemInstruction, err := openAIMessagesToGeminiContents(tc.messages, "gemini-3-pro")

			// Branch on the expectation, not on err: the previous condition
			// (expectedErrorMsg != "" || err != nil) let an unexpected error
			// pass silently, since Contains(err.Error(), "") is always true.
			if tc.expectedErrorMsg != "" {
				require.Error(t, err)
				assert.Contains(t, err.Error(), tc.expectedErrorMsg)
			} else {
				require.NoError(t, err)
				if d := cmp.Diff(tc.expectedContents, contents); d != "" {
					t.Errorf("Gemini Contents mismatch (-want +got):\n%s", d)
				}
				if d := cmp.Diff(tc.expectedSystemInstruction, systemInstruction); d != "" {
					t.Errorf("SystemInstruction mismatch (-want +got):\n%s", d)
				}
			}
		})
	}
}

// TestAssistantMsgToGeminiParts tests the assistantMsgToGeminiParts function.
func TestAssistantMsgToGeminiParts(t *testing.T) {
	tests := []struct {
		name              string
		msg               openai.ChatCompletionAssistantMessageParam
		expectedParts     []*genai.Part
		expectedToolCalls map[string]string
		expectedErrorMsg  string
	}{
		{
			name: "empty text content",
			msg: openai.ChatCompletionAssistantMessageParam{
				Content: openai.StringOrAssistantRoleContentUnion{
					Value: "",
				},
				Role: openai.ChatMessageRoleAssistant,
			},
			expectedParts:     nil,
			expectedToolCalls: map[string]string{},
		},
		{
			name: "invalid content type",
			msg: openai.ChatCompletionAssistantMessageParam{
				Content: openai.StringOrAssistantRoleContentUnion{
					Value: 10, // Invalid type.
				},
				Role: openai.ChatMessageRoleAssistant,
			},
			expectedParts:     nil,
			expectedToolCalls: map[string]string{},
			expectedErrorMsg:  "unsupported content type in assistant message: int",
		},
		{
			name: "simple text content",
			msg: openai.ChatCompletionAssistantMessageParam{
				Content: openai.StringOrAssistantRoleContentUnion{
					Value: "Hello, I'm an AI assistant",
				},
				Role: openai.ChatMessageRoleAssistant,
			},
			expectedParts: []*genai.Part{
				genai.NewPartFromText("Hello, I'm an AI assistant"),
			},
			expectedToolCalls: map[string]string{},
		},
		// Currently nothing is returned for refusal messages.
		{
			name: "text content with refusal message",
			msg: openai.ChatCompletionAssistantMessageParam{
				Content: openai.StringOrAssistantRoleContentUnion{
					Value: []openai.ChatCompletionAssistantMessageParamContent{
						{
							Type:    openai.ChatCompletionAssistantMessageParamContentTypeRefusal,
							Refusal: ptr.To("Response was refused"),
						},
					},
				},
				Role: openai.ChatMessageRoleAssistant,
			},
			expectedParts:     nil,
			expectedToolCalls: map[string]string{},
		},
		{
			name: "content with an array of texts",
			msg: openai.ChatCompletionAssistantMessageParam{
				Content: openai.StringOrAssistantRoleContentUnion{
					Value: []openai.ChatCompletionAssistantMessageParamContent{
						{
							Type: openai.ChatCompletionAssistantMessageParamContentTypeText,
							Text: ptr.To("Hello, I'm an AI assistant"),
						},
						{
							Type: openai.ChatCompletionAssistantMessageParamContentTypeText,
							Text: ptr.To("How can I assist you today?"),
						},
					},
				},
				Role: openai.ChatMessageRoleAssistant,
			},
			expectedParts: []*genai.Part{
				genai.NewPartFromText("Hello, I'm an AI assistant"),
				genai.NewPartFromText("How can I assist you today?"),
			},
			expectedToolCalls: map[string]string{},
		},
		{
			name: "tool calls without content",
			msg: openai.ChatCompletionAssistantMessageParam{
				Content: openai.StringOrAssistantRoleContentUnion{
					Value: "",
				},
				Role: openai.ChatMessageRoleAssistant,
				ToolCalls: []openai.ChatCompletionMessageToolCallParam{
					{
						ID: ptr.To("call_123"),
						Function: openai.ChatCompletionMessageToolCallFunctionParam{
							Name:      "get_weather",
							Arguments: `{"location":"New York","unit":"celsius"}`,
						},
						Type: openai.ChatCompletionMessageToolCallTypeFunction,
					},
				},
			},
			expectedParts: []*genai.Part{
				{
					FunctionCall: &genai.FunctionCall{
						Args: map[string]any{"location": "New York", "unit": "celsius"},
						Name: "get_weather",
					},
				},
			},
			expectedToolCalls: map[string]string{
				"call_123": "get_weather",
			},
		},
		{
			name: "multiple tool calls with content",
			msg: openai.ChatCompletionAssistantMessageParam{
				Content: openai.StringOrAssistantRoleContentUnion{
					Value: "I'll help you with that",
				},
				Role: openai.ChatMessageRoleAssistant,
				ToolCalls: []openai.ChatCompletionMessageToolCallParam{
					{
						ID: ptr.To("call_789"),
						Function: openai.ChatCompletionMessageToolCallFunctionParam{
							Name:      "get_weather",
							Arguments: `{"location":"New York","unit":"celsius"}`,
						},
						Type: openai.ChatCompletionMessageToolCallTypeFunction,
					},
					{
						ID: ptr.To("call_abc"),
						Function: openai.ChatCompletionMessageToolCallFunctionParam{
							Name:      "get_time",
							Arguments: `{"timezone":"EST"}`,
						},
						Type: openai.ChatCompletionMessageToolCallTypeFunction,
					},
				},
			},
			expectedParts: []*genai.Part{
				genai.NewPartFromFunctionCall("get_weather", map[string]any{
					"location": "New York",
					"unit":     "celsius",
				}),
				genai.NewPartFromFunctionCall("get_time", map[string]any{
					"timezone": "EST",
				}),
				genai.NewPartFromText("I'll help you with that"),
			},
			expectedToolCalls: map[string]string{
				"call_789": "get_weather",
				"call_abc": "get_time",
			},
		},
		{
			name: "invalid tool call arguments",
			msg: openai.ChatCompletionAssistantMessageParam{
				Role: openai.ChatMessageRoleAssistant,
				ToolCalls: []openai.ChatCompletionMessageToolCallParam{
					{
						ID: ptr.To("call_def"),
						Function: openai.ChatCompletionMessageToolCallFunctionParam{
							Name:      "get_weather",
							Arguments: `{"location":"New York"`, // Invalid JSON.
						},
						Type: openai.ChatCompletionMessageToolCallTypeFunction,
					},
				},
			},
			expectedErrorMsg: "function arguments should be valid json string",
		},
		{
			name: "nil content",
			msg: openai.ChatCompletionAssistantMessageParam{
				Role: openai.ChatMessageRoleAssistant,
			},
			expectedParts:     nil,
			expectedToolCalls: map[string]string{},
		},
		{
			name: "thinking content with valid text",
			msg: openai.ChatCompletionAssistantMessageParam{
				Content: openai.StringOrAssistantRoleContentUnion{
					Value: []openai.ChatCompletionAssistantMessageParamContent{
						{
							Type: openai.ChatCompletionAssistantMessageParamContentTypeThinking,
							Text: ptr.To("Let me think step by step..."),
						},
					},
				},
				Role: openai.ChatMessageRoleAssistant,
			},
			expectedParts: []*genai.Part{
				{
					Text:    "Let me think step by step...",
					Thought: true,
				},
			},
			expectedToolCalls: map[string]string{},
		},
		{
			name: "thinking content with nil text",
			msg: openai.ChatCompletionAssistantMessageParam{
				Content: openai.StringOrAssistantRoleContentUnion{
					Value: []openai.ChatCompletionAssistantMessageParamContent{
						{
							Type: openai.ChatCompletionAssistantMessageParamContentTypeThinking,
							Text: nil,
						},
					},
				},
				Role: openai.ChatMessageRoleAssistant,
			},
			expectedParts:     nil,
			expectedToolCalls: map[string]string{},
		},
		{
			name: "thinking content with empty text",
			msg: openai.ChatCompletionAssistantMessageParam{
				Content: openai.StringOrAssistantRoleContentUnion{
					Value: []openai.ChatCompletionAssistantMessageParamContent{
						{
							Type: openai.ChatCompletionAssistantMessageParamContentTypeThinking,
							Text: ptr.To(""),
						},
					},
				},
				Role: openai.ChatMessageRoleAssistant,
			},
			expectedParts:     nil,
			expectedToolCalls: map[string]string{},
		},
		{
			name: "mixed content with thinking and regular text",
			msg: openai.ChatCompletionAssistantMessageParam{
				Content: openai.StringOrAssistantRoleContentUnion{
					Value: []openai.ChatCompletionAssistantMessageParamContent{
						{
							Type: openai.ChatCompletionAssistantMessageParamContentTypeThinking,
							Text: ptr.To("First, I need to analyze this problem..."),
						},
						{
							Type: openai.ChatCompletionAssistantMessageParamContentTypeText,
							Text: ptr.To("Based on my analysis, here's the answer."),
						},
					},
				},
				Role: openai.ChatMessageRoleAssistant,
			},
			expectedParts: []*genai.Part{
				{
					Text:    "First, I need to analyze this problem...",
					Thought: true,
				},
				genai.NewPartFromText("Based on my analysis, here's the answer."),
			},
			expectedToolCalls: map[string]string{},
		},
		{
			name: "thinking content mixed with tool calls",
			msg: openai.ChatCompletionAssistantMessageParam{
				Content: openai.StringOrAssistantRoleContentUnion{
					Value: []openai.ChatCompletionAssistantMessageParamContent{
						{
							Type: openai.ChatCompletionAssistantMessageParamContentTypeThinking,
							Text: ptr.To("I need to call a function to get the weather"),
						},
						{
							Type: openai.ChatCompletionAssistantMessageParamContentTypeText,
							Text: ptr.To("Let me get the weather for you"),
						},
					},
				},
				Role: openai.ChatMessageRoleAssistant,
				ToolCalls: []openai.ChatCompletionMessageToolCallParam{
					{
						ID: ptr.To("call_weather"),
						Function: openai.ChatCompletionMessageToolCallFunctionParam{
							Name:      "get_weather",
							Arguments: `{"location":"San Francisco"}`,
						},
						Type: openai.ChatCompletionMessageToolCallTypeFunction,
					},
				},
			},
			expectedParts: []*genai.Part{
				genai.NewPartFromFunctionCall("get_weather", map[string]any{
					"location": "San Francisco",
				}),
				{
					Text:    "I need to call a function to get the weather",
					Thought: true,
				},
				genai.NewPartFromText("Let me get the weather for you"),
			},
			expectedToolCalls: map[string]string{
				"call_weather": "get_weather",
			},
		},
	}

	for _, tc := range tests {
		t.Run(tc.name, func(t *testing.T) {
			parts, toolCalls, err := assistantMsgToGeminiParts(tc.msg)

			// Branch on the expectation, not on err: the previous condition
			// (expectedErrorMsg != "" || err != nil) let an unexpected error
			// pass silently, since Contains(err.Error(), "") is always true.
			if tc.expectedErrorMsg != "" {
				require.Error(t, err)
				assert.Contains(t, err.Error(), tc.expectedErrorMsg)
			} else {
				require.NoError(t, err)
				if d := cmp.Diff(tc.expectedParts, parts); d != "" {
					t.Errorf("Parts mismatch (-want +got):\n%s", d)
				}
				if d := cmp.Diff(tc.expectedToolCalls, toolCalls); d != "" {
					t.Errorf("Tools mismatch (-want +got):\n%s", d)
				}
			}
		})
	}
}

// TestDeveloperMsgToGeminiParts tests the developerMsgToGeminiParts function
// with string content, array content, and an unsupported content type.
func TestDeveloperMsgToGeminiParts(t *testing.T) {
	tests := []struct {
		name             string
		msg              openai.ChatCompletionDeveloperMessageParam
		expectedParts    []*genai.Part
		expectedErrorMsg string
	}{
		{
			name: "string content",
			msg: openai.ChatCompletionDeveloperMessageParam{
				Content: openai.ContentUnion{
					Value: "This is a system message",
				},
				Role: openai.ChatMessageRoleSystem,
			},
			expectedParts: []*genai.Part{
				{Text: "This is a system message"},
			},
		},
		{
			name: "content as string array",
			msg: openai.ChatCompletionDeveloperMessageParam{
				Content: openai.ContentUnion{
					Value: []openai.ChatCompletionContentPartTextParam{
						{Text: "This is a system message"},
						{Text: "It can be multiline"},
					},
				},
				Role: openai.ChatMessageRoleSystem,
			},
			expectedParts: []*genai.Part{
				{Text: "This is a system message"},
				{Text: "It can be multiline"},
			},
		},
		{
			name: "invalid content type",
			msg: openai.ChatCompletionDeveloperMessageParam{
				Content: openai.ContentUnion{
					Value: 10, // Invalid type.
				},
				Role: openai.ChatMessageRoleSystem,
			},
			// No expectedParts: the error path never compares parts.
			expectedErrorMsg: "unsupported content type in developer message: int",
		},
	}

	for _, tc := range tests {
		t.Run(tc.name, func(t *testing.T) {
			content, err := developerMsgToGeminiParts(tc.msg)

			// Branch on the expectation, not on err: the previous condition
			// (expectedErrorMsg != "" || err != nil) let an unexpected error
			// pass silently, since Contains(err.Error(), "") is always true.
			if tc.expectedErrorMsg != "" {
				require.Error(t, err)
				assert.Contains(t, err.Error(), tc.expectedErrorMsg)
			} else {
				require.NoError(t, err)
				if d := cmp.Diff(tc.expectedParts, content); d != "" {
					t.Errorf("Content mismatch (-want +got):\n%s", d)
				}
			}
		})
	}
}

// TestToolMsgToGeminiParts tests the toolMsgToGeminiParts function, which
// maps an OpenAI tool-result message to a Gemini FunctionResponse part by
// resolving the tool-call ID against the known tool calls.
func TestToolMsgToGeminiParts(t *testing.T) {
	tests := []struct {
		name             string
		msg              openai.ChatCompletionToolMessageParam
		knownToolCalls   map[string]string
		expectedPart     *genai.Part
		expectedErrorMsg string
	}{
		{
			name: "Tool message with invalid content",
			msg: openai.ChatCompletionToolMessageParam{
				Content: openai.ContentUnion{
					Value: 10, // Invalid type.
				},
				Role:       openai.ChatMessageRoleTool,
				ToolCallID: "tool_123",
			},
			knownToolCalls:   map[string]string{"tool_123": "get_weather"},
			expectedErrorMsg: "unsupported content type in tool message: int",
		},
		{
			name: "Tool message with string content",
			msg: openai.ChatCompletionToolMessageParam{
				Content: openai.ContentUnion{
					Value: "This is a tool message",
				},
				Role:       openai.ChatMessageRoleTool,
				ToolCallID: "tool_123",
			},
			knownToolCalls: map[string]string{"tool_123": "get_weather"},
			expectedPart: &genai.Part{
				FunctionResponse: &genai.FunctionResponse{
					Name:     "get_weather",
					Response: map[string]any{"output": "This is a tool message"},
				},
			},
		},
		{
			name: "Tool message with string array content",
			msg: openai.ChatCompletionToolMessageParam{
				Content: openai.ContentUnion{
					Value: []openai.ChatCompletionContentPartTextParam{
						{
							Type: string(openai.ChatCompletionContentPartTextTypeText),
							Text: "This is a tool message. ",
						},
						{
							Type: string(openai.ChatCompletionContentPartTextTypeText),
							Text: "And this is another part",
						},
					},
				},
				Role:       openai.ChatMessageRoleTool,
				ToolCallID: "tool_123",
			},
			knownToolCalls: map[string]string{"tool_123": "get_weather"},
			expectedPart: &genai.Part{
				FunctionResponse: &genai.FunctionResponse{
					Name:     "get_weather",
					Response: map[string]any{"output": "This is a tool message. And this is another part"},
				},
			},
		},
	}
	for _, tc := range tests {
		t.Run(tc.name, func(t *testing.T) {
			parts, err := toolMsgToGeminiParts(tc.msg, tc.knownToolCalls)

			// Branch on the expectation, not on err: the previous condition
			// (expectedErrorMsg != "" || err != nil) let an unexpected error
			// pass silently, since Contains(err.Error(), "") is always true.
			if tc.expectedErrorMsg != "" {
				require.Error(t, err)
				assert.Contains(t, err.Error(), tc.expectedErrorMsg)
			} else {
				require.NoError(t, err)
				if d := cmp.Diff(tc.expectedPart, parts); d != "" {
					t.Errorf("Parts mismatch (-want +got):\n%s", d)
				}
			}
		})
	}
}

// TestUserMsgToGeminiParts tests the userMsgToGeminiParts function with
// string, array, image-URL, data-URI, audio, and detail-level inputs.
func TestUserMsgToGeminiParts(t *testing.T) {
	tests := []struct {
		name           string
		msg            openai.ChatCompletionUserMessageParam
		expectedParts  []*genai.Part
		expectedErrMsg string
	}{
		{
			name: "simple string content",
			msg: openai.ChatCompletionUserMessageParam{
				Role: openai.ChatMessageRoleUser,
				Content: openai.StringOrUserRoleContentUnion{
					Value: "Hello, how are you?",
				},
			},
			expectedParts: []*genai.Part{
				{Text: "Hello, how are you?"},
			},
		},
		{
			name: "empty string content",
			msg: openai.ChatCompletionUserMessageParam{
				Role: openai.ChatMessageRoleUser,
				Content: openai.StringOrUserRoleContentUnion{
					Value: "",
				},
			},
			expectedParts: nil,
		},
		{
			name: "array with multiple text contents",
			msg: openai.ChatCompletionUserMessageParam{
				Role: openai.ChatMessageRoleUser,
				Content: openai.StringOrUserRoleContentUnion{
					Value: []openai.ChatCompletionContentPartUserUnionParam{
						{
							OfText: &openai.ChatCompletionContentPartTextParam{
								Type: string(openai.ChatCompletionContentPartTextTypeText),
								Text: "First message",
							},
						},
						{
							OfText: &openai.ChatCompletionContentPartTextParam{
								Type: string(openai.ChatCompletionContentPartTextTypeText),
								Text: "Second message",
							},
						},
					},
				},
			},
			expectedParts: []*genai.Part{
				{Text: "First message"},
				{Text: "Second message"},
			},
		},
		{
			name: "image content with URL",
			msg: openai.ChatCompletionUserMessageParam{
				Role: openai.ChatMessageRoleUser,
				Content: openai.StringOrUserRoleContentUnion{
					Value: []openai.ChatCompletionContentPartUserUnionParam{
						{
							OfImageURL: &openai.ChatCompletionContentPartImageParam{
								Type: openai.ChatCompletionContentPartImageTypeImageURL,
								ImageURL: openai.ChatCompletionContentPartImageImageURLParam{
									URL: "https://example.com/image.jpg",
								},
							},
						},
					},
				},
			},
			expectedParts: []*genai.Part{
				{FileData: &genai.FileData{FileURI: "https://example.com/image.jpg", MIMEType: "image/jpeg"}},
			},
		},
		{
			name: "empty image URL",
			msg: openai.ChatCompletionUserMessageParam{
				Role: openai.ChatMessageRoleUser,
				Content: openai.StringOrUserRoleContentUnion{
					Value: []openai.ChatCompletionContentPartUserUnionParam{
						{
							OfImageURL: &openai.ChatCompletionContentPartImageParam{
								Type: openai.ChatCompletionContentPartImageTypeImageURL,
								ImageURL: openai.ChatCompletionContentPartImageImageURLParam{
									URL: "",
								},
							},
						},
					},
				},
			},
			expectedParts: nil,
		},
		{
			name: "invalid image URL",
			msg: openai.ChatCompletionUserMessageParam{
				Role: openai.ChatMessageRoleUser,
				Content: openai.StringOrUserRoleContentUnion{
					Value: []openai.ChatCompletionContentPartUserUnionParam{
						{
							OfImageURL: &openai.ChatCompletionContentPartImageParam{
								Type: openai.ChatCompletionContentPartImageTypeImageURL,
								ImageURL: openai.ChatCompletionContentPartImageImageURLParam{
									URL: ":%invalid-url%:",
								},
							},
						},
					},
				},
			},
			expectedErrMsg: "invalid image URL",
		},
		{
			name: "mixed content - text and image",
			msg: openai.ChatCompletionUserMessageParam{
				Role: openai.ChatMessageRoleUser,
				Content: openai.StringOrUserRoleContentUnion{
					Value: []openai.ChatCompletionContentPartUserUnionParam{
						{
							OfText: &openai.ChatCompletionContentPartTextParam{
								Type: string(openai.ChatCompletionContentPartTextTypeText),
								Text: "Check this image:",
							},
						},
						{
							OfImageURL: &openai.ChatCompletionContentPartImageParam{
								Type: openai.ChatCompletionContentPartImageTypeImageURL,
								ImageURL: openai.ChatCompletionContentPartImageImageURLParam{
									URL: "https://example.com/image.jpg",
								},
							},
						},
					},
				},
			},
			expectedParts: []*genai.Part{
				{Text: "Check this image:"},
				{FileData: &genai.FileData{FileURI: "https://example.com/image.jpg", MIMEType: "image/jpeg"}},
			},
		},
		{
			name: "data URI image content",
			msg: openai.ChatCompletionUserMessageParam{
				Role: openai.ChatMessageRoleUser,
				Content: openai.StringOrUserRoleContentUnion{
					Value: []openai.ChatCompletionContentPartUserUnionParam{
						{
							OfImageURL: &openai.ChatCompletionContentPartImageParam{
								Type: openai.ChatCompletionContentPartImageTypeImageURL,
								ImageURL: openai.ChatCompletionContentPartImageImageURLParam{
									URL: "",
								},
							},
						},
					},
				},
			},
			expectedParts: []*genai.Part{
				{
					InlineData: &genai.Blob{
						Data:     []byte("This field is ignored during testcase comparison"),
						MIMEType: "image/jpeg",
					},
				},
			},
		},
		{
			name: "invalid data URI format",
			msg: openai.ChatCompletionUserMessageParam{
				Role: openai.ChatMessageRoleUser,
				Content: openai.StringOrUserRoleContentUnion{
					Value: []openai.ChatCompletionContentPartUserUnionParam{
						{
							OfImageURL: &openai.ChatCompletionContentPartImageParam{
								Type: openai.ChatCompletionContentPartImageTypeImageURL,
								ImageURL: openai.ChatCompletionContentPartImageImageURLParam{
									URL: "data:invalid-format",
								},
							},
						},
					},
				},
			},
			expectedErrMsg: "data uri does not have a valid format",
		},
		{
			name: "audio content - not supported",
			msg: openai.ChatCompletionUserMessageParam{
				Role: openai.ChatMessageRoleUser,
				Content: openai.StringOrUserRoleContentUnion{
					Value: []openai.ChatCompletionContentPartUserUnionParam{
						{
							OfInputAudio: &openai.ChatCompletionContentPartInputAudioParam{
								Type: "audio",
							},
						},
					},
				},
			},
			expectedErrMsg: "audio content not supported yet",
		},
		{
			name: "unsupported content type",
			msg: openai.ChatCompletionUserMessageParam{
				Role: openai.ChatMessageRoleUser,
				Content: openai.StringOrUserRoleContentUnion{
					Value: 42, // not a string or array.
				},
			},
			expectedErrMsg: "unsupported content type in user message: int",
		},
		{
			name: "image with low detail",
			msg: openai.ChatCompletionUserMessageParam{
				Role: openai.ChatMessageRoleUser,
				Content: openai.StringOrUserRoleContentUnion{
					Value: []openai.ChatCompletionContentPartUserUnionParam{
						{
							OfImageURL: &openai.ChatCompletionContentPartImageParam{
								Type: openai.ChatCompletionContentPartImageTypeImageURL,
								ImageURL: openai.ChatCompletionContentPartImageImageURLParam{
									URL:    "https://example.com/image.jpg",
									Detail: openai.ChatCompletionContentPartImageImageURLDetailLow,
								},
							},
						},
					},
				},
			},
			expectedParts: []*genai.Part{
				{
					FileData: &genai.FileData{
						FileURI:  "https://example.com/image.jpg",
						MIMEType: "image/jpeg",
					},
					MediaResolution: &genai.PartMediaResolution{
						Level: genai.PartMediaResolutionLevelMediaResolutionLow,
					},
				},
			},
		},
		{
			name: "image with medium detail",
			msg: openai.ChatCompletionUserMessageParam{
				Role: openai.ChatMessageRoleUser,
				Content: openai.StringOrUserRoleContentUnion{
					Value: []openai.ChatCompletionContentPartUserUnionParam{
						{
							OfImageURL: &openai.ChatCompletionContentPartImageParam{
								Type: openai.ChatCompletionContentPartImageTypeImageURL,
								ImageURL: openai.ChatCompletionContentPartImageImageURLParam{
									URL:    "https://example.com/image.jpg",
									Detail: openai.ChatCompletionContentPartImageImageURLDetailMedium,
								},
							},
						},
					},
				},
			},
			expectedParts: []*genai.Part{
				{
					FileData: &genai.FileData{
						FileURI:  "https://example.com/image.jpg",
						MIMEType: "image/jpeg",
					},
					MediaResolution: &genai.PartMediaResolution{
						Level: genai.PartMediaResolutionLevelMediaResolutionMedium,
					},
				},
			},
		},
		{
			name: "image with high detail",
			msg: openai.ChatCompletionUserMessageParam{
				Role: openai.ChatMessageRoleUser,
				Content: openai.StringOrUserRoleContentUnion{
					Value: []openai.ChatCompletionContentPartUserUnionParam{
						{
							OfImageURL: &openai.ChatCompletionContentPartImageParam{
								Type: openai.ChatCompletionContentPartImageTypeImageURL,
								ImageURL: openai.ChatCompletionContentPartImageImageURLParam{
									URL:    "https://example.com/image.jpg",
									Detail: openai.ChatCompletionContentPartImageImageURLDetailHigh,
								},
							},
						},
					},
				},
			},
			expectedParts: []*genai.Part{
				{
					FileData: &genai.FileData{
						FileURI:  "https://example.com/image.jpg",
						MIMEType: "image/jpeg",
					},
					MediaResolution: &genai.PartMediaResolution{
						Level: genai.PartMediaResolutionLevelMediaResolutionHigh,
					},
				},
			},
		},
		{
			name: "image with auto detail",
			msg: openai.ChatCompletionUserMessageParam{
				Role: openai.ChatMessageRoleUser,
				Content: openai.StringOrUserRoleContentUnion{
					Value: []openai.ChatCompletionContentPartUserUnionParam{
						{
							OfImageURL: &openai.ChatCompletionContentPartImageParam{
								Type: openai.ChatCompletionContentPartImageTypeImageURL,
								ImageURL: openai.ChatCompletionContentPartImageImageURLParam{
									URL:    "https://example.com/image.jpg",
									Detail: openai.ChatCompletionContentPartImageImageURLDetailAuto,
								},
							},
						},
					},
				},
			},
			expectedParts: []*genai.Part{
				{
					FileData: &genai.FileData{
						FileURI:  "https://example.com/image.jpg",
						MIMEType: "image/jpeg",
					},
					MediaResolution: &genai.PartMediaResolution{
						Level: genai.PartMediaResolutionLevelMediaResolutionUnspecified,
					},
				},
			},
		},
		{
			name: "image with invalid detail",
			msg: openai.ChatCompletionUserMessageParam{
				Role: openai.ChatMessageRoleUser,
				Content: openai.StringOrUserRoleContentUnion{
					Value: []openai.ChatCompletionContentPartUserUnionParam{
						{
							OfImageURL: &openai.ChatCompletionContentPartImageParam{
								Type: openai.ChatCompletionContentPartImageTypeImageURL,
								ImageURL: openai.ChatCompletionContentPartImageImageURLParam{
									URL:    "https://example.com/image.jpg",
									Detail: openai.ChatCompletionContentPartImageImageURLDetail("invalid"),
								},
							},
						},
					},
				},
			},
			expectedErrMsg: "invalid Detail:",
		},
	}

	for _, tc := range tests {
		t.Run(tc.name, func(t *testing.T) {
			parts, err := userMsgToGeminiParts(tc.msg, "gemini-3-pro")

			// Branch on the expectation, not on err: the previous condition
			// (expectedErrMsg != "" || err != nil) let an unexpected error
			// pass silently, since Contains(err.Error(), "") is always true.
			if tc.expectedErrMsg != "" {
				require.Error(t, err)
				assert.Contains(t, err.Error(), tc.expectedErrMsg)
			} else {
				require.NoError(t, err)
				// Blob.Data is ignored: the decoded data-URI bytes are not
				// spelled out in the fixtures.
				if d := cmp.Diff(tc.expectedParts, parts, cmpopts.IgnoreFields(genai.Blob{}, "Data")); d != "" {
					t.Errorf("Parts mismatch (-want +got):\n%s", d)
				}
			}
		})
	}
}

// TestOpenAIReqToGeminiGenerationConfig verifies translation of OpenAI
// ChatCompletionRequest tuning and formatting fields into a
// genai.GenerationConfig plus the derived geminiResponseMode. It covers
// plain sampling knobs, stop sequences, the response-format union
// (text / JSON object / JSON schema), guided decoding (choice / regex /
// JSON), conflicting format specifiers, and reasoning-effort mapping for
// thinking-capable models.
func TestOpenAIReqToGeminiGenerationConfig(t *testing.T) {
	tests := []struct {
		name                     string
		input                    *openai.ChatCompletionRequest
		expectedGenerationConfig *genai.GenerationConfig
		expectedResponseMode     geminiResponseMode
		expectedErrMsg           string // non-empty means an error containing this text is expected
		requestModel             internalapi.RequestModel
	}{
		{
			// Exercises every numeric/sampling field at once, including the
			// float64->float32 and int->int32 narrowing on the Gemini side.
			name: "all fields set",
			input: &openai.ChatCompletionRequest{
				Temperature:      ptr.To(0.7),
				TopP:             ptr.To(0.9),
				Seed:             ptr.To(42),
				TopLogProbs:      ptr.To(3),
				LogProbs:         ptr.To(true),
				N:                ptr.To(2),
				MaxTokens:        ptr.To(int64(256)),
				PresencePenalty:  ptr.To(float32(1.1)),
				FrequencyPenalty: ptr.To(float32(0.5)),
				Stop: openaigo.ChatCompletionNewParamsStopUnion{
					OfStringArray: []string{"stop1", "stop2"},
				},
			},
			expectedGenerationConfig: &genai.GenerationConfig{
				Temperature:      ptr.To(float32(0.7)),
				TopP:             ptr.To(float32(0.9)),
				Seed:             ptr.To(int32(42)),
				Logprobs:         ptr.To(int32(3)),
				ResponseLogprobs: true,
				CandidateCount:   2,
				MaxOutputTokens:  256,
				PresencePenalty:  ptr.To(float32(1.1)),
				FrequencyPenalty: ptr.To(float32(0.5)),
				StopSequences:    []string{"stop1", "stop2"},
			},
			expectedResponseMode: responseModeNone,
			requestModel:         "gemini-2.5-flash",
		},
		{
			// An empty request must yield an empty (but non-nil) config.
			name:                     "minimal fields",
			input:                    &openai.ChatCompletionRequest{},
			expectedGenerationConfig: &genai.GenerationConfig{},
			expectedResponseMode:     responseModeNone,
		},
		{
			// Single string form of the stop union becomes a one-element slice.
			name: "stop sequences",
			input: &openai.ChatCompletionRequest{
				Stop: openaigo.ChatCompletionNewParamsStopUnion{
					OfString: openaigo.Opt[string]("stop1"),
				},
			},
			expectedGenerationConfig: &genai.GenerationConfig{
				StopSequences: []string{"stop1"},
			},
			expectedResponseMode: responseModeNone,
			requestModel:         "gemini-2.5-flash",
		},
		{
			name: "text",
			input: &openai.ChatCompletionRequest{
				ResponseFormat: &openai.ChatCompletionResponseFormatUnion{
					OfText: &openai.ChatCompletionResponseFormatTextParam{
						Type: openai.ChatCompletionResponseFormatTypeText,
					},
				},
			},
			expectedGenerationConfig: &genai.GenerationConfig{ResponseMIMEType: "text/plain"},
			expectedResponseMode:     responseModeText,
			requestModel:             "gemini-2.5-flash",
		},
		{
			name: "json object",
			input: &openai.ChatCompletionRequest{
				ResponseFormat: &openai.ChatCompletionResponseFormatUnion{
					OfJSONObject: &openai.ChatCompletionResponseFormatJSONObjectParam{
						Type: openai.ChatCompletionResponseFormatTypeJSONObject,
					},
				},
			},
			expectedGenerationConfig: &genai.GenerationConfig{ResponseMIMEType: "application/json"},
			expectedResponseMode:     responseModeJSON,
			requestModel:             "gemini-2.5-flash",
		},
		{
			// The raw JSON schema is decoded into a map for models that
			// accept a native JSON schema.
			name: "json schema (map)",
			input: &openai.ChatCompletionRequest{
				ResponseFormat: &openai.ChatCompletionResponseFormatUnion{
					OfJSONSchema: &openai.ChatCompletionResponseFormatJSONSchema{
						Type: openai.ChatCompletionResponseFormatTypeJSONSchema,
						JSONSchema: openai.ChatCompletionResponseFormatJSONSchemaJSONSchema{
							Schema: json.RawMessage(`{"type": "string"}`),
						},
					},
				},
			},
			expectedGenerationConfig: &genai.GenerationConfig{
				ResponseMIMEType:   "application/json",
				ResponseJsonSchema: map[string]any{"type": "string"},
			},
			expectedResponseMode: responseModeJSON,
			requestModel:         "gemini-2.5-flash",
		},
		{
			name: "json schema (string)",
			input: &openai.ChatCompletionRequest{
				Model: "gemini-2.5",
				ResponseFormat: &openai.ChatCompletionResponseFormatUnion{
					OfJSONSchema: &openai.ChatCompletionResponseFormatJSONSchema{
						Type: openai.ChatCompletionResponseFormatTypeJSONSchema,
						JSONSchema: openai.ChatCompletionResponseFormatJSONSchemaJSONSchema{
							Schema: json.RawMessage(`{"type":"string"}`),
						},
					},
				},
			},
			expectedGenerationConfig: &genai.GenerationConfig{
				ResponseMIMEType:   "application/json",
				ResponseJsonSchema: map[string]any{"type": "string"},
			},
			expectedResponseMode: responseModeJSON,
			requestModel:         "gemini-2.5-flash",
		},
		{
			name: "json schema (invalid string)",
			input: &openai.ChatCompletionRequest{
				Model: "gemini-2.5",
				ResponseFormat: &openai.ChatCompletionResponseFormatUnion{
					OfJSONSchema: &openai.ChatCompletionResponseFormatJSONSchema{
						Type: openai.ChatCompletionResponseFormatTypeJSONSchema,
						JSONSchema: openai.ChatCompletionResponseFormatJSONSchemaJSONSchema{
							Schema: json.RawMessage(`{"type":`), // invalid JSON.
						},
					},
				},
			},
			expectedErrMsg: "invalid JSON schema",
			requestModel:   "gemini-2.5-flash",
		},
		{
			// Guided choice is translated to Gemini's enum response mode.
			name: "guided choice",
			input: &openai.ChatCompletionRequest{
				GuidedChoice: []string{"Positive", "Negative"},
			},
			expectedGenerationConfig: &genai.GenerationConfig{
				ResponseMIMEType: "text/x.enum",
				ResponseSchema:   &genai.Schema{Type: "STRING", Enum: []string{"Positive", "Negative"}},
			},
			expectedResponseMode: responseModeEnum,
			requestModel:         "gemini-2.5-flash",
		},
		{
			name: "guided regex",
			input: &openai.ChatCompletionRequest{
				GuidedRegex: "\\w+@\\w+\\.com\\n",
			},
			expectedGenerationConfig: &genai.GenerationConfig{
				ResponseMIMEType: "application/json",
				ResponseSchema:   &genai.Schema{Type: "STRING", Pattern: "\\w+@\\w+\\.com\\n"},
			},
			expectedResponseMode: responseModeRegex,
			requestModel:         "gemini-2.5-flash",
		},
		{
			// Unlike the response-format JSON schema, GuidedJSON is passed
			// through as raw JSON rather than decoded into a map.
			name: "guided json",
			input: &openai.ChatCompletionRequest{
				GuidedJSON: json.RawMessage(`{"type": "string"}`),
			},
			expectedGenerationConfig: &genai.GenerationConfig{
				ResponseMIMEType:   "application/json",
				ResponseJsonSchema: json.RawMessage(`{"type": "string"}`),
			},
			expectedResponseMode: responseModeJSON,
		},
		{
			// Only one output-format mechanism may be used at a time.
			name: "multiple format specifiers - ResponseFormat and GuidedChoice",
			input: &openai.ChatCompletionRequest{
				ResponseFormat: &openai.ChatCompletionResponseFormatUnion{
					OfText: &openai.ChatCompletionResponseFormatTextParam{
						Type: openai.ChatCompletionResponseFormatTypeText,
					},
				},
				GuidedChoice: []string{"A", "B"},
			},
			expectedErrMsg: "multiple format specifiers specified",
			requestModel:   "gemini-2.5-flash",
		},
		{
			name: "reasoning effort low",
			input: &openai.ChatCompletionRequest{
				ReasoningEffort: openaigo.ReasoningEffortLow,
			},
			expectedGenerationConfig: &genai.GenerationConfig{
				ThinkingConfig: &genai.ThinkingConfig{
					ThinkingLevel: genai.ThinkingLevelLow,
				},
			},
			expectedResponseMode: responseModeNone,
			requestModel:         "gemini-3-pro",
		},
		{
			// NOTE(review): medium maps to ThinkingLevelHigh here — presumably
			// because Gemini exposes only low/high thinking levels; confirm
			// against the translator implementation.
			name: "reasoning effort medium",
			input: &openai.ChatCompletionRequest{
				ReasoningEffort: openaigo.ReasoningEffortMedium,
			},
			expectedGenerationConfig: &genai.GenerationConfig{
				ThinkingConfig: &genai.ThinkingConfig{
					ThinkingLevel: genai.ThinkingLevelHigh,
				},
			},
			expectedResponseMode: responseModeNone,
			requestModel:         "gemini-3-pro",
		},
		{
			name: "reasoning effort unsupported",
			input: &openai.ChatCompletionRequest{
				ReasoningEffort: openaigo.ReasoningEffortHigh,
			},
			expectedErrMsg: "reasoning effort:",
			requestModel:   "gemini-3-pro",
		},
	}

	for _, tc := range tests {
		t.Run(tc.name, func(t *testing.T) {
			got, responseMode, err := openAIReqToGeminiGenerationConfig(tc.input, tc.requestModel)
			if tc.expectedErrMsg != "" {
				require.ErrorContains(t, err, tc.expectedErrMsg)
			} else {
				require.NoError(t, err)

				// GenerationConfig has unexported fields; ignore them in the diff.
				if diff := cmp.Diff(tc.expectedGenerationConfig, got, cmpopts.IgnoreUnexported(genai.GenerationConfig{})); diff != "" {
					t.Errorf("GenerationConfig mismatch (-want +got):\n%s", diff)
				}

				if responseMode != tc.expectedResponseMode {
					t.Errorf("geminiResponseMode mismatch: got %v, want %v", responseMode, tc.expectedResponseMode)
				}
			}
		})
	}
}

// TestOpenAIToolsToGeminiTools verifies the translation of OpenAI tool
// definitions into Gemini tool declarations, in both schema modes:
// parametersJSONSchemaAvailable=false converts each function's parameters
// into a *genai.Schema, while parametersJSONSchemaAvailable=true passes the
// raw schema through via ParametersJsonSchema without validation.
func TestOpenAIToolsToGeminiTools(t *testing.T) {
	funcParams := map[string]any{
		"type": "object",
		"properties": map[string]any{
			"a": map[string]any{"type": "integer"},
			"b": map[string]any{"type": "integer"},
		},
		"required": []any{"a", "b"},
	}
	tests := []struct {
		name                          string
		openaiTools                   []openai.Tool
		parametersJSONSchemaAvailable bool
		expected                      []genai.Tool
		expectedError                 string // non-empty means an error containing this text is expected
	}{
		{
			name:                          "empty tools with parametersJSONSchemaAvailable=false",
			openaiTools:                   nil,
			parametersJSONSchemaAvailable: false,
			expected:                      nil,
		},
		{
			name:                          "empty tools with parametersJSONSchemaAvailable=true",
			openaiTools:                   nil,
			parametersJSONSchemaAvailable: true,
			expected:                      nil,
		},
		{
			name: "single function tool with parameters - parametersJSONSchemaAvailable=false",
			openaiTools: []openai.Tool{
				{
					Type: openai.ToolTypeFunction,
					Function: &openai.FunctionDefinition{
						Name:        "add",
						Description: "Add two numbers",
						Parameters:  funcParams,
					},
				},
			},
			parametersJSONSchemaAvailable: false,
			expected: []genai.Tool{
				{
					FunctionDeclarations: []*genai.FunctionDeclaration{
						{
							Name:        "add",
							Description: "Add two numbers",
							Parameters: &genai.Schema{
								Type: "object",
								Properties: map[string]*genai.Schema{
									"a": {Type: "integer"},
									"b": {Type: "integer"},
								},
								Required: []string{"a", "b"},
							},
						},
					},
				},
			},
		},
		{
			name: "single function tool with parameters - parametersJSONSchemaAvailable=true",
			openaiTools: []openai.Tool{
				{
					Type: openai.ToolTypeFunction,
					Function: &openai.FunctionDefinition{
						Name:        "add",
						Description: "Add two numbers",
						Parameters:  funcParams,
					},
				},
			},
			parametersJSONSchemaAvailable: true,
			expected: []genai.Tool{
				{
					FunctionDeclarations: []*genai.FunctionDeclaration{
						{
							Name:                 "add",
							Description:          "Add two numbers",
							ParametersJsonSchema: funcParams,
						},
					},
				},
			},
		},
		{
			name: "multiple function tools with nil/empty parameters - parametersJSONSchemaAvailable=false",
			openaiTools: []openai.Tool{
				{
					Type: openai.ToolTypeFunction,
					Function: &openai.FunctionDefinition{
						Name:        "foo",
						Description: "Foo function",
						Parameters:  map[string]any{}, // empty parameters
					},
				},
				{
					Type: openai.ToolTypeFunction,
					Function: &openai.FunctionDefinition{
						Name:        "bar",
						Description: "Bar function",
						Parameters:  nil, // nil parameters
					},
				},
			},
			parametersJSONSchemaAvailable: false,
			expected: []genai.Tool{
				{
					FunctionDeclarations: []*genai.FunctionDeclaration{
						{
							Name:        "foo",
							Description: "Foo function",
							Parameters:  nil,
						},
						{
							Name:        "bar",
							Description: "Bar function",
							Parameters:  nil,
						},
					},
				},
			},
		},
		{
			name: "multiple function tools with nil/empty parameters - parametersJSONSchemaAvailable=true",
			openaiTools: []openai.Tool{
				{
					Type: openai.ToolTypeFunction,
					Function: &openai.FunctionDefinition{
						Name:        "foo",
						Description: "Foo function",
					},
				},
				{
					Type: openai.ToolTypeFunction,
					Function: &openai.FunctionDefinition{
						Name:        "bar",
						Description: "Bar function",
					},
				},
			},
			parametersJSONSchemaAvailable: true,
			expected: []genai.Tool{
				{
					FunctionDeclarations: []*genai.FunctionDeclaration{
						{
							Name:        "foo",
							Description: "Foo function",
							// Fixed from ResponseJsonSchema: this mode populates
							// ParametersJsonSchema, which stays nil when the tool
							// declares no parameters.
							ParametersJsonSchema: nil,
						},
						{
							Name:                 "bar",
							Description:          "Bar function",
							ParametersJsonSchema: nil,
						},
					},
				},
			},
		},
		{
			name: "tool with invalid parameters schema - parametersJSONSchemaAvailable=false",
			openaiTools: []openai.Tool{
				{
					Type: openai.ToolTypeFunction,
					Function: &openai.FunctionDefinition{
						Name:        "bad",
						Description: "Bad function",
						Parameters:  "invalid-json",
					},
				},
			},
			parametersJSONSchemaAvailable: false,
			expectedError:                 "invalid JSON schema for parameters in tool bad: expected map[string]any",
		},
		{
			name: "tool with invalid parameters schema - parametersJSONSchemaAvailable=true",
			openaiTools: []openai.Tool{
				{
					Type: openai.ToolTypeFunction,
					Function: &openai.FunctionDefinition{
						Name:        "bad",
						Description: "Bad function",
						Parameters:  "invalid-json",
					},
				},
			},
			parametersJSONSchemaAvailable: true,
			expected: []genai.Tool{
				{
					FunctionDeclarations: []*genai.FunctionDeclaration{
						{
							Description: "Bad function",
							Name:        "bad",
							// ai-gateway does not validate schema, upstream will be expected to validate the bad json param
							ParametersJsonSchema: "invalid-json",
						},
					},
				},
			},
		},
		{
			name: "complex nested schema - parametersJSONSchemaAvailable=false",
			openaiTools: []openai.Tool{
				{
					Type: openai.ToolTypeFunction,
					Function: &openai.FunctionDefinition{
						Name:        "complex_tool",
						Description: "Complex tool with nested parameters",
						Parameters: map[string]any{
							"type": "object",
							"properties": map[string]any{
								"user": map[string]any{
									"type": "object",
									"properties": map[string]any{
										"name": map[string]any{"type": "string"},
										"age":  map[string]any{"type": "integer"},
									},
									"required": []any{"name"},
								},
								"items": map[string]any{
									"type": "array",
									"items": map[string]any{
										"type": "object",
										"properties": map[string]any{
											"id":   map[string]any{"type": "integer"},
											"name": map[string]any{"type": "string"},
										},
									},
								},
							},
							"required": []any{"user"},
						},
					},
				},
			},
			parametersJSONSchemaAvailable: false,
			expected: []genai.Tool{
				{
					FunctionDeclarations: []*genai.FunctionDeclaration{
						{
							Name:        "complex_tool",
							Description: "Complex tool with nested parameters",
							Parameters: &genai.Schema{
								Type: "object",
								Properties: map[string]*genai.Schema{
									"user": {
										Type: "object",
										Properties: map[string]*genai.Schema{
											"name": {Type: "string"},
											"age":  {Type: "integer"},
										},
										Required: []string{"name"},
									},
									"items": {
										Type: "array",
										Items: &genai.Schema{
											Type: "object",
											Properties: map[string]*genai.Schema{
												"id":   {Type: "integer"},
												"name": {Type: "string"},
											},
										},
									},
								},
								Required: []string{"user"},
							},
						},
					},
				},
			},
		},
		{
			// Renamed from "non-function tool is ignored - both modes": the
			// expectation below shows unsupported tool types are rejected,
			// not ignored.
			name: "non-function tool returns error",
			openaiTools: []openai.Tool{
				{
					Type: "retrieval",
				},
			},
			parametersJSONSchemaAvailable: false,
			expectedError:                 "unsupported tool type: retrieval",
		},
		{
			name: "mixed valid and invalid tools - parametersJSONSchemaAvailable=false",
			openaiTools: []openai.Tool{
				{
					Type: "retrieval", // Unsupported type: triggers an error despite the valid tool below.
				},
				{
					Type: openai.ToolTypeFunction,
					Function: &openai.FunctionDefinition{
						Name:        "valid_tool",
						Description: "Valid function tool",
						Parameters: map[string]any{
							"type": "object",
							"properties": map[string]any{
								"param": map[string]any{"type": "string"},
							},
						},
					},
				},
			},
			parametersJSONSchemaAvailable: false,
			expectedError:                 "unsupported tool type: retrieval",
		},
		{
			name: "tool with nil function - should not panic",
			openaiTools: []openai.Tool{
				{
					Type:     openai.ToolTypeFunction,
					Function: nil,
				},
			},
			parametersJSONSchemaAvailable: false,
			expected:                      nil,
		},
		{
			name: "enterprise search tool only",
			openaiTools: []openai.Tool{
				{
					Type: openai.ToolTypeEnterpriseWebSearch,
				},
			},
			parametersJSONSchemaAvailable: false,
			expected: []genai.Tool{
				{
					EnterpriseWebSearch: &genai.EnterpriseWebSearch{},
				},
			},
		},
		{
			// Note: the translator emits the enterprise search tool before
			// the function declarations regardless of input order.
			name: "mixed function and enterprise search tools",
			openaiTools: []openai.Tool{
				{
					Type: openai.ToolTypeFunction,
					Function: &openai.FunctionDefinition{
						Name:        "get_weather",
						Description: "Get current weather",
						Parameters:  funcParams,
					},
				},
				{
					Type: openai.ToolTypeEnterpriseWebSearch,
				},
			},
			parametersJSONSchemaAvailable: false,
			expected: []genai.Tool{
				{
					EnterpriseWebSearch: &genai.EnterpriseWebSearch{},
				},
				{
					FunctionDeclarations: []*genai.FunctionDeclaration{
						{
							Name:        "get_weather",
							Description: "Get current weather",
							Parameters: &genai.Schema{
								Type: "object",
								Properties: map[string]*genai.Schema{
									"a": {Type: "integer"},
									"b": {Type: "integer"},
								},
								Required: []string{"a", "b"},
							},
						},
					},
				},
			},
		},
	}

	for _, tc := range tests {
		t.Run(tc.name, func(t *testing.T) {
			result, err := openAIToolsToGeminiTools(tc.openaiTools, tc.parametersJSONSchemaAvailable)
			if tc.expectedError != "" {
				require.ErrorContains(t, err, tc.expectedError)
			} else {
				require.NoError(t, err)
				// genai.Schema has unexported fields; ignore them in the diff.
				if d := cmp.Diff(tc.expected, result, cmpopts.IgnoreUnexported(genai.Schema{})); d != "" {
					t.Errorf("Result mismatch (-want +got):\n%s", d)
				}
			}
		})
	}
}

// TestOpenAIToolChoiceToGeminiToolConfig verifies translation of the OpenAI
// tool_choice union into a genai.ToolConfig: the string modes "auto"/"none"/
// "required" map to Gemini's Auto/None/Any calling modes, a named tool choice
// restricts calling to that function, and unknown types or string values are
// rejected.
func TestOpenAIToolChoiceToGeminiToolConfig(t *testing.T) {
	tests := []struct {
		name      string
		input     *openai.ChatCompletionToolChoiceUnion
		expected  *genai.ToolConfig
		expectErr string // non-empty means an error containing this text is expected
	}{
		{
			name:     "string auto",
			input:    &openai.ChatCompletionToolChoiceUnion{Value: "auto"},
			expected: &genai.ToolConfig{FunctionCallingConfig: &genai.FunctionCallingConfig{Mode: genai.FunctionCallingConfigModeAuto}},
		},
		{
			name:     "string none",
			input:    &openai.ChatCompletionToolChoiceUnion{Value: "none"},
			expected: &genai.ToolConfig{FunctionCallingConfig: &genai.FunctionCallingConfig{Mode: genai.FunctionCallingConfigModeNone}},
		},
		{
			// OpenAI "required" corresponds to Gemini's "Any" mode (the model
			// must call some function).
			name:     "string required",
			input:    &openai.ChatCompletionToolChoiceUnion{Value: "required"},
			expected: &genai.ToolConfig{FunctionCallingConfig: &genai.FunctionCallingConfig{Mode: genai.FunctionCallingConfigModeAny}},
		},
		{
			// A named tool choice forces the model to call exactly that function.
			name: "ToolChoice struct",
			input: &openai.ChatCompletionToolChoiceUnion{
				Value: openai.ChatCompletionNamedToolChoice{
					Type:     openai.ToolTypeFunction,
					Function: openai.ChatCompletionNamedToolChoiceFunction{Name: "myfunc"},
				},
			},
			expected: &genai.ToolConfig{
				FunctionCallingConfig: &genai.FunctionCallingConfig{
					Mode:                 genai.FunctionCallingConfigModeAny,
					AllowedFunctionNames: []string{"myfunc"},
				},
				RetrievalConfig: nil,
			},
		},
		{
			name:      "unsupported type",
			input:     &openai.ChatCompletionToolChoiceUnion{Value: 123},
			expectErr: "unsupported tool choice type",
		},
		{
			name:      "unsupported string value",
			input:     &openai.ChatCompletionToolChoiceUnion{Value: "invalid"},
			expectErr: "unsupported tool choice: 'invalid'",
		},
	}

	for _, tc := range tests {
		t.Run(tc.name, func(t *testing.T) {
			got, err := openAIToolChoiceToGeminiToolConfig(tc.input)
			if tc.expectErr != "" {
				require.ErrorContains(t, err, tc.expectErr)
			} else {
				require.NoError(t, err)
				// ToolConfig/FunctionCallingConfig carry unexported fields; ignore them.
				if diff := cmp.Diff(tc.expected, got, cmpopts.IgnoreUnexported(genai.ToolConfig{}, genai.FunctionCallingConfig{})); diff != "" {
					t.Errorf("ToolConfig mismatch (-want +got):\n%s", diff)
				}
			}
		})
	}
}

// TestGeminiLogprobsToOpenAILogprobs verifies conversion of a Gemini
// LogprobsResult into the OpenAI logprobs shape: each chosen candidate becomes
// a ChatCompletionTokenLogprob, and the corresponding TopCandidates entry (by
// index) becomes its TopLogprobs list. Missing, nil, or empty top-candidate
// entries yield a nil TopLogprobs.
func TestGeminiLogprobsToOpenAILogprobs(t *testing.T) {
	tests := []struct {
		name     string
		input    genai.LogprobsResult
		expected openai.ChatCompletionChoicesLogprobs
	}{
		{
			name:     "empty logprobs result",
			input:    genai.LogprobsResult{},
			expected: openai.ChatCompletionChoicesLogprobs{},
		},
		{
			name: "single chosen candidate without top candidates",
			input: genai.LogprobsResult{
				ChosenCandidates: []*genai.LogprobsResultCandidate{
					{
						Token:          "hello",
						LogProbability: -0.5,
					},
				},
			},
			expected: openai.ChatCompletionChoicesLogprobs{
				Content: []openai.ChatCompletionTokenLogprob{
					{
						Token:       "hello",
						Logprob:     -0.5,
						TopLogprobs: nil,
					},
				},
			},
		},
		{
			// TopCandidates entries are matched to chosen candidates by index.
			name: "multiple chosen candidates with top candidates",
			input: genai.LogprobsResult{
				ChosenCandidates: []*genai.LogprobsResultCandidate{
					{
						Token:          "hello",
						LogProbability: -0.5,
					},
					{
						Token:          "world",
						LogProbability: -0.3,
					},
				},
				TopCandidates: []*genai.LogprobsResultTopCandidates{
					{
						Candidates: []*genai.LogprobsResultCandidate{
							{Token: "hello", LogProbability: -0.5},
							{Token: "hi", LogProbability: -1.2},
							{Token: "hey", LogProbability: -1.5},
						},
					},
					{
						Candidates: []*genai.LogprobsResultCandidate{
							{Token: "world", LogProbability: -0.3},
							{Token: "earth", LogProbability: -1.1},
						},
					},
				},
			},
			expected: openai.ChatCompletionChoicesLogprobs{
				Content: []openai.ChatCompletionTokenLogprob{
					{
						Token:   "hello",
						Logprob: -0.5,
						TopLogprobs: []openai.ChatCompletionTokenLogprobTopLogprob{
							{Token: "hello", Logprob: -0.5},
							{Token: "hi", Logprob: -1.2},
							{Token: "hey", Logprob: -1.5},
						},
					},
					{
						Token:   "world",
						Logprob: -0.3,
						TopLogprobs: []openai.ChatCompletionTokenLogprobTopLogprob{
							{Token: "world", Logprob: -0.3},
							{Token: "earth", Logprob: -1.1},
						},
					},
				},
			},
		},
		{
			// A nil TopCandidates entry must not panic and yields no TopLogprobs.
			name: "chosen candidates with nil top candidates entry",
			input: genai.LogprobsResult{
				ChosenCandidates: []*genai.LogprobsResultCandidate{
					{
						Token:          "test",
						LogProbability: -0.8,
					},
				},
				TopCandidates: []*genai.LogprobsResultTopCandidates{
					nil,
				},
			},
			expected: openai.ChatCompletionChoicesLogprobs{
				Content: []openai.ChatCompletionTokenLogprob{
					{
						Token:       "test",
						Logprob:     -0.8,
						TopLogprobs: nil,
					},
				},
			},
		},
		{
			name: "chosen candidates with empty top candidates",
			input: genai.LogprobsResult{
				ChosenCandidates: []*genai.LogprobsResultCandidate{
					{
						Token:          "empty",
						LogProbability: -0.2,
					},
				},
				TopCandidates: []*genai.LogprobsResultTopCandidates{
					{
						Candidates: []*genai.LogprobsResultCandidate{},
					},
				},
			},
			expected: openai.ChatCompletionChoicesLogprobs{
				Content: []openai.ChatCompletionTokenLogprob{
					{
						Token:       "empty",
						Logprob:     -0.2,
						TopLogprobs: nil,
					},
				},
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := geminiLogprobsToOpenAILogprobs(tt.input)
			// Use cmp.Equal with EquateApprox for float comparison due to float32->float64 conversion.
			if !cmp.Equal(tt.expected, result, cmpopts.EquateApprox(0, 0.0001)) {
				t.Errorf("geminiLogprobsToOpenAILogprobs() diff:\n%s", cmp.Diff(tt.expected, result, cmpopts.EquateApprox(0, 0.0001)))
			}
		})
	}
}

// TestExtractToolCallsFromGeminiParts verifies that FunctionCall parts are
// converted into OpenAI tool-call params with JSON-encoded arguments, while
// text parts and nil parts are skipped.
func TestExtractToolCallsFromGeminiParts(t *testing.T) {
	// Shared empty accumulator passed to every subtest; append on a
	// zero-capacity slice allocates fresh backing storage, so subtests do not
	// contaminate each other.
	toolCalls := []openai.ChatCompletionMessageToolCallParam{}
	tests := []struct {
		name     string
		input    []*genai.Part
		expected []openai.ChatCompletionMessageToolCallParam
		// NOTE(review): wantErr is never set true by any case below; it is
		// kept for future error cases.
		wantErr bool
	}{
		{
			name:     "nil parts",
			input:    nil,
			expected: nil,
		},
		{
			name:     "empty parts",
			input:    []*genai.Part{},
			expected: nil,
		},
		{
			// Text and nil parts carry no function calls and are ignored.
			name: "parts without function calls",
			input: []*genai.Part{
				{Text: "some text"},
				nil,
				{Text: "more text"},
			},
			expected: nil,
		},
		{
			name: "single function call",
			input: []*genai.Part{
				{
					FunctionCall: &genai.FunctionCall{
						Name: "get_weather",
						Args: map[string]any{
							"location": "San Francisco",
							"unit":     "celsius",
						},
					},
				},
			},
			expected: []openai.ChatCompletionMessageToolCallParam{
				{
					ID:   ptr.To("0"),
					Type: openai.ChatCompletionMessageToolCallTypeFunction,
					Function: openai.ChatCompletionMessageToolCallFunctionParam{
						Name:      "get_weather",
						Arguments: `{"location":"San Francisco","unit":"celsius"}`,
					},
				},
			},
		},
		{
			name: "multiple function calls",
			input: []*genai.Part{
				{
					FunctionCall: &genai.FunctionCall{
						Name: "function1",
						Args: map[string]any{"param1": "value1"},
					},
				},
				{Text: "some text between"},
				{
					FunctionCall: &genai.FunctionCall{
						Name: "function2",
						Args: map[string]any{"param2": float64(42)},
					},
				},
			},
			expected: []openai.ChatCompletionMessageToolCallParam{
				{
					ID:   ptr.To("0"),
					Type: openai.ChatCompletionMessageToolCallTypeFunction,
					Function: openai.ChatCompletionMessageToolCallFunctionParam{
						Name:      "function1",
						Arguments: `{"param1":"value1"}`,
					},
				},
				{
					ID:   ptr.To("1"),
					Type: openai.ChatCompletionMessageToolCallTypeFunction,
					Function: openai.ChatCompletionMessageToolCallFunctionParam{
						Name:      "function2",
						Arguments: `{"param2":42}`,
					},
				},
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			calls, err := extractToolCallsFromGeminiParts(toolCalls, tt.input)

			if tt.wantErr {
				require.Error(t, err)
				return
			}

			require.NoError(t, err)

			// Normalize IDs since they're generated.
			// NOTE(review): this overwrites every ID before comparison, so the
			// ptr.To("0")/ptr.To("1") expectations above only pin ordering,
			// not the actual generated ID format.
			for i := range calls {
				calls[i].ID = ptr.To(fmt.Sprintf("%d", i))
			}

			require.Equal(t, tt.expected, calls)
		})
	}
}

// TestGeminiFinishReasonToOpenAI verifies mapping of Gemini finish reasons to
// OpenAI finish reasons: stop maps to stop (or tool_calls when tool calls are
// present), max-tokens maps to length, the empty reason passes through for
// streaming chunks, and safety/recitation/other/unknown all map to
// content_filter.
func TestGeminiFinishReasonToOpenAI(t *testing.T) {
	tests := []struct {
		name      string
		input     genai.FinishReason
		toolCalls []openai.ChatCompletionMessageToolCallParam
		expected  openai.ChatCompletionChoicesFinishReason
	}{
		{
			name:      "stop reason",
			input:     genai.FinishReasonStop,
			toolCalls: []openai.ChatCompletionMessageToolCallParam{},
			expected:  openai.ChatCompletionChoicesFinishReasonStop,
		},
		{
			// Presence of tool calls overrides a plain stop reason.
			name:  "tool calls reason",
			input: genai.FinishReasonStop,
			toolCalls: []openai.ChatCompletionMessageToolCallParam{
				{
					ID: ptr.To("tool_call_1"),
					Function: openai.ChatCompletionMessageToolCallFunctionParam{
						Name:      "example_tool",
						Arguments: "{\"param1\":\"value1\"}",
					},
					Type: openai.ChatCompletionMessageToolCallTypeFunction,
				},
			},
			expected: openai.ChatCompletionChoicesFinishReasonToolCalls,
		},
		{
			name:      "max tokens reason",
			input:     genai.FinishReasonMaxTokens,
			toolCalls: []openai.ChatCompletionMessageToolCallParam{},
			expected:  openai.ChatCompletionChoicesFinishReasonLength,
		},
		{
			// Intermediate streaming chunks carry no finish reason.
			name:      "empty reason for streaming",
			input:     "",
			toolCalls: []openai.ChatCompletionMessageToolCallParam{},
			expected:  "",
		},
		{
			name:      "safety reason",
			input:     genai.FinishReasonSafety,
			toolCalls: []openai.ChatCompletionMessageToolCallParam{},
			expected:  openai.ChatCompletionChoicesFinishReasonContentFilter,
		},
		{
			name:      "recitation reason",
			input:     genai.FinishReasonRecitation,
			toolCalls: []openai.ChatCompletionMessageToolCallParam{},
			expected:  openai.ChatCompletionChoicesFinishReasonContentFilter,
		},
		{
			name:      "other reason",
			input:     genai.FinishReasonOther,
			toolCalls: []openai.ChatCompletionMessageToolCallParam{},
			expected:  openai.ChatCompletionChoicesFinishReasonContentFilter,
		},
		{
			// Unrecognized reasons fall back to content_filter.
			name:      "unknown reason",
			input:     genai.FinishReason("unknown_reason"),
			toolCalls: []openai.ChatCompletionMessageToolCallParam{},
			expected:  openai.ChatCompletionChoicesFinishReasonContentFilter,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := geminiFinishReasonToOpenAI(tt.input, tt.toolCalls)
			require.Equal(t, tt.expected, result)
		})
	}
}

// TestExtractTextAndThoughtSummaryFromGeminiParts verifies splitting of Gemini
// parts into a thought summary (parts marked Thought) and the concatenated
// response text. In regex response mode, surrounding double quotes added by
// the model are stripped from each part; in other modes text is passed
// through unchanged.
func TestExtractTextAndThoughtSummaryFromGeminiParts(t *testing.T) {
	tests := []struct {
		name                   string
		parts                  []*genai.Part
		responseMode           geminiResponseMode
		expectedThoughtSummary string
		expectedText           string
	}{
		{
			name:                   "nil parts",
			parts:                  nil,
			responseMode:           responseModeNone,
			expectedThoughtSummary: "",
			expectedText:           "",
		},
		{
			name:                   "empty parts",
			parts:                  []*genai.Part{},
			responseMode:           responseModeNone,
			expectedThoughtSummary: "",
			expectedText:           "",
		},
		{
			// Non-regex modes simply concatenate the text parts.
			name: "multiple text parts without regex mode",
			parts: []*genai.Part{
				{Text: "Hello, "},
				{Text: "world!"},
			},
			responseMode:           responseModeJSON,
			expectedThoughtSummary: "",
			expectedText:           "Hello, world!",
		},
		{
			name: "regex mode with mixed quoted and unquoted text",
			parts: []*genai.Part{
				{Text: `"positive"`},
				{Text: `unquoted`},
				{Text: `"negative"`},
			},
			responseMode:           responseModeRegex,
			expectedThoughtSummary: "",
			expectedText:           "positiveunquotednegative",
		},
		{
			// Only the outermost quote pair is stripped; inner quotes survive.
			name: "regex mode with only double-quoted first and last words",
			parts: []*genai.Part{
				{Text: "\"\"ERROR\" Unable to connect to database \"DatabaseModule\"\""},
			},
			responseMode:           responseModeRegex,
			expectedThoughtSummary: "",
			expectedText:           "\"ERROR\" Unable to connect to database \"DatabaseModule\"",
		},
		{
			name: "non-regex mode with double-quoted text (should not remove quotes)",
			parts: []*genai.Part{
				{Text: `"positive"`},
			},
			responseMode:           responseModeJSON,
			expectedThoughtSummary: "",
			expectedText:           `"positive"`,
		},
		{
			name: "regex mode with text containing internal quotes",
			parts: []*genai.Part{
				{Text: `"He said \"hello\" to me"`},
			},
			responseMode:           responseModeRegex,
			expectedThoughtSummary: "",
			expectedText:           `He said \"hello\" to me`,
		},
		{
			// Thought parts go to the summary; the rest form the text.
			// responseMode is intentionally left at its zero value here.
			name: "test thought summary",
			parts: []*genai.Part{
				{Text: "Let me think step by step", Thought: true},
				{Text: "Here is the conclusion"},
			},
			expectedThoughtSummary: "Let me think step by step",
			expectedText:           "Here is the conclusion",
		},
	}

	for _, tc := range tests {
		t.Run(tc.name, func(t *testing.T) {
			thoughtSummary, text := extractTextAndThoughtSummaryFromGeminiParts(tc.parts, tc.responseMode)
			if thoughtSummary != tc.expectedThoughtSummary {
				// Fixed: the "want" argument previously printed tc.expectedText,
				// making failure diagnostics for the thought summary misleading.
				t.Errorf("thought summary result of extractTextAndThoughtSummaryFromGeminiParts() = %q, want %q", thoughtSummary, tc.expectedThoughtSummary)
			}
			if text != tc.expectedText {
				t.Errorf("text result of extractTextAndThoughtSummaryFromGeminiParts() = %q, want %q", text, tc.expectedText)
			}
		})
	}
}

// TestOpenAIReqToGeminiGenerationConfigWithJsonSchemaToGemini exercises the
// json_schema response-format path of openAIReqToGeminiGenerationConfig for
// request models (e.g. gemini-2.0-flash) that do not support native JSON
// schema and therefore go through the jsonSchemaToGemini conversion. It covers
// flat objects, nested objects/arrays, anyOf unions, "null"-member nullability,
// $ref/$defs resolution, and the error path for an unresolvable $ref.
func TestOpenAIReqToGeminiGenerationConfigWithJsonSchemaToGemini(t *testing.T) {
	tests := []struct {
		name                     string
		input                    *openai.ChatCompletionRequest
		requestModel             internalapi.RequestModel
		expectedGenerationConfig *genai.GenerationConfig
		expectedResponseMode     geminiResponseMode
		expectedErrMsg           string // non-empty means the conversion is expected to fail
	}{
		{
			name: "json schema for older gemini model (uses jsonSchemaToGemini)",
			input: &openai.ChatCompletionRequest{
				ResponseFormat: &openai.ChatCompletionResponseFormatUnion{
					OfJSONSchema: &openai.ChatCompletionResponseFormatJSONSchema{
						Type: openai.ChatCompletionResponseFormatTypeJSONSchema,
						JSONSchema: openai.ChatCompletionResponseFormatJSONSchemaJSONSchema{
							Schema: json.RawMessage(`{
								"type": "object",
								"properties": {
									"name": {"type": "string"},
									"age": {"type": "number"}
								},
								"required": ["name", "age"]
							}`),
						},
					},
				},
			},
			requestModel: "gemini-2.0-flash", // older model that doesn't support native JSON schema
			expectedGenerationConfig: &genai.GenerationConfig{
				ResponseMIMEType: "application/json",
				ResponseSchema: &genai.Schema{
					Type: "object",
					Properties: map[string]*genai.Schema{
						"name": {Type: "string"},
						"age":  {Type: "number"},
					},
					Required: []string{"name", "age"},
				},
			},
			expectedResponseMode: responseModeJSON,
		},
		{
			name: "complex json schema with nested objects (uses jsonSchemaToGemini)",
			input: &openai.ChatCompletionRequest{
				ResponseFormat: &openai.ChatCompletionResponseFormatUnion{
					OfJSONSchema: &openai.ChatCompletionResponseFormatJSONSchema{
						Type: openai.ChatCompletionResponseFormatTypeJSONSchema,
						JSONSchema: openai.ChatCompletionResponseFormatJSONSchemaJSONSchema{
							Schema: json.RawMessage(`{
								"type": "object",
								"properties": {
									"user": {
										"type": "object",
										"properties": {
											"name": {"type": "string"},
											"contact": {
												"type": "object",
												"properties": {
													"email": {"type": "string"},
													"phone": {"type": "string"}
												}
											}
										}
									},
									"items": {
										"type": "array",
										"items": {
											"type": "object",
											"properties": {
												"id": {"type": "number"},
												"description": {"type": "string"}
											}
										}
									}
								},
								"required": ["user", "items"]
							}`),
						},
					},
				},
			},
			requestModel: "gemini-2.0-flash", // older model
			expectedGenerationConfig: &genai.GenerationConfig{
				ResponseMIMEType: "application/json",
				ResponseSchema: &genai.Schema{
					Type: "object",
					Properties: map[string]*genai.Schema{
						"user": {
							Type: "object",
							Properties: map[string]*genai.Schema{
								"name": {Type: "string"},
								"contact": {
									Type: "object",
									Properties: map[string]*genai.Schema{
										"email": {Type: "string"},
										"phone": {Type: "string"},
									},
								},
							},
						},
						"items": {
							Type: "array",
							Items: &genai.Schema{
								Type: "object",
								Properties: map[string]*genai.Schema{
									"id":          {Type: "number"},
									"description": {Type: "string"},
								},
							},
						},
					},
					Required: []string{"user", "items"},
				},
			},
			expectedResponseMode: responseModeJSON,
		},
		{
			name: "json schema with anyOf (uses jsonSchemaToGemini)",
			input: &openai.ChatCompletionRequest{
				ResponseFormat: &openai.ChatCompletionResponseFormatUnion{
					OfJSONSchema: &openai.ChatCompletionResponseFormatJSONSchema{
						Type: openai.ChatCompletionResponseFormatTypeJSONSchema,
						JSONSchema: openai.ChatCompletionResponseFormatJSONSchemaJSONSchema{
							Schema: json.RawMessage(`{
								"type": "object",
								"properties": {
									"value": {
										"anyOf": [
											{"type": "string"},
											{"type": "number"}
										]
									}
								}
							}`),
						},
					},
				},
			},
			requestModel: "gemini-2.0-flash",
			expectedGenerationConfig: &genai.GenerationConfig{
				ResponseMIMEType: "application/json",
				ResponseSchema: &genai.Schema{
					Type: "object",
					Properties: map[string]*genai.Schema{
						"value": {
							AnyOf: []*genai.Schema{
								{Type: "string"},
								{Type: "number"},
							},
						},
					},
				},
			},
			expectedResponseMode: responseModeJSON,
		},
		{
			// An anyOf containing {"type": "null"} is expected to collapse into the
			// non-null members plus Nullable=true on the Gemini side.
			name: "json schema with nullable type (uses jsonSchemaToGemini)",
			input: &openai.ChatCompletionRequest{
				ResponseFormat: &openai.ChatCompletionResponseFormatUnion{
					OfJSONSchema: &openai.ChatCompletionResponseFormatJSONSchema{
						Type: openai.ChatCompletionResponseFormatTypeJSONSchema,
						JSONSchema: openai.ChatCompletionResponseFormatJSONSchemaJSONSchema{
							Schema: json.RawMessage(`{
								"type": "object",
								"properties": {
									"optional_field": {
										"anyOf": [
											{"type": "string"},
											{"type": "null"}
										]
									}
								}
							}`),
						},
					},
				},
			},
			requestModel: "gemini-2.0-flash",
			expectedGenerationConfig: &genai.GenerationConfig{
				ResponseMIMEType: "application/json",
				ResponseSchema: &genai.Schema{
					Type: "object",
					Properties: map[string]*genai.Schema{
						"optional_field": {
							AnyOf: []*genai.Schema{
								{Type: "string"},
							},
							Nullable: ptr.To(true),
						},
					},
				},
			},
			expectedResponseMode: responseModeJSON,
		},
		{
			// $ref pointers into $defs are expected to be resolved inline; the
			// expected schema carries no reference, only the expanded definition.
			name: "json schema with $ref (uses jsonSchemaToGemini)",
			input: &openai.ChatCompletionRequest{
				ResponseFormat: &openai.ChatCompletionResponseFormatUnion{
					OfJSONSchema: &openai.ChatCompletionResponseFormatJSONSchema{
						Type: openai.ChatCompletionResponseFormatTypeJSONSchema,
						JSONSchema: openai.ChatCompletionResponseFormatJSONSchemaJSONSchema{
							Schema: json.RawMessage(`{
								"type": "object",
								"properties": {
									"user": {"$ref": "#/$defs/User"}
								},
								"$defs": {
									"User": {
										"type": "object",
										"properties": {
											"name": {"type": "string"},
											"age": {"type": "number"}
										}
									}
								}
							}`),
						},
					},
				},
			},
			requestModel: "gemini-2.0-flash",
			expectedGenerationConfig: &genai.GenerationConfig{
				ResponseMIMEType: "application/json",
				ResponseSchema: &genai.Schema{
					Type: "object",
					Properties: map[string]*genai.Schema{
						"user": {
							Type: "object",
							Properties: map[string]*genai.Schema{
								"name": {Type: "string"},
								"age":  {Type: "number"},
							},
						},
					},
				},
			},
			expectedResponseMode: responseModeJSON,
		},
		{
			name: "invalid json schema causes jsonSchemaToGemini error",
			input: &openai.ChatCompletionRequest{
				ResponseFormat: &openai.ChatCompletionResponseFormatUnion{
					OfJSONSchema: &openai.ChatCompletionResponseFormatJSONSchema{
						Type: openai.ChatCompletionResponseFormatTypeJSONSchema,
						JSONSchema: openai.ChatCompletionResponseFormatJSONSchemaJSONSchema{
							Schema: json.RawMessage(`{
								"type": "object",
								"properties": {
									"invalid_ref": {"$ref": "#/nonexistent/path"}
								}
							}`),
						},
					},
				},
			},
			requestModel:   "gemini-2.0-flash",
			expectedErrMsg: "invalid JSON schema",
		},
		{
			name: "complex nested schema with arrays and refs (uses jsonSchemaToGemini)",
			input: &openai.ChatCompletionRequest{
				ResponseFormat: &openai.ChatCompletionResponseFormatUnion{
					OfJSONSchema: &openai.ChatCompletionResponseFormatJSONSchema{
						Type: openai.ChatCompletionResponseFormatTypeJSONSchema,
						JSONSchema: openai.ChatCompletionResponseFormatJSONSchemaJSONSchema{
							Schema: json.RawMessage(`{
								"type": "object",
								"properties": {
									"steps": {
										"type": "array",
										"items": {"$ref": "#/$defs/Step"}
									},
									"final_answer": {"type": "string"}
								},
								"required": ["steps", "final_answer"],
								"$defs": {
									"Step": {
										"type": "object",
										"properties": {
											"explanation": {"type": "string"},
											"output": {"type": "string"}
										},
										"required": ["explanation", "output"]
									}
								}
							}`),
						},
					},
				},
			},
			requestModel: "gemini-2.0-flash",
			expectedGenerationConfig: &genai.GenerationConfig{
				ResponseMIMEType: "application/json",
				ResponseSchema: &genai.Schema{
					Type: "object",
					Properties: map[string]*genai.Schema{
						"steps": {
							Type: "array",
							Items: &genai.Schema{
								Type: "object",
								Properties: map[string]*genai.Schema{
									"explanation": {Type: "string"},
									"output":      {Type: "string"},
								},
								Required: []string{"explanation", "output"},
							},
						},
						"final_answer": {Type: "string"},
					},
					Required: []string{"steps", "final_answer"},
				},
			},
			expectedResponseMode: responseModeJSON,
		},
	}

	for _, tc := range tests {
		t.Run(tc.name, func(t *testing.T) {
			got, responseMode, err := openAIReqToGeminiGenerationConfig(tc.input, tc.requestModel)
			if tc.expectedErrMsg != "" {
				require.ErrorContains(t, err, tc.expectedErrMsg)
			} else {
				require.NoError(t, err)

				// cmp.Diff gives a readable structural diff for the deeply nested
				// schema; unexported genai fields are irrelevant to this test.
				if diff := cmp.Diff(tc.expectedGenerationConfig, got, cmpopts.IgnoreUnexported(genai.GenerationConfig{}, genai.Schema{})); diff != "" {
					t.Errorf("GenerationConfig mismatch (-want +got):\n%s", diff)
				}

				if responseMode != tc.expectedResponseMode {
					t.Errorf("geminiResponseMode mismatch: got %v, want %v", responseMode, tc.expectedResponseMode)
				}
			}
		})
	}
}

// TestMapDetailMediaResolution verifies the mapping from OpenAI image-detail
// levels to Gemini media-resolution levels, including the error path for
// unrecognized values (which must yield the empty resolution level).
func TestMapDetailMediaResolution(t *testing.T) {
	for _, tt := range []struct {
		name          string
		input         openai.ChatCompletionContentPartImageImageURLDetail
		wantRes       genai.PartMediaResolutionLevel
		wantErrSubstr string
	}{
		{
			name:    "low detail",
			input:   openai.ChatCompletionContentPartImageImageURLDetailLow,
			wantRes: genai.PartMediaResolutionLevelMediaResolutionLow,
		},
		{
			name:    "medium detail",
			input:   openai.ChatCompletionContentPartImageImageURLDetailMedium,
			wantRes: genai.PartMediaResolutionLevelMediaResolutionMedium,
		},
		{
			name:    "high detail",
			input:   openai.ChatCompletionContentPartImageImageURLDetailHigh,
			wantRes: genai.PartMediaResolutionLevelMediaResolutionHigh,
		},
		{
			name:    "auto detail - returns unspecified for default behavior",
			input:   openai.ChatCompletionContentPartImageImageURLDetailAuto,
			wantRes: genai.PartMediaResolutionLevelMediaResolutionUnspecified,
		},
		{
			name:          "empty string detail",
			input:         openai.ChatCompletionContentPartImageImageURLDetail(""),
			wantErrSubstr: "unsupported detail level:",
		},
		{
			name:          "unknown detail value",
			input:         openai.ChatCompletionContentPartImageImageURLDetail("unknown"),
			wantErrSubstr: "unsupported detail level:",
		},
		{
			name:          "invalid detail value",
			input:         openai.ChatCompletionContentPartImageImageURLDetail("invalid_value"),
			wantErrSubstr: "unsupported detail level:",
		},
	} {
		t.Run(tt.name, func(t *testing.T) {
			got, err := mapDetailMediaResolution(tt.input)

			// Happy path: a known detail level maps cleanly.
			if tt.wantErrSubstr == "" {
				require.NoError(t, err)
				assert.Equal(t, tt.wantRes, got)
				return
			}

			// Error path: unrecognized levels fail and return the zero value.
			require.Error(t, err)
			assert.Contains(t, err.Error(), tt.wantErrSubstr)
			assert.Equal(t, genai.PartMediaResolutionLevel(""), got)
		})
	}
}

// TestMapReasoningEffortToThinkingLevel verifies the mapping from OpenAI
// reasoning-effort values to Gemini thinking levels: low→Low, medium→High,
// and every other value (minimal, high, empty, unknown) is rejected with an
// error and the empty thinking level.
func TestMapReasoningEffortToThinkingLevel(t *testing.T) {
	for _, tt := range []struct {
		name          string
		effort        openaigo.ReasoningEffort
		want          genai.ThinkingLevel
		wantErrSubstr string
	}{
		{
			name:   "low effort maps to ThinkingLevelLow",
			effort: openaigo.ReasoningEffortLow,
			want:   genai.ThinkingLevelLow,
		},
		{
			name:   "medium effort maps to ThinkingLevelHigh",
			effort: openaigo.ReasoningEffortMedium,
			want:   genai.ThinkingLevelHigh,
		},
		{
			name:          "minimal effort - not supported",
			effort:        openaigo.ReasoningEffortMinimal,
			wantErrSubstr: "unsupported reasoning effort level:",
		},
		{
			name:          "high effort - not supported",
			effort:        openaigo.ReasoningEffortHigh,
			wantErrSubstr: "unsupported reasoning effort level:",
		},
		{
			name:          "empty effort - not supported",
			effort:        "",
			wantErrSubstr: "unsupported reasoning effort level:",
		},
		{
			name:          "unknown effort - not supported",
			effort:        "unknown",
			wantErrSubstr: "unsupported reasoning effort level:",
		},
	} {
		t.Run(tt.name, func(t *testing.T) {
			got, err := mapReasoningEffortToThinkingLevel(tt.effort)

			// Happy path: supported efforts map without error.
			if tt.wantErrSubstr == "" {
				require.NoError(t, err)
				assert.Equal(t, tt.want, got)
				return
			}

			// Error path: unsupported efforts fail and return the zero value.
			require.Error(t, err)
			assert.Contains(t, err.Error(), tt.wantErrSubstr)
			assert.Equal(t, genai.ThinkingLevel(""), got)
		})
	}
}
