import type {
	IExecuteFunctions,
	INodeExecutionData,
	INodeType,
	INodeTypeDescription,
} from 'n8n-workflow';
import { NodeConnectionType, NodeOperationError } from 'n8n-workflow';

/**
 * Gemini Thinking — n8n node that calls the Google Gemini `generateContent`
 * REST endpoint. When "Enable Thinking" is on, the request asks the model
 * (via a JSON response schema) to return `{ thinking, response }`, so the
 * model's reasoning can be surfaced separately from its final answer.
 */
export class GeminiThinking implements INodeType {
	description: INodeTypeDescription = {
		displayName: 'Gemini Thinking',
		name: 'geminiThinking',
		icon: 'file:gemini.svg',
		group: ['ai'],
		version: 1,
		description: 'Google Gemini Chat Model with thinking capabilities',
		defaults: {
			name: 'Gemini Thinking',
		},
		inputs: [NodeConnectionType.Main],
		outputs: [NodeConnectionType.Main],
		credentials: [
			{
				name: 'geminiApi',
				required: true,
			},
		],
		properties: [
			{
				displayName: 'Model',
				name: 'model',
				type: 'options',
				options: [
					{
						name: 'Gemini 1.5 Pro',
						value: 'gemini-1.5-pro',
					},
					{
						name: 'Gemini 1.5 Flash',
						value: 'gemini-1.5-flash',
					},
					{
						name: 'Gemini 1.5 Flash-8B',
						value: 'gemini-1.5-flash-8b',
					},
					{
						name: 'Gemini 2.0 Flash Experimental',
						value: 'gemini-2.0-flash-exp',
					},
				],
				default: 'gemini-1.5-pro',
				description: 'The Gemini model to use',
			},
			{
				displayName: 'Message',
				name: 'message',
				type: 'string',
				typeOptions: {
					rows: 4,
				},
				default: '',
				placeholder: 'Enter your message here...',
				description: 'The message to send to Gemini',
			},
			{
				displayName: 'Enable Thinking',
				name: 'enableThinking',
				type: 'boolean',
				default: false,
				description: 'Whether to enable thinking mode for more detailed reasoning',
			},
			{
				displayName: 'System Instruction',
				name: 'systemInstruction',
				type: 'string',
				typeOptions: {
					rows: 3,
				},
				default: '',
				placeholder: 'You are a helpful assistant...',
				description: 'System instruction to guide the model behavior',
			},
			{
				displayName: 'Temperature',
				name: 'temperature',
				type: 'number',
				typeOptions: {
					minValue: 0,
					maxValue: 2,
					numberStepSize: 0.1,
				},
				default: 1,
				description: 'Controls randomness in the output. Higher values make output more random.',
			},
			{
				displayName: 'Max Output Tokens',
				name: 'maxOutputTokens',
				type: 'number',
				typeOptions: {
					minValue: 1,
					maxValue: 8192,
				},
				default: 2048,
				description: 'Maximum number of tokens to generate',
			},
			{
				displayName: 'Top P',
				name: 'topP',
				type: 'number',
				typeOptions: {
					minValue: 0,
					maxValue: 1,
					numberStepSize: 0.1,
				},
				default: 0.95,
				description: 'Controls diversity via nucleus sampling',
			},
			{
				displayName: 'Top K',
				name: 'topK',
				type: 'number',
				typeOptions: {
					minValue: 1,
					maxValue: 100,
				},
				default: 40,
				description: 'Controls diversity by limiting the number of tokens considered',
			},
			{
				displayName: 'Custom Parameters',
				name: 'customParameters',
				type: 'fixedCollection',
				placeholder: 'Add Parameter',
				default: {},
				typeOptions: {
					multipleValues: true,
				},
				description: 'Additional custom parameters to send with the request',
				options: [
					{
						name: 'parameter',
						displayName: 'Parameter',
						values: [
							{
								displayName: 'Name',
								name: 'name',
								type: 'string',
								default: '',
								description: 'Parameter name',
							},
							{
								displayName: 'Value',
								name: 'value',
								type: 'string',
								default: '',
								description: 'Parameter value',
							},
						],
					},
				],
			},
		],
	};

	/**
	 * Sends each input item's message to the Gemini `generateContent` endpoint
	 * and emits the model's reply. In thinking mode, the structured JSON reply
	 * is split into `thinking` and `response` fields.
	 *
	 * @returns One output item per input item; on failure with
	 *   "continue on fail" enabled, an `{ error }` item instead.
	 * @throws NodeOperationError when a request fails and the node is not set
	 *   to continue on failure.
	 */
	async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
		const items = this.getInputData();
		const returnData: INodeExecutionData[] = [];

		for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
			try {
				const model = this.getNodeParameter('model', itemIndex) as string;
				const message = this.getNodeParameter('message', itemIndex) as string;
				const enableThinking = this.getNodeParameter('enableThinking', itemIndex) as boolean;
				const systemInstruction = this.getNodeParameter('systemInstruction', itemIndex) as string;
				const temperature = this.getNodeParameter('temperature', itemIndex) as number;
				const maxOutputTokens = this.getNodeParameter('maxOutputTokens', itemIndex) as number;
				const topP = this.getNodeParameter('topP', itemIndex) as number;
				const topK = this.getNodeParameter('topK', itemIndex) as number;
				const customParameters = this.getNodeParameter('customParameters', itemIndex) as {
					parameter: Array<{ name: string; value: string }>;
				};

				// Base request body for the generateContent endpoint. The index
				// signature allows user-supplied custom parameters to be merged in.
				const requestBody: {
					contents: Array<{ parts: Array<{ text: string }> }>;
					generationConfig: Record<string, unknown>;
					systemInstruction?: { parts: Array<{ text: string }> };
					[key: string]: unknown;
				} = {
					contents: [
						{
							parts: [
								{
									text: message,
								},
							],
						},
					],
					generationConfig: {
						temperature,
						maxOutputTokens,
						topP,
						topK,
					},
				};

				// Optional system instruction guiding overall model behavior.
				if (systemInstruction) {
					requestBody.systemInstruction = {
						parts: [
							{
								text: systemInstruction,
							},
						],
					};
				}

				// Thinking mode: ask for structured JSON output. The Gemini API
				// only honors `responseSchema` when `responseMimeType` is set to
				// 'application/json' — without it the schema is ignored and the
				// model returns plain text.
				if (enableThinking) {
					requestBody.generationConfig.responseMimeType = 'application/json';
					requestBody.generationConfig.responseSchema = {
						type: 'object',
						properties: {
							thinking: {
								type: 'string',
								description: 'Your internal reasoning and thought process',
							},
							response: {
								type: 'string',
								description: 'Your final response to the user',
							},
						},
						required: ['thinking', 'response'],
					};
				}

				// Merge user-supplied custom parameters into the request root.
				// Values are parsed as JSON when possible, otherwise sent verbatim
				// as strings. Entries with an empty name or value are skipped.
				if (customParameters?.parameter) {
					for (const param of customParameters.parameter) {
						if (param.name && param.value) {
							try {
								requestBody[param.name] = JSON.parse(param.value);
							} catch {
								requestBody[param.name] = param.value;
							}
						}
					}
				}

				// Call the Gemini API using the node's configured credentials.
				const response = await this.helpers.httpRequestWithAuthentication.call(
					this,
					'geminiApi',
					{
						method: 'POST',
						url: `https://generativelanguage.googleapis.com/v1beta/models/${model}:generateContent`,
						body: requestBody,
						json: true,
					},
				);

				// Extract the first candidate's text. Guard every level with
				// optional chaining: the API may return no candidates or a
				// candidate without parts (e.g. on safety blocks).
				let responseText = '';
				let thinkingText = '';
				const content = response.candidates?.[0]?.content?.parts?.[0]?.text;

				if (typeof content === 'string') {
					if (enableThinking) {
						try {
							const parsedContent = JSON.parse(content);
							thinkingText = parsedContent.thinking || '';
							responseText = parsedContent.response || content;
						} catch {
							// Model did not return valid JSON; fall back to raw text.
							responseText = content;
						}
					} else {
						responseText = content;
					}
				}

				const outputData: INodeExecutionData = {
					json: {
						model,
						message,
						response: responseText,
						...(enableThinking && { thinking: thinkingText }),
						usage: response.usageMetadata || {},
						rawResponse: response,
					},
					pairedItem: itemIndex,
				};

				returnData.push(outputData);
			} catch (error) {
				// Narrow the unknown catch value before reading `.message`.
				const errorMessage = error instanceof Error ? error.message : String(error);
				if (this.continueOnFail()) {
					returnData.push({
						json: { error: errorMessage },
						pairedItem: itemIndex,
					});
				} else {
					throw new NodeOperationError(this.getNode(), error as Error, {
						itemIndex,
					});
				}
			}
		}

		return [returnData];
	}
}