import { t } from '@/locales/i18n';
import { LLMModelTypeEnum } from '@/types/ai/constants';

import {
  FlowNodeInputTypeEnum,
  FlowNodeOutputTypeEnum,
  FlowNodeTemplateTypeEnum,
  FlowNodeTypeEnum,
  NodeInputKeyEnum,
  NodeOutputKeyEnum,
  WorkflowIOValueTypeEnum,
} from '../../constant';
import { FlowNodeTemplateType } from '../../node';
import {
  Input_Template_History,
  Input_Template_SelectAIModel,
  Input_Template_UserChatInput,
} from '../input';
import { getHandleConfig } from '../utils';

/**
 * Input slots for the query-extension ("question optimization") node.
 * Order matters: it is the order the editor renders the fields in.
 */
const queryExtensionInputs: FlowNodeTemplateType['inputs'] = [
  // AI model selector, restricted to models tagged for query extension.
  {
    ...Input_Template_SelectAIModel,
    llmModelType: LLMModelTypeEnum.queryExtension,
  },
  // Optional background/system prompt (textarea or variable reference), capped at 300 chars.
  {
    key: NodeInputKeyEnum.aiSystemPrompt,
    renderTypeList: [FlowNodeInputTypeEnum.textarea, FlowNodeInputTypeEnum.reference],
    label: t('core.app.edit.Query extension background prompt'),
    max: 300,
    valueType: WorkflowIOValueTypeEnum.string,
    description: t('core.app.edit.Query extension background tip'),
    placeholder: t('core.module.QueryExtension.placeholder'),
  },
  // Chat history, reused from the shared input template as-is.
  Input_Template_History,
  // The user question to optimize; relabelled for this node's context.
  {
    ...Input_Template_UserChatInput,
    label: t('question_user_chat_input'),
    toolDescription: t('question_user_chat_input_desc'),
  },
];

/** Single static output carrying the query-extension result text. */
const queryExtensionOutputs: FlowNodeTemplateType['outputs'] = [
  {
    id: NodeOutputKeyEnum.text,
    key: NodeOutputKeyEnum.text,
    label: t('core.module.output.label.query extension result'),
    description: t('core.module.output.description.query extension result'),
    valueType: WorkflowIOValueTypeEnum.string,
    type: FlowNodeOutputTypeEnum.static,
  },
];

/**
 * Workflow node template for the query-extension ("question optimization") node.
 * Registered under FlowNodeTypeEnum.queryExtension in the "other" template group.
 */
export const AiQueryExtension: FlowNodeTemplateType = {
  id: FlowNodeTypeEnum.queryExtension,
  flowNodeType: FlowNodeTypeEnum.queryExtension,
  templateType: FlowNodeTemplateTypeEnum.other,
  // All four connection handles enabled on both the source and target side.
  sourceHandle: getHandleConfig(true, true, true, true),
  targetHandle: getHandleConfig(true, true, true, true),
  avatar: 'queryExtension.svg',
  name: t('question_optimization'),
  intro: t('workflow.question_optimization_desc'),
  showStatus: true,
  version: '481',
  isFolded: true,
  inputs: queryExtensionInputs,
  outputs: queryExtensionOutputs,
};
