import { BlockEnum, NodeDefault } from '@/components/workflow/types'
import { ALL_COMPLETION_AVAILABLE_BLOCKS } from '../../constants'
import { LLMNodeType } from './types'

/**
 * Builds a fresh default model configuration for an LLM node.
 * A new object is created on every call, so callers may mutate the
 * result without affecting other nodes.
 */
export const getDefaultModel = () => ({
  provider: '',
  name: '',
  mode: 'chat',
  completion_params: {
    temperature: 0.3 // sampling temperature
  }
})

/**
 * Default output-variable list for an LLM node.
 * Returns a fresh single-element array on every call so callers can
 * safely mutate it.
 */
export const getLlmOutputsList = () => {
  const defaultOutput = {
    variable: 'output',
    value: '大模型输出',
    type: 'string',
    value_selector: []
  }
  return [defaultOutput]
}

/**
 * Assembles the complete default parameter payload for an LLM node:
 * model settings, input/output variable lists, and prompt fields.
 *
 * NOTE(review): the declared return type is `any`; kept as-is to avoid
 * changing the public signature, but a dedicated interface would be safer.
 */
export const getLlmDefaultParams = (): any => {
  // Default model configuration
  const model = {
    provider: '',
    name: 'deepseek-r1',
    mode: '',
    completion_params: {
      temperature: 0.3 // sampling temperature
    }
  }

  // Input variables
  const inputList = [
    {
      variable: 'input',
      value: undefined,
      type: 'cite',
      value_selector: []
    }
  ]

  // Output variables
  const outputList = [
    {
      variable: 'output',
      value: '大模型输出',
      type: 'string',
      value_selector: []
    }
  ]

  return {
    model,
    inputList,
    userInput: '', // user input
    callWord: '', // prompt text
    outputList
  }
}

/**
 * Default node definition for the LLM workflow node: default parameter
 * payload, connectivity rules, and payload validation.
 */
const nodeDefault: NodeDefault<LLMNodeType> = {
  // getLlmDefaultParams already returns a fresh object per call,
  // so the extra spread copy was redundant.
  defaultValue: getLlmDefaultParams(),
  getAvailablePrevNodes() {
    // An End node cannot precede an LLM node.
    return ALL_COMPLETION_AVAILABLE_BLOCKS.filter(type => type !== BlockEnum.End)
  },
  getAvailableNextNodes() {
    // Any completion-capable block may follow an LLM node.
    return ALL_COMPLETION_AVAILABLE_BLOCKS
  },
  /**
   * Validates an LLM node payload.
   *
   * Fix: the original body set `isValid = true` / `errorMessages = ''`
   * both before and inside `if (payload.type)`, so every payload —
   * including one with no type — passed validation. A missing node type
   * is now reported as invalid.
   */
  checkValid(payload: LLMNodeType) {
    let isValid = true
    let errorMessage = ''
    if (!payload.type) {
      isValid = false
      errorMessage = 'Node type is missing'
    }
    return {
      isValid,
      errorMessage
    }
  }
}

export default nodeDefault
