import React, { useMemo, useState } from 'react';
import { useContextSelector } from 'use-context-selector';

import { DatasetSearchModeEnum } from '@/common';
import { DatasetParamsProps } from '@/components/workflow/DatasetParamsModal';
import { useSystemStore } from '@/store/useSystemStore';
import { FlowNodeTypeEnum, NodeInputKeyEnum } from '@/types/workflow/constant';
import { getWebLLMModel } from '@/utils/system/utils';

import { WorkflowContext } from '../../../../../context';

import SearchDatasetParamsConfig from './SearchDatasetParamsConfig';

import type { RenderInputProps } from '../type';

/**
 * Renders the search-parameter configuration panel for a "select dataset"
 * workflow node. Local UI state is seeded from the node's stored inputs
 * (falling back to hard-coded defaults) and every edit is written back to
 * the node via `onChangeNode`.
 */
const SelectDatasetParam = ({ inputs = [], nodeId }: RenderInputProps) => {
  const onChangeNode = useContextSelector(WorkflowContext, (v) => v.onChangeNode);
  const nodeList = useContextSelector(WorkflowContext, (v) => v.nodeList);

  const { llmModelList } = useSystemStore();

  // Defaults overlaid with any values already persisted on the node's inputs.
  const defaultData: DatasetParamsProps = useMemo(() => {
    const merged: DatasetParamsProps = {
      searchMode: DatasetSearchModeEnum.embedding,
      limit: 5,
      similarity: 0.5,
      usingReRank: false,
      datasetSearchUsingExtensionQuery: true,
      datasetSearchExtensionModel: llmModelList[0]?.model,
      datasetSearchExtensionBg: '',
      datasetSearchUsingKnowledgeGraph: false,
      datasetSearchUsingChunkFilter: false,
    };
    inputs.forEach((input) => {
      // Overlay only keys that belong to DatasetParamsProps. Guarding on key
      // membership (Object.hasOwn) instead of `merged[key] !== undefined`
      // fixes a bug where a stored value was ignored whenever the default
      // itself was undefined (e.g. datasetSearchExtensionModel when
      // llmModelList is empty).
      if (Object.hasOwn(merged, input.key)) {
        const key = input.key as keyof DatasetParamsProps;
        (merged as Record<string, unknown>)[key] = input.value ?? merged[key];
      }
    });
    return merged;
  }, [inputs, llmModelList]);

  // Controlled local copy; initialized once from defaultData (subsequent
  // defaultData recomputations do not reset it — standard useState semantics).
  const [data, setData] = useState<DatasetParamsProps>(defaultData);

  // Largest quote-token budget among chat/tool nodes in the flow, floored at
  // 13000 so the slider always has a sane upper bound.
  const tokenLimit = useMemo(() => {
    let maxTokens = 13000;

    nodeList.forEach((node) => {
      if ([FlowNodeTypeEnum.chatNode, FlowNodeTypeEnum.tools].includes(node.flowNodeType)) {
        const model =
          node.inputs.find((input) => input.key === NodeInputKeyEnum.aiModel)?.value || '';
        const quoteMaxToken = getWebLLMModel(model)?.quoteMaxToken || 13000;

        maxTokens = Math.max(maxTokens, quoteMaxToken);
      }
    });

    return maxTokens;
  }, [nodeList]);

  const Render = useMemo(() => {
    return (
      <SearchDatasetParamsConfig
        {...data}
        maxTokens={tokenLimit}
        onDataChange={(e) => {
          setData(e);
          // Persist each changed field back onto the node's matching input.
          // Object.entries keeps key/value pairs together, avoiding the
          // indexed access that previously needed a @ts-ignore.
          Object.entries(e).forEach(([key, value]) => {
            const item = inputs.find((input) => input.key === key);
            if (!item) return;
            onChangeNode({
              nodeId,
              type: 'updateInput',
              key,
              value: {
                ...item,
                value,
              },
            });
          });
        }}
      />
    );
  }, [data, inputs, nodeId, onChangeNode, tokenLimit]);

  return Render;
};

export default React.memo(SelectDatasetParam);
