import { ElMessage } from 'element-plus';
import { defineStore } from 'pinia';

import {
  CreateSuccessfully,
  getChunkUploadedInfo,
  getImportedFiles,
} from 'src/api/dataCenter';
import {
  CurrentDataset,
  UploadList,
  createFileChunk,
  usePlatformUploadStore,
} from 'stores/platform-upload';

// Shared handle to the main upload store, captured once at module scope.
// NOTE(review): calling a Pinia store at import time requires the pinia
// instance to already be installed before this module is first imported —
// confirm import order, or consider resolving the store inside the actions.
const platformUploadStore = usePlatformUploadStore();

/**
 * Payload needed to resume a previously started upload: the original
 * dataset-creation result plus the multipart `upload_id` issued for it.
 */
export interface ContinueUploadInfo extends CreateSuccessfully {
  upload_id: string;
}

export const usePlatformReUploadStore = defineStore('platformReUploadStore', {
  state: () => ({
    // Whether the "continue upload" dialog is visible.
    reUploadModal: false,
    // dataset_id -> filename -> 0-based chunk orders already on the server.
    uploadedChunksRecord: {} as Record<string, Record<string, string[]>>,
    // Dataset ids for which a resume has been started.
    reUploadProcessId: [] as string[],
    // Rows shown in the re-upload dialog.
    reUploadList: [] as UploadList[],
  }),

  actions: {
    /**
     * Replace the pending re-upload list and mirror it into the main
     * platform upload store.
     */
    setReUploadList(lists: UploadList[]) {
      this.reUploadList = lists;
      platformUploadStore.setReUploadList(lists);
    },
    /** Show the re-upload dialog. */
    openModal() {
      this.reUploadModal = true;
    },
    /** Hide the re-upload dialog. */
    closeModal() {
      this.reUploadModal = false;
    },
    /**
     * Build a minimal CurrentDataset view of the resume info.
     *
     * NOTE(review): only id / version_id / project_id are populated; the
     * assertion hides any other CurrentDataset fields — confirm downstream
     * consumers read nothing else. (`as unknown as` replaces `as any as`;
     * still an assertion, but no longer routes through `any`.)
     */
    convertUploadInfoToDataset(uploadInfo: ContinueUploadInfo): CurrentDataset {
      return {
        id: uploadInfo.dataset_id,
        version_id: uploadInfo.version_id,
        project_id: uploadInfo.project_id,
      } as unknown as CurrentDataset;
    },
    /**
     * Remember which upload_id belongs to a file row of a dataset,
     * creating the per-dataset map on first use.
     */
    mapperUploadId(dataset_id: string, upload_id: string, row_name: string) {
      const mapper = platformUploadStore.platformUploadIdMapper;
      mapper[dataset_id] = mapper[dataset_id] || {};
      mapper[dataset_id][row_name] = upload_id;
    },
    /**
     * Rebuild chunk state for every pending file of a dataset so an
     * interrupted upload can resume.
     *
     * For each file row: asks the server which chunk ids are already
     * uploaded (server ids are 1-based, shifted to 0-based order strings),
     * records them in uploadedChunksRecord, then places small files
     * (size <= platformChunkSize) in fileNoChunks and splits larger ones
     * via createFileChunk into fileChunks. Percentage is 100 for anything
     * the server already has.
     */
    async prepareUploadChunks(uploadInfo: ContinueUploadInfo) {
      const fileRow =
        platformUploadStore.platformUploadFileList[uploadInfo.dataset_id];
      platformUploadStore.platformDataSetFileContainer[uploadInfo.dataset_id] =
        platformUploadStore.platformDataSetFileContainer[
          uploadInfo.dataset_id
        ] || {
          fileChunks: {},
          fileNoChunks: {},
        };
      // Guard a missing row list as well as an empty one.
      if (!fileRow || !fileRow.length) return;

      // The imported-file listing depends only on the dataset, so fetch it
      // once up front instead of once per file (it was inside the loop).
      const filesResponse = await getImportedFiles({
        dataset_id: uploadInfo.dataset_id,
        project_id: uploadInfo.project_id,
        version_id: uploadInfo.version_id,
      });
      const importedFiles = filesResponse.data.names;

      this.uploadedChunksRecord[uploadInfo.dataset_id] =
        this.uploadedChunksRecord[uploadInfo.dataset_id] || {};

      for (const rowFile of fileRow) {
        const filename = rowFile.name;
        const chunkUploadInfo = await getChunkUploadedInfo({
          ...uploadInfo,
          upload_id:
            platformUploadStore.platformUploadIdMapper[uploadInfo.dataset_id][
              filename
            ],
        });
        // Server chunk ids are 1-based; local chunk indices are 0-based.
        const uploadedChunksOrders = chunkUploadInfo.data.ids.map((id) =>
          String(Number(id) - 1)
        );
        this.uploadedChunksRecord[uploadInfo.dataset_id][filename] =
          uploadedChunksOrders;

        const isContain = importedFiles.includes(filename);
        if (
          rowFile.raw &&
          rowFile.raw.size <= platformUploadStore.platformChunkSize
        ) {
          // Small file: uploaded in a single request, no chunking needed.
          platformUploadStore.platformDataSetFileContainer[
            uploadInfo.dataset_id
          ].fileNoChunks[filename] = [
            {
              file: rowFile.raw,
              size: rowFile.raw.size,
              // Imported, or its single chunk already uploaded -> done.
              percentage:
                isContain || uploadedChunksOrders.length === 1 ? 100 : 0,
              filename: rowFile.name,
            },
          ];
        } else {
          const fileChunkList = createFileChunk(
            rowFile.raw!,
            platformUploadStore.platformChunkSize
          );
          platformUploadStore.platformDataSetFileContainer[
            uploadInfo.dataset_id
          ].fileChunks[filename] = fileChunkList.map(({ file }, index) => ({
            chunk: file,
            hash: rowFile.name + '-' + index,
            index: index,
            filename: rowFile.name,
            percentage:
              isContain || uploadedChunksOrders.includes(String(index))
                ? 100
                : 0,
            size: file.size,
            rowName: rowFile.name,
            rowSize: rowFile.size!,
          }));
        }
      }
    },
    /**
     * Kick off resumption of an interrupted upload: register the dataset,
     * point the main upload store at it, rebuild the chunk state, then
     * hand off to continueUpload.
     */
    async startContinueUpload(uploadInfo: ContinueUploadInfo) {
      const dataset = this.convertUploadInfoToDataset(uploadInfo);
      this.reUploadProcessId.push(String(uploadInfo.dataset_id));
      platformUploadStore.setCurUploadDataset(dataset);
      platformUploadStore.setCurProcessDatasetId(dataset.id);
      platformUploadStore.clearVueUploadComponentFileList(dataset);
      await this.prepareUploadChunks(uploadInfo);
      await platformUploadStore.initPlatformUploadList();
      await this.continueUpload(dataset, this.uploadedChunksRecord);
    },

    /**
     * Refresh the presigned upload urls, swap the re-upload modal for the
     * main upload modal, then start the upload, passing the record of
     * already-uploaded chunks so finished work is skipped.
     *
     * NOTE(review): on prepareUploadUrls failure we only toast and still
     * fall through to startUpload — confirm that is intended rather than
     * an early return.
     */
    async continueUpload(
      dataset: CurrentDataset,
      uploaded?: Record<string, Record<string, string[]>>
    ) {
      try {
        await platformUploadStore.prepareUploadUrls(dataset);
        this.closeModal();
        platformUploadStore.openUploadModal();
      } catch (err) {
        // Surface the failure instead of swallowing it silently.
        console.error('prepareUploadUrls failed during re-upload:', err);
        ElMessage.error('请继续上传！');
      }
      await platformUploadStore.startUpload(dataset, uploaded);
    },
  },
});
