package org.zjvis.datascience.common.vo.dataset;

import static org.zjvis.datascience.common.util.ToolUtil.checkDuplicateName;

import cn.hutool.db.Entity;
import cn.hutool.db.meta.Table;
import java.io.Serializable;
import java.sql.JDBCType;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.zjvis.datascience.common.constant.DatasetConstant;
import org.zjvis.datascience.common.util.FileImportUtil;
import org.zjvis.datascience.common.util.SqlUtil;

/**
 * @description VO for previewing dataset query results (数据结果预览相关VO)
 * @date 2021-12-08
 */
@Data
@Builder
@AllArgsConstructor
@NoArgsConstructor
public class PreviewDatasetVO implements Serializable {

    private static final long serialVersionUID = 5790929085294187958L;

    /**
     * All file-import preview columns are typed as varchar; compute the
     * lowercase JDBC type name once instead of on every column.
     */
    private static final String VARCHAR_TYPE =
        StringUtils.lowerCase(JDBCType.VARCHAR.getName());

    /** Column headers (name + type) of the preview result. */
    private List<HeadVO> head;
    /** Preview rows. */
    private List<Entity> data;
    /** Raw header names; populated by some callers instead of {@link #head}. */
    private List<String> headers;
    /** Dataset / table name. */
    private String name;
    /**
     * Only when returning preview data for a CSV import: the file name as
     * stored on GP is returned to the caller.
     */
    private String fileName;
    /** Owner of the dataset. */
    private String owner;

    /**
     * Builds a preview VO from query rows plus table metadata.
     *
     * @param data      preview rows
     * @param table     table name, exposed as the dataset name
     * @param tableMeta column metadata used to derive the header list
     * @param dbType    database type, passed through to type conversion
     * @return populated VO (headers/fileName/owner are left unset)
     */
    public static PreviewDatasetVO of(List<Entity> data, String table, Table tableMeta,
        String dbType) {
        return PreviewDatasetVO.builder()
            .data(data)
            .name(table)
            .head(toHead(tableMeta, dbType))
            .build();
    }

    /**
     * Converts table column metadata into header VOs.
     *
     * @param table  table metadata; may be {@code null} or have no columns
     * @param dbType database type used by {@link SqlUtil#changeType}
     * @return one {@code HeadVO} per column, or an empty list when there is
     *         no usable metadata
     */
    public static List<HeadVO> toHead(Table table, String dbType) {
        if (table == null || CollectionUtils.isEmpty(table.getColumns())) {
            return Collections.emptyList();
        }

        return table.getColumns().stream()
            .map(column ->
                HeadVO.builder()
                    // head and body field names are uniformly lower-cased
                    .name(column.getName().toLowerCase())
                    .type(SqlUtil.changeType(dbType, column))
                    .build())
            .collect(Collectors.toList());
    }

    /**
     * Builds headers for CSV parsing. Blank fields get sequential default
     * names; special characters are stripped; duplicates are renamed.
     *
     * @param fields raw header fields read from the CSV file
     * @return one varchar-typed {@code HeadVO} per field
     */
    public static List<HeadVO> toHead(String[] fields) {
        List<HeadVO> head = new ArrayList<>(fields.length);
        // counter only advances for blank fields, so defaults are numbered
        // consecutively among the blanks (field1, field2, ...)
        int i = 1;
        String name;
        List<String> nameList = new ArrayList<>();
        for (String field : fields) {
            if (StringUtils.isBlank(field)) {
                name = DatasetConstant.DEFAULT_FIELD_NAME + i++;
            } else {
                // cap name length at 63 chars (PostgreSQL identifier limit)
                name = SqlUtil.cutStr(field, 63);
            }
            // strip surrounding whitespace and any UTF-8 BOM characters;
            // the BOM is a literal, so plain replace (not regex) suffices
            name = name.trim().replace("\uFEFF", "");
            // special characters are not allowed in the header
            if (name.trim().matches(DatasetConstant.SPECIAL_CHARACTER_REGEX)) {
                name = name.trim().replaceAll(DatasetConstant.SPECIAL_CHARACTER_REPLACE, "");
            }
            // duplicate-name check for the data to be uploaded; returns a
            // uniquified field name
            name = checkDuplicateName(name, nameList, true);

            head.add(varcharHead(name));
        }
        return head;
    }

    /**
     * Builds headers for CSV parsing, optionally replacing every field with
     * a generated default name.
     *
     * @param fields      raw header fields
     * @param needReplace when {@code false}, every column gets a generated
     *                    default name; when {@code true}, the original names
     *                    are sanitized and de-duplicated instead
     * @return one varchar-typed, lower-cased {@code HeadVO} per field
     */
    public static List<HeadVO> toHead(List<String> fields, boolean needReplace) {
        List<HeadVO> head = new ArrayList<>(fields.size());
        int i = 1;
        String name;
        List<String> nameList = new ArrayList<>();
        for (String field : fields) {
            if (!needReplace){
                // original names are not wanted: use the generated name directly
                name = DatasetConstant.DEFAULT_FIELD_NAME + i;
            }else{
                // otherwise sanitize the original name through several checks
                if (StringUtils.isBlank(field)) {
                    name = DatasetConstant.DEFAULT_FIELD_NAME + i;
                } else {
                    // special characters are not allowed in the header
                    name = FileImportUtil.illegalChar(field);
                }
                // sanitizing may have removed everything; fall back to default
                if (StringUtils.isBlank(name)) {
                    name = DatasetConstant.DEFAULT_FIELD_NAME + i;
                }

                // duplicate-name check for the data to be uploaded; returns a
                // uniquified field name
                name = checkDuplicateName(name, nameList, true);

            }

            head.add(varcharHead(name.toLowerCase()));
            i++;
        }
        return head;
    }

    /**
     * Builds headers for JSON parsing. Blank keys get sequential default
     * names; duplicates are renamed.
     *
     * @param keys JSON object keys; may be {@code null}
     * @return one varchar-typed {@code HeadVO} per key, or an empty list
     *         when {@code keys} is {@code null}
     */
    public static List<HeadVO> toHead(List<String> keys) {
        if (keys == null) {
            return Collections.emptyList();
        }
        List<HeadVO> head = new ArrayList<>(keys.size());
        String name;
        List<String> nameList = new ArrayList<>();
        // counter only advances for blank keys, mirroring toHead(String[])
        int cnt = 1;
        for (int i = 0; i < keys.size(); i++) {
            if (StringUtils.isBlank(keys.get(i))) {
                name = DatasetConstant.DEFAULT_FIELD_NAME + cnt++;
            } else {
                name = keys.get(i);
            }
            // duplicate-name check for the data to be uploaded; returns a
            // uniquified field name
            name = checkDuplicateName(name, nameList, true);
            head.add(varcharHead(name.trim()));
        }
        return head;
    }

    /**
     * Builds a header VO with the shared lowercase varchar column type.
     *
     * @param name final (already sanitized) column name
     * @return varchar-typed header VO
     */
    private static HeadVO varcharHead(String name) {
        return HeadVO.builder()
            .name(name)
            .type(VARCHAR_TYPE)
            .build();
    }

}
