package com.hexinfo.dmpro.dev.data.transfer.model;

import cn.hutool.core.date.DateUnit;
import cn.hutool.core.date.DateUtil;
import cn.hutool.core.util.StrUtil;
import com.baomidou.mybatisplus.annotation.*;
import com.baomidou.mybatisplus.extension.activerecord.Model;
import com.fasterxml.jackson.annotation.JsonFormat;
import com.hexinfo.dmpro.dev.data.transfer.common.enums.DevDataTransferListStatusEnum;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import lombok.EqualsAndHashCode;
import org.springframework.validation.annotation.Validated;

import java.util.Date;
import java.util.List;

/**
 * Table-level import task entity, mapped to TD_DEV_DATA_IMPORT_LIST.
 *
 * <p>Persisted columns describe one table's import job (source file, FTP/compression
 * flags, status, timings); the {@code @TableField(exist = false)} fields are transient
 * carriers for search criteria and for HDFS/FTP/Hive/Impala connection details used
 * during execution. Derived getters ({@code getImportStatusName()} etc.) are exposed
 * to JSON serialization via Lombok's {@code @Data}.
 *
 * @author yyh.huang
 * @date 2023-10-25 19:15:04
 */
@Data
@Validated
@TableName("TD_DEV_DATA_IMPORT_LIST")
@EqualsAndHashCode(callSuper = true)
@ApiModel(value = "表导入任务表")
public class DevDataImportList extends Model<DevDataImportList> {
    private static final long serialVersionUID = 1L;

    /**
     * Primary key (UUID assigned by MyBatis-Plus on insert).
     */
    @TableId(type = IdType.ASSIGN_UUID)
    @ApiModelProperty(value = "ID")
    private String id;
    /**
     * Code of the parent import task this table-level task belongs to.
     */
    @ApiModelProperty(value = "该表级任务所属导入任务")
    private String taskCode;
    /**
     * Target database name.
     */
    @ApiModelProperty(value = "库名")
    private String dbName;
    /**
     * Target table name.
     */
    @ApiModelProperty(value = "表名")
    private String tableName;
    /**
     * Name of the file being imported.
     */
    @ApiModelProperty(value = "导入文件名称")
    private String fileName;
    /**
     * Size of the import file (stored as text).
     */
    @ApiModelProperty(value = "导入文件大小")
    private String fileSize;
    /**
     * Staging location the import file was transferred to.
     */
    @ApiModelProperty(value = "导入文件转储位置")
    private String filePath;
    /**
     * Partition value for the target table.
     */
    @ApiModelProperty(value = "分区值")
    private String dataFlag;
    /**
     * Whether FTP transfer is used: Y = yes, N = no.
     */
    @ApiModelProperty(value = "是否使用FTP传输,Y是,N否")
    private String isFtpTransfer;
    /**
     * Whether the file is compressed: Y = yes, N = no.
     */
    @ApiModelProperty(value = "文件是否被压缩,Y是,N否")
    private String isFileCompress;
    /**
     * Whether a signal (flag) file exists: Y = yes, N = no.
     */
    @ApiModelProperty(value = "是否存在信号文件,Y是,N否")
    private String isCreateFlagFile;
    /**
     * Import status code; expected to match a
     * {@link DevDataTransferListStatusEnum} constant name, but may be null
     * or hold a legacy/unknown value (see {@link #getImportStatusName()}).
     */
    @ApiModelProperty(value = "导入表任务状态")
    private String importStatus;
    /**
     * Import start time.
     */
    @ApiModelProperty(value = "导入表开始时间")
    private Date startTime;
    /**
     * Import end time.
     */
    @ApiModelProperty(value = "导入表结束时间")
    private Date endTime;

    @ApiModelProperty(value = "错误信息")
    private String errorMessage;

    @ApiModelProperty(value = "执行任务AGENT")
    private String agentKey;

    @ApiModelProperty(value = "相关SQL内容")
    private String sqlContent;

    @ApiModelProperty(value = "失败次数")
    private Integer errorCount;

    /**
     * Creator (auto-filled on insert).
     */
    @ApiModelProperty(value = "创建人")
    @TableField(fill = FieldFill.INSERT)
    private String createBy;
    /**
     * Creation time (auto-filled on insert).
     */
    @ApiModelProperty(value = "创建时间")
    @TableField(fill = FieldFill.INSERT)
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss" ,timezone = "GMT+8")
    private Date createTime;
    /**
     * Last updater (auto-filled on insert/update).
     */
    @ApiModelProperty(value = "更新人")
    @TableField(fill = FieldFill.INSERT_UPDATE)
    private String lastUpdateBy;
    /**
     * Last update time (auto-filled on insert/update).
     */
    @ApiModelProperty(value = "更新时间")
    @TableField(fill = FieldFill.INSERT_UPDATE)
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss" ,timezone = "GMT+8")
    private Date lastUpdateTime;
    /**
     * Logical-delete flag (auto-filled on insert).
     */
    @ApiModelProperty(value = "删除标识")
    @TableField(fill = FieldFill.INSERT)
    private String del;

    /**
     * Search criterion: creation-time range (not a table column).
     */
    @TableField(exist = false)
    private List<String> createTimeRange;

    // --- Transient execution parameters (not persisted) -------------------

    @TableField(exist = false)
    private String hdfsTmpPath;

    @TableField(exist = false)
    private String ftpHost;

    @TableField(exist = false)
    private Integer ftpPort;

    @TableField(exist = false)
    private String ftpUserName;

    @TableField(exist = false)
    private String ftpPassword;

    @TableField(exist = false)
    private String hiveJdbcUrl;

    @TableField(exist = false)
    private String hiveUserName;

    @TableField(exist = false)
    private String hivePassword;

    @TableField(exist = false)
    private List<String> hdfsNameNodeHosts;

    @TableField(exist = false)
    private Integer hdfsPort;

    // NOTE(review): "Sheel" looks like a typo for "Shell", but renaming would
    // break the Lombok-generated accessor contract for existing callers.
    @TableField(exist = false)
    private String impalaSheelAddress;

    @TableField(exist = false)
    private String impalaUserName;

    @TableField(exist = false)
    private String impalaPassword;

    /**
     * Human-readable name of the import status.
     *
     * <p>Guarded: {@code Enum.valueOf} throws {@code NullPointerException} for a
     * null code and {@code IllegalArgumentException} for an unknown one, which
     * would previously abort JSON serialization of the whole entity. A blank
     * status yields {@code ""}; an unrecognized code is returned as-is.
     *
     * @return the status display name, the raw code if unrecognized, or "" if blank
     */
    public String getImportStatusName() {
        if (StrUtil.isBlank(importStatus)) {
            return "";
        }
        try {
            return DevDataTransferListStatusEnum.valueOf(importStatus).getName();
        } catch (IllegalArgumentException e) {
            // Unknown/legacy status code — surface the raw value instead of failing.
            return importStatus;
        }
    }

    /**
     * Start time formatted as "yyyy-MM-dd HH:mm:ss", or "" when unset.
     */
    public String getStartTimeString() {
        if (startTime != null){
            return DateUtil.formatDateTime(startTime);
        }
        return "";
    }

    /**
     * End time formatted as "yyyy-MM-dd HH:mm:ss", or "" when unset.
     */
    public String getEndTimeString() {
        if (endTime != null){
            return DateUtil.formatDateTime(endTime);
        }
        return "";
    }

    /**
     * Elapsed time between start and end, rendered as seconds (under a
     * minute) or minutes + seconds; "-" when either endpoint is missing.
     */
    public String getTimeConsuming() {
        if (startTime != null && endTime != null) {
            long seconds = DateUtil.between(startTime, endTime, DateUnit.SECOND);
            if (seconds < 60) {
                return seconds + "秒";
            } else {
                return StrUtil.format("{}分{}秒", seconds / 60, seconds % 60);
            }
        }
        return "-";
    }

    /**
     * Fully-qualified "db.table" identifier for display.
     */
    public String getDbAndTableName() {
        return dbName + "." + tableName;
    }

}
