package avicit.bdp.dcs.job.dto;

import avicit.bdp.dcs.stream.dto.KafkaJsonDto;
import io.swagger.annotations.ApiModelProperty;

import javax.persistence.Entity;
import java.io.Serializable;
import java.util.List;

/**
 * @金航数码科技有限责任公司
 * @作者：developer
 * @邮箱：developer@avic-digital.com
 * @创建时间： 2020-08-04 11:32
 * @类说明：
 * @修改记录：
 */
// NOTE(review): the former @Entity annotation was removed — this is a plain DTO
// with no @Id, and a JPA @Entity without an identifier is invalid and would fail
// persistence-unit bootstrap if this package were ever entity-scanned.
public class JobDetailDto implements Serializable {

    // Required because the class is Serializable; previously this field was dead
    // (declared without implementing Serializable).
    private static final long serialVersionUID = 1L;

    @ApiModelProperty(value = "id")
    private String id;

    @ApiModelProperty(value = "任务名称")
    private String name;

    @ApiModelProperty(value = "源数据源")
    private String srcDatasourceId;

    // Fixed duplicated description: this is the source datasource NAME, not its id.
    @ApiModelProperty(value = "源数据源名称")
    private String srcDatasourceName;

    @ApiModelProperty(value = "源数据源类型")
    private String srcDatasourceType;

    @ApiModelProperty(value = "目标数据源")
    private String dstDatasourceId;

    // Fixed duplicated description: this is the target datasource NAME, not its id.
    @ApiModelProperty(value = "目标数据源名称")
    private String dstDatasourceName;

    @ApiModelProperty(value = "目标数据源类型")
    private String dstDatasourceType;

    @ApiModelProperty(value = "迁移类型，表结构迁移：1，数据迁移：2，两者都有：1,2")
    private String transferType;

    @ApiModelProperty(value = "同步初始化：否：0，是：1")
    private String tableInit;

    @ApiModelProperty(value = "目标已存在表的处理模式，预检查并报错拦截：0，覆盖目标表：1,保留目标表：2")
    private String processMode;

    @ApiModelProperty(value = "选择的表")
    private List<TableDto> selectedTableList;

    @ApiModelProperty(value = "待同步目录")
    private String selectedFile;

    @ApiModelProperty(value = "kafka配置json数据")
    private KafkaJsonDto kafkaJsonDto;

    // Accessors below follow field declaration order for readability.

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getSrcDatasourceId() {
        return srcDatasourceId;
    }

    public void setSrcDatasourceId(String srcDatasourceId) {
        this.srcDatasourceId = srcDatasourceId;
    }

    public String getSrcDatasourceName() {
        return srcDatasourceName;
    }

    public void setSrcDatasourceName(String srcDatasourceName) {
        this.srcDatasourceName = srcDatasourceName;
    }

    public String getSrcDatasourceType() {
        return srcDatasourceType;
    }

    public void setSrcDatasourceType(String srcDatasourceType) {
        this.srcDatasourceType = srcDatasourceType;
    }

    public String getDstDatasourceId() {
        return dstDatasourceId;
    }

    public void setDstDatasourceId(String dstDatasourceId) {
        this.dstDatasourceId = dstDatasourceId;
    }

    public String getDstDatasourceName() {
        return dstDatasourceName;
    }

    public void setDstDatasourceName(String dstDatasourceName) {
        this.dstDatasourceName = dstDatasourceName;
    }

    public String getDstDatasourceType() {
        return dstDatasourceType;
    }

    public void setDstDatasourceType(String dstDatasourceType) {
        this.dstDatasourceType = dstDatasourceType;
    }

    public String getTransferType() {
        return transferType;
    }

    public void setTransferType(String transferType) {
        this.transferType = transferType;
    }

    public String getTableInit() {
        return tableInit;
    }

    public void setTableInit(String tableInit) {
        this.tableInit = tableInit;
    }

    public String getProcessMode() {
        return processMode;
    }

    public void setProcessMode(String processMode) {
        this.processMode = processMode;
    }

    public List<TableDto> getSelectedTableList() {
        return selectedTableList;
    }

    public void setSelectedTableList(List<TableDto> selectedTableList) {
        this.selectedTableList = selectedTableList;
    }

    public String getSelectedFile() {
        return selectedFile;
    }

    public void setSelectedFile(String selectedFile) {
        this.selectedFile = selectedFile;
    }

    public KafkaJsonDto getKafkaJsonDto() {
        return kafkaJsonDto;
    }

    public void setKafkaJsonDto(KafkaJsonDto kafkaJsonDto) {
        this.kafkaJsonDto = kafkaJsonDto;
    }

}