package org.sxp.modules.dataworks.dto;

import com.alibaba.fastjson.JSONObject;
import lombok.Data;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import org.sxp.common.validator.group.*;

import javax.validation.constraints.NotBlank;
import javax.validation.constraints.NotNull;
import java.io.Serializable;
import java.util.Date;
import java.util.List;

/**
 * DTO describing a data-synchronization task: a DataX job definition plus
 * scheduling metadata.  Common fields come first, followed by per-engine
 * sections (relational DB, MongoDB, Hive/HDFS, HBase).
 *
 * Original note (translated): if this also needs to be published over RPC,
 * copy it into the shared dependency module used for inter-RPC data exchange.
 *
 * @author 沈兴平
 * @date 2021/12/30
 */
@Data
@ApiModel(value = "",description = "沈兴平-2021/12/30")
public class SyncDTO implements Serializable {
	private static final long serialVersionUID = 1L;

	/**
	 * Primary key.  Required only when updating an existing task.
	 */
	@NotNull(message="id 不能为空", groups = {UpdateGroup.class})
	@ApiModelProperty(value = "主键-主键")
	private String id;

	/**
	 * Task name.  Required on both create (AddGroup) and update (UpdateGroup).
	 */
	@ApiModelProperty(value = "名称")
	@NotBlank(message="名称 不能为空", groups = {UpdateGroup.class,AddGroup.class})
	private String name;


	/**
	 * Source database type.
	 * NOTE(review): {@code @NotNull} still accepts an empty string; sibling
	 * String fields (e.g. {@link #name}) use {@code @NotBlank} — confirm
	 * whether "" should be allowed here.
	 */
	@ApiModelProperty(value = "来源库类型")
	@NotNull(message="来源库类型 不能为空", groups = {UpdateGroup.class,AddGroup.class})
	private String sourceDbType;

	/**
	 * Target database type.
	 * NOTE(review): same @NotNull-vs-@NotBlank question as {@link #sourceDbType}.
	 */
	@ApiModelProperty(value = "目标库类型")
	@NotNull(message="目标库类型 不能为空", groups = {UpdateGroup.class,AddGroup.class})
	private String targetDbType;

	/**
	 * Task concurrency (number of parallel sync channels).
	 */
	@ApiModelProperty(value = "任务并发数")
	private Integer syncNum;
	/**
	 * Sync rate limit, in MB/s.
	 */
	@ApiModelProperty(value = "同步速率(单位MB/s)")
	private Integer syncRate;
	/**
	 * Upper limit on the number of rows read.
	 */
	@ApiModelProperty(value = "读取行数的限制")
	private Integer syncRecord;

	/**
	 * XXL-Job scheduler task id.
	 */
	@ApiModelProperty(value = "调度任务ID")
	private Integer jobinfoId;
	// Display name of the XXL-Job task; intentionally (?) not exposed via Swagger.
	private String jobinfoName;
	/**
	 * DataX job definition (JSON expression) executed for this task.
	 */
	@ApiModelProperty(value = "datax任务表达式")
	private String dataxJson;

	/**
	 * Creation timestamp.
	 */
	@ApiModelProperty(value = "创建时间")
	private Date createTime;

	/**
	 * Last-modification timestamp.
	 */
	@ApiModelProperty(value = "修改时间")
	private Date updateTime;

	/**
	 * Id of the user who created the task.
	 */
	@ApiModelProperty(value = "创建人")
	private Long createUser;

	/**
	 * Id of the user who last modified the task.
	 */
	@ApiModelProperty(value = "修改人")
	private Long updateUser;

	/**
	 * Resource group (workspace) id.
	 * NOTE(review): this constraint declares no validation groups, so it is
	 * only checked under the Default group — it will NOT fire when validation
	 * runs with AddGroup/UpdateGroup like every other constraint in this
	 * class.  Confirm whether that is intentional.
	 */
	@ApiModelProperty(value = "资源分组")
	@NotBlank(message = "资源分组不能为空")
	private String spaceId;


	/**
	 * ---- Relational-database fields (begin) -------------------------------
	 */
	@ApiModelProperty(value = "来源库")
	private String sourceDb;
	@ApiModelProperty(value = "来源表")
	private String sourceTable;
	@ApiModelProperty(value = "目标库")
	private String targetDb;
	@ApiModelProperty(value = "目标表")
	private String targetTable;
	@ApiModelProperty(value = "写模式(insert/update/replace)")
	private String writeMode;
	@ApiModelProperty(value = "过滤sql")
	private String whereSql;
	@ApiModelProperty(value = "源映射字段json数组")
	private List<JSONObject> sourceField;
	@ApiModelProperty(value = "目标映射字段json数组")
	private List<JSONObject> targetField;
	@ApiModelProperty(value = "自定义SQL")
	private String querySql;
	// SQL run before the job starts — presumably the DataX writer preSql; confirm.
	private String preSql;
	// SQL run after the job finishes — presumably the DataX writer postSql; confirm.
	private String postSql;
	// Column used to split the read into parallel ranges (DataX splitPk).
	private String splitPk;
	// Tolerated number of error records before the job is failed — TODO confirm unit (count vs percentage).
	private Integer errorLimit;
	/**
	 * ---- Relational-database fields (end) ---------------------------------
	 */
	/**
	 * ---- Non-relational-database fields (begin) ---------------------------
	 */
	// Field mappings serialized as plain strings (instead of the JSON lists above).
	private String sourceFieldStr;
	private String targetFieldStr;
	/**
	 * ---- Non-relational-database fields (end) -----------------------------
	 */
	/**
	 * ---- MongoDB-specific fields ------------------------------------------
	 */
	// Lower bound of the query/split range.
	private String lowerBound;
	// Upper bound of the query/split range.
	private String upperBound;
	// Whether the bound column is a Mongo ObjectId — TODO confirm against the DataX mongodbreader config.
	private Boolean isObjectId;

	/**
	 * ---- Hive/HDFS: source side -------------------------------------------
	 */
	@NotNull(message="数据源文件类型 不能为空", groups = {HiveReaderGroup.class})
	private String sourceFileType;
	@NotNull(message="数据源存储路径 不能为空", groups = {HiveReaderGroup.class})
	private String sourcePath;
	@NotNull(message="数据源字段分隔符 不能为空", groups = {HiveReaderGroup.class})
	private String sourceFieldDelimiter;
	// Optional reader settings: compression codec, Hadoop config JSON, file encoding.
	private String sourceCompress;
	private String sourceHadoopConfig;
	private String sourceEncoding;
	// Kerberos authentication for the source cluster (flag + keytab + principal).
	private Boolean sourceHaveKerberos;
	private String sourceKerberosKeytabFilePath;
	private String sourceKerberosPrincipal;
	/**
	 * ---- Hive/HDFS: target side -------------------------------------------
	 */
	@NotNull(message="目标源文件类型 不能为空", groups = {HiveWriterGroup.class})
	private String targetFileType;
	@NotNull(message="目标源存储路径 不能为空", groups = {HiveWriterGroup.class})
	private String targetPath;
	@NotNull(message="目标源文件名 不能为空", groups = {HiveWriterGroup.class})
	private String targetFileName;
	@NotNull(message="目标源字段分隔符 不能为空", groups = {HiveWriterGroup.class})
	private String targetFieldDelimiter;
	// Optional writer settings, mirroring the source-side trio above.
	private String targetCompress;
	private String targetHadoopConfig;
	private String targetEncoding;
	// Kerberos authentication for the target cluster (flag + keytab + principal).
	private Boolean targetHaveKerberos;
	private String targetKerberosKeytabFilePath;
	private String targetKerberosPrincipal;

	/**
	 * ---- HBase: source side -----------------------------------------------
	 */
	@NotNull(message="来源库版本 不能为空", groups = {HbaseReaderGroup.class})
	private String sourceDbVersion;
	@NotNull(message="读取模式 不能为空", groups = {HbaseReaderGroup.class})
	private String sourceMode;
	/**
	 * ---- HBase: target side -----------------------------------------------
	 */
	@NotNull(message="目标库版本 不能为空", groups = {HbaseWriterGroup.class})
	private String targetDbVersion;
	// Target cluster connection config (JSON) and write mode; optional.
	private String targetHbaseConfig;
	private String targetMode;
	@NotNull(message="rowkey列 不能为空", groups = {HbaseWriterGroup.class})
	private String rowkeyColumn;
	// Column supplying the HBase cell version/timestamp; optional.
	private String versionColumn;
}
