package cn.getech.data.development.service.impl;

import cn.getech.data.development.config.properties.BdpJobConfig;
import cn.getech.data.development.config.properties.DataSyncHiveConfig;
import cn.getech.data.development.constant.*;
import cn.getech.data.development.controller.datasyn.DbSql;
import cn.getech.data.development.entity.*;
import cn.getech.data.development.mapper.*;
import cn.getech.data.development.model.dto.*;
import cn.getech.data.development.service.*;
import cn.getech.data.development.utils.*;
import cn.getech.data.development.utils.shell.ExecuteShellUtil;
import cn.getech.data.development.utils.sync.DBUtils;
import cn.getech.data.intelligence.common.exception.BizExceptionEnum;
import cn.getech.data.intelligence.common.exception.RRException;
import cn.getech.data.intelligence.common.utils.DateUtils;
import cn.getech.data.intelligence.common.utils.PageUtils;
import cn.getech.data.intelligence.common.utils.R;
import cn.getech.data.intelligence.common.validator.ValidatorUtils;
import cn.getech.system.center.entity.SysSecurityDataLevelEntity;
import cn.getech.system.center.entity.SysUserEntity;
import cn.getech.system.center.mapper.SysUserMapper;
import cn.getech.system.center.service.SysSecurityDataLevelService;
import cn.getech.system.center.utils.ShiroUtils;
import cn.hutool.core.collection.CollectionUtil;
import cn.hutool.core.date.DateTime;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.jcraft.jsch.JSchException;
import com.xxl.job.core.log.XxlJobLogger;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang.StringUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.CollectionUtils;

import javax.annotation.Resource;
import java.sql.*;
import java.text.NumberFormat;
import java.time.LocalDateTime;
import java.util.Date;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.regex.Pattern;
import java.util.stream.Collectors;


/**
 * @Author luoxi
 * @create 2020/11/16 14:52
 */
@Service
@Slf4j
public class DataSyncInfoServiceImpl extends ServiceImpl<DataSyncInfoMapper, DataSyncInfo> implements DataSyncInfoService {
    // --- MyBatis-Plus mappers: direct table access for sync tasks, table records and connections ---
    @Autowired
    private DataSyncInfoMapper dataSyncInfoMapper;

    @Autowired
    private DataSyncTableRecordMapper dataSyncTableRecordMapper;

    @Autowired
    private ConfConnectMapper confConnectMapper;

    // Decrypts stored data-source passwords before opening JDBC connections.
    @Autowired
    private SecurityUtil securityUtil;

    @Autowired
    private TableInfoService tableInfoService;

    // Hive-side configuration for the sync target.
    @Autowired
    private DataSyncHiveConfig config;

    @Autowired
    private TableFieldInfoService tableFieldInfoService;

    @Autowired
    private IAllPermissionConfigService iAllPermissionConfigService;

    @Autowired
    private SysSecurityDataLevelService sysSecurityDataLevelService;

    @Autowired
    private BdpJobConfig bdpJobConfig;

    @Autowired
    private RangerDataService rangerDataService;

    @Resource
    private TableInfoMapper tableInfoMapper;

    @Autowired
    private TableFieldInfoMapper tableFieldInfoMapper;


    // Per-table sync outcome log; listPage() derives overall task status from it.
    @Autowired
    private DataSyncTableLogMapper dataSyncTableLogMapper;

    @Autowired
    private SysUserMapper sysUserMapper;

    @Autowired
    private TableRegisterService tableRegisterService;

    @Autowired
    private TableRelationshipService tableRelationshipService;
    private static ConcurrentMap<Integer, String> concurrentMap = new ConcurrentHashMap();


    /**
     * 新建整库同步任务
     * Creates a whole-database sync task together with its per-table sync records.
     *
     * @param dataSyncInfoAddDTO task definition; {@code dataSource} may carry the connection
     *                           id as a string when {@code sourceId} is null (legacy clients)
     * @throws RRException when the referenced connection does not exist or persistence fails
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public void add(DataSyncInfoAddDTO dataSyncInfoAddDTO) {
        Integer connectId = dataSyncInfoAddDTO.getSourceId();
        if (connectId == null) {
            try {
                // Legacy clients send the connection id inside the dataSource field.
                connectId = Integer.valueOf(dataSyncInfoAddDTO.getDataSource());
            } catch (NumberFormatException ignored) {
                // Not numeric: fall through; selectById(null) yields null and is rejected below.
            }
        }
        ConfConnect confConnect = confConnectMapper.selectById(connectId);
        if (Objects.isNull(confConnect)) {
            throw new RRException("该同步任务的连接源信息不存在!");
        }
        if (StringUtils.isBlank(dataSyncInfoAddDTO.getTableMapping())) {
            dataSyncInfoAddDTO.setTableMapping(DataSyncTaskConstant.prefix);
        }
        Integer tenantId = ShiroUtils.getTenantId();
        // Write rule is currently fixed to INSERT OVERWRITE regardless of client input.
        dataSyncInfoAddDTO.setWriteRule(DataSyncWriteRuleEnum.INSERT_OVERWRITE.getAliasName());
        DataSyncInfo dataSyncInfo = new DataSyncInfo();
        BeanUtils.copyProperties(dataSyncInfoAddDTO, dataSyncInfo);
        dataSyncInfo.setTaskSchedule(0);
        dataSyncInfo.setTenantId(tenantId);
        dataSyncInfo.setDataSource(confConnect.getName());
        dataSyncInfo.setSourceId(connectId);
        dataSyncInfo.setCreateTime(LocalDateTime.now());
        dataSyncInfo.setCreatePer(ShiroUtils.getUserId().intValue());
        dataSyncInfo.setCreateName(ShiroUtils.getUserEntity().getUsername());
        dataSyncInfo.setTaskStatus(DataSyncTaskEnum.TASK_STATUS_NOT_START.getCode());
        dataSyncInfo.setDataSourceType(ConnectTypeEnum.ObjOf(Integer.valueOf(dataSyncInfo.getDataSourceType())).getName());
        List<DataSyncTableRecord> recordList = dataSyncInfoAddDTO.getRecordList();
        try {
            dataSyncInfoMapper.insert(dataSyncInfo);
            // Link every table record to the freshly generated task id.
            recordList.forEach(record -> record.setDataInfoId(dataSyncInfo.getId()));
            dataSyncTableRecordMapper.batchInsert(recordList);
        } catch (Exception e) {
            // Log with full stack trace instead of printStackTrace() + message-only logging.
            log.error("新建整库同步任务失败", e);
            throw new RRException(BizExceptionEnum.SERVER_ERROR);
        }
    }

    /**
     * 多条件分页查询
     * Pages sync tasks by tenant/creator/name/type/status and refreshes each row's task
     * status from its per-table sync logs: any failed table (syncStatus = 2) marks the
     * task failed; when every configured table succeeded (syncStatus = 1) it is marked
     * successful.
     *
     * @param dataSyncQueryDTO filter, sort and paging criteria
     * @return the requested page with write rules mapped to their display names
     * @throws RRException on any persistence error
     */
    @Override
    public IPage<DataSyncInfo> listPage(DataSyncQueryDTO dataSyncQueryDTO) {
        LambdaQueryWrapper<DataSyncInfo> wrapper = new LambdaQueryWrapper<>();
        String dataSourceType = dataSyncQueryDTO.getDataSourceType();
        String name = null;
        if (StringUtils.isNotBlank(dataSourceType)) {
            // The table stores the connection type's display name, not its numeric code.
            name = ConnectTypeEnum.ObjOf(Integer.valueOf(dataSourceType)).getName();
        }
        Integer tenantId = ShiroUtils.getTenantId();
        wrapper.eq(tenantId != null, DataSyncInfo::getTenantId, tenantId);
        wrapper.like(StringUtils.isNotBlank(dataSyncQueryDTO.getCreateName()), DataSyncInfo::getCreateName, dataSyncQueryDTO.getCreateName())
                .like(StringUtils.isNotBlank(dataSyncQueryDTO.getTaskName()), DataSyncInfo::getTaskName, dataSyncQueryDTO.getTaskName())
                .eq(Objects.nonNull(dataSyncQueryDTO.getSearchUserId()), DataSyncInfo::getCreatePer, dataSyncQueryDTO.getSearchUserId())
                .eq(StringUtils.isNotBlank(name), DataSyncInfo::getDataSourceType, name)
                .eq(Objects.nonNull(dataSyncQueryDTO.getTaskStatus()), DataSyncInfo::getTaskStatus, dataSyncQueryDTO.getTaskStatus());
        // Sort by creation time; descending is the default when no sort is given.
        if (StringUtils.equalsIgnoreCase("asc", dataSyncQueryDTO.getSort())) {
            wrapper.orderByAsc(DataSyncInfo::getCreateTime);
        }
        if (StringUtils.equalsIgnoreCase("desc", dataSyncQueryDTO.getSort())) {
            wrapper.orderByDesc(DataSyncInfo::getCreateTime);
        }
        if (StringUtils.isBlank(dataSyncQueryDTO.getSort())) {
            wrapper.orderByDesc(DataSyncInfo::getCreateTime);
        }
        IPage<DataSyncInfo> page = new Page<>(dataSyncQueryDTO.getCurrent(), dataSyncQueryDTO.getSize());
        try {
            page = dataSyncInfoMapper.selectPage(page, wrapper);
            if (CollectionUtils.isEmpty(page.getRecords())) {
                return page;
            }
            for (DataSyncInfo record : page.getRecords()) {
                record.setWriteRule(DataSyncWriteRuleEnum.getNameByAliasName(record.getWriteRule()));
                List<DataSyncTableRecord> dataSyncTableRecords = dataSyncTableRecordMapper.selectList(new LambdaQueryWrapper<DataSyncTableRecord>()
                        .eq(DataSyncTableRecord::getDataInfoId, record.getId()));
                //查询同步失败表的数量 (syncStatus = 2 means the table failed to sync)
                List<DataSyncTableLog> tableLogList = dataSyncTableLogMapper.selectList(new LambdaQueryWrapper<DataSyncTableLog>()
                        .eq(DataSyncTableLog::getDataInfoId, record.getId()).eq(DataSyncTableLog::getSyncStatus, 2)
                        .select(DataSyncTableLog::getId));
                if (CollectionUtil.isNotEmpty(tableLogList)) {
                    record.setTaskStatus(DataSyncTaskEnum.TASK_STATUS_FAIL.getCode());
                    dataSyncInfoMapper.updateStatusById(record.getId(), DataSyncTaskEnum.TASK_STATUS_FAIL.getCode());
                } else {
                    //查询同步成功表的个数 (syncStatus = 1 means the table synced successfully)
                    List<DataSyncTableLog> tableLogs = dataSyncTableLogMapper.selectList(new LambdaQueryWrapper<DataSyncTableLog>()
                            .eq(DataSyncTableLog::getDataInfoId, record.getId()).eq(DataSyncTableLog::getSyncStatus, 1));
                    if (CollectionUtil.isEmpty(tableLogs)) {
                        continue;
                    }
                    // All configured tables succeeded -> whole task is successful.
                    if (tableLogs.size() == dataSyncTableRecords.size()) {
                        record.setTaskStatus(DataSyncTaskEnum.TASK_STATUS_SUCCESS.getCode());
                        dataSyncInfoMapper.updateStatusById(record.getId(), DataSyncTaskEnum.TASK_STATUS_SUCCESS.getCode());
                    }
                }
            }
        } catch (Exception e) {
            // Log full stack trace instead of printStackTrace() + message-only logging.
            log.error("服务端内部错误", e);
            throw new RRException(BizExceptionEnum.SERVER_ERROR);
        }
        return page;
    }


    /**
     * 计算得到百分数
     * Converts the task's in-memory progress entry ("done:total") into a whole-number
     * percentage string.
     *
     * @param record the sync task whose id keys the progress map
     * @return progress percentage without decimals, "0" when absent, malformed, or total is 0
     */
    private String getPercentNum(DataSyncInfo record) {
        String progress = concurrentMap.get(record.getId());
        if (StringUtils.isBlank(progress)) {
            return "0";
        }
        String[] parts = progress.split(":");
        // Guard against malformed entries; the original threw on "x" or "a:0" style values.
        if (parts.length < 2) {
            return "0";
        }
        int done = Integer.parseInt(parts[0]);
        int total = Integer.parseInt(parts[1]);
        if (total == 0) {
            // Avoid float division by zero, which would format as "∞".
            return "0";
        }
        // Format without decimals (e.g. 42/100 -> "42").
        NumberFormat numberFormat = NumberFormat.getInstance();
        numberFormat.setMaximumFractionDigits(0);
        return numberFormat.format((float) done / (float) total * 100);
    }

    /**
     * 详情查询
     * Loads one sync task with its table records and table count.
     *
     * @param id sync task id
     * @return populated DTO, or an empty DTO when the task does not exist
     */
    @Override
    public DataSyncReturnDTO get(Integer id) {
        DataSyncReturnDTO dataSyncReturnDTO = new DataSyncReturnDTO();
        DataSyncInfo dataSyncInfo = dataSyncInfoMapper.selectById(id);
        if (Objects.isNull(dataSyncInfo)) {
            return dataSyncReturnDTO;
        }
        dataSyncInfo.setWriteRule(DataSyncWriteRuleEnum.getNameByAliasName(dataSyncInfo.getWriteRule()));
        BeanUtils.copyProperties(dataSyncInfo, dataSyncReturnDTO);

        // Diamond operator replaces the original raw LambdaQueryWrapper.
        LambdaQueryWrapper<DataSyncTableRecord> wrapper = new LambdaQueryWrapper<>();
        wrapper.eq(DataSyncTableRecord::getDataInfoId, id);
        List<DataSyncTableRecord> dataSyncTableRecords = dataSyncTableRecordMapper.selectList(wrapper);
        dataSyncReturnDTO.setRecordList(dataSyncTableRecords);
        // Same predicate as the list query above, so the list size IS the table count;
        // the original issued a redundant second selectCount round trip.
        dataSyncReturnDTO.setTableCount(dataSyncTableRecords.size());
        return dataSyncReturnDTO;
    }


    /**
     * 批量删除同步任务。
     * Refuses to delete tasks that are still running, and rejects the batch when it mixes
     * the current user's tasks with someone else's.
     *
     * @param ids task ids to delete; silently returns when none of them exist
     * @throws RRException when a task is running, ownership is mixed, or the delete fails
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public void del(List<Integer> ids) {
        List<DataSyncInfo> dataSyncInfoList = dataSyncInfoMapper.selectList(new LambdaQueryWrapper<DataSyncInfo>().in(DataSyncInfo::getId, ids));
        if (CollectionUtil.isEmpty(dataSyncInfoList)) {
            return;
        }
        for (DataSyncInfo dataSyncInfo : dataSyncInfoList) {
            if (dataSyncInfo.getTaskStatus().equals(DataSyncTaskEnum.TASK_STATUS_STARTING.getCode())) {
                throw new RRException("任务还在进行中,不能删除!");
            }
        }

        Integer currentUserId = ShiroUtils.getUserId().intValue();
        LambdaQueryWrapper<DataSyncInfo> ownWrapper = new LambdaQueryWrapper<>();
        ownWrapper.eq(DataSyncInfo::getCreatePer, currentUserId)
                .in(DataSyncInfo::getId, ids);
        List<DataSyncInfo> ownTasks = dataSyncInfoMapper.selectList(ownWrapper);
        // NOTE(review): when the current user owns NONE of the ids, ownTasks is empty and
        // this check is skipped entirely, so the delete proceeds — confirm that is intended
        // given the error message says only one's own tasks may be deleted.
        if (!CollectionUtils.isEmpty(ownTasks)) {
            if (!Objects.equals(ids.size(), ownTasks.size())) {
                throw new RRException("只能删除自己创建的同步任务!");
            }
        }
        try {
            dataSyncInfoMapper.deleteBatchIds(ids);
        } catch (Exception e) {
            // Log with full stack trace instead of printStackTrace() + message-only logging.
            log.error("删除同步任务失败", e);
            throw new RRException(BizExceptionEnum.SERVER_ERROR);
        }
    }

    /**
     * 编辑
     * Updates a sync task and fully replaces its table records (delete-all, re-insert).
     *
     * @param dataSyncInfoAddDTO new task definition; {@code id} selects the task to update
     * @throws RRException when the referenced connection does not exist or persistence fails
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public void edit(DataSyncInfoAddDTO dataSyncInfoAddDTO) {
        Integer connectId = dataSyncInfoAddDTO.getSourceId();
        if (connectId == null) {
            try {
                // Legacy clients send the connection id inside the dataSource field.
                connectId = Integer.valueOf(dataSyncInfoAddDTO.getDataSource());
            } catch (NumberFormatException ignored) {
                // Not numeric: fall through; selectById(null) yields null and is rejected below.
            }
        }
        ConfConnect confConnect = confConnectMapper.selectById(connectId);
        if (Objects.isNull(confConnect)) {
            throw new RRException("该同步任务的连接源信息不存在!");
        }
        // Write rule is fixed to INSERT OVERWRITE, mirroring add().
        dataSyncInfoAddDTO.setWriteRule(DataSyncWriteRuleEnum.INSERT_OVERWRITE.getAliasName());
        DataSyncInfo dataSyncInfo = new DataSyncInfo();
        BeanUtils.copyProperties(dataSyncInfoAddDTO, dataSyncInfo);
        dataSyncInfo.setDataSource(confConnect.getName());
        dataSyncInfo.setSourceId(connectId);
        dataSyncInfo.setId(dataSyncInfoAddDTO.getId());
        dataSyncInfo.setUpdateTime(LocalDateTime.now());
        dataSyncInfo.setUpdatePer(ShiroUtils.getUserId().intValue());
        dataSyncInfo.setUpdateName(ShiroUtils.getUserEntity().getUsername());
        dataSyncInfo.setDataSourceType(ConnectTypeEnum.ObjOf(Integer.valueOf(dataSyncInfo.getDataSourceType())).getName());
        try {
            dataSyncInfoMapper.updateById(dataSyncInfo);
            // Replace-all strategy: wipe existing records, then re-insert the submitted set.
            dataSyncTableRecordMapper.delTableRecord(dataSyncInfoAddDTO.getId());
            dataSyncInfoAddDTO.getRecordList().forEach(record -> record.setDataInfoId(dataSyncInfoAddDTO.getId()));
            dataSyncTableRecordMapper.batchInsert(dataSyncInfoAddDTO.getRecordList());
        } catch (Exception e) {
            // Log with cause instead of printStackTrace(), which dropped the trace from logs.
            log.error("编辑整库同步任务失败", e);
            throw new RRException(BizExceptionEnum.SERVER_ERROR);
        }
    }

    /**
     * 根据数据来源查询数据源信息
     * Lists the current user's connections of the given type, applying the data-permission
     * filter, as label/value DTOs.
     *
     * @param dataSourceType connection type id
     * @return one DTO per connection (label = connection name, value = its database name);
     *         empty list when none match
     */
    @Override
    public List<DataSourceLevelDTO> getByDataSource(Integer dataSourceType) {
        Map<String, Object> params = new HashMap<>();
        params.put("create_per", ShiroUtils.getUserId().intValue());
        params.put("sidx", "id");
        params.put("order", "desc");
        params.put("typeId", dataSourceType);
        List<DataSourceLevelDTO> result = new ArrayList<>();
        DataPermissionUtil.putDataFilter(params, PermissionTypeEnum.RESOURCE.getType(), PermissionTypeEnum.QUERY.getType());
        List<ConfConnect> confConnects = confConnectMapper.listNewNoRelation(params);
        if (CollectionUtils.isEmpty(confConnects)) {
            return result;
        }
        for (ConfConnect confConnect : confConnects) {
            // Scoped per iteration; the original reused a single reference declared
            // outside the loop and a raw "new HashSet()".
            DataSourceLevelDTO dto = new DataSourceLevelDTO();
            dto.setLabel(confConnect.getName());
            dto.setConnectId(confConnect.getId());
            Set<String> dbNames = new HashSet<>();
            dbNames.add(confConnect.getDbname());
            dto.setValue(dbNames);
            result.add(dto);
        }
        return result;
    }

    /**
     * 根据数据库查询来源表信息
     * Pages the collected metadata tables and compares each against the live source
     * database: table existence, primary keys, and field types in both directions.
     * Each row is annotated with status 1 (metadata up to date) or 2 (drifted) and a
     * human-readable comparison result.
     *
     * @param dataBaseDTO connection id, database name, and paging info
     * @return one page of annotated tables; an empty page when the connection or
     *         collected metadata is missing
     */
    @Override
    public PageUtils<DataSyncMetaDataReturnDTO> getByDataBase(DataBaseDTO dataBaseDTO) {
        List<String> actualTableNames = Lists.newArrayList();
        ConfConnect confConnect = confConnectMapper.selectById(dataBaseDTO.getConnectId());
        if (Objects.isNull(confConnect)) {
            return new PageUtils<>(new ArrayList<>(), 0, 10, 1);
        }
        // NOTE(review): "||" means the password is decrypted for every non-WebService
        // source AND for WebService sources that have a password — confirm "&&" was not
        // intended here.
        if (!confConnect.getTypeId().equals(ConnectTypeEnum.WebService.getCode()) || StringUtils.isNotBlank(confConnect.getPassword())) {
            try {
                confConnect.setPassword(securityUtil.decrypt(confConnect.getPassword()));
            } catch (Exception e) {
                // Best effort: keep the stored password but log the cause
                // (the original only printed the stack trace to stderr).
                log.warn("数据源密码解密失败, connectId={}", confConnect.getId(), e);
            }
        }
        //从元数据获取表名信息
        String dbName = "";
        if (StringUtils.isNotEmpty(dataBaseDTO.getDbName())) {
            dbName = dataBaseDTO.getDbName();
        }
        ConnectTypeEnum t = ConnectTypeEnum.ObjOf(confConnect.getTypeId());
        String sourceConf = t.getUrl().replace("<host>", confConnect.getHost()).replace("<port>",
                confConnect.getPort()).replace("<db_name>", dbName);
        dataBaseDTO.setSourceConf(sourceConf);
        Integer tenantId = ShiroUtils.getTenantId();
        dataBaseDTO.setTenantId(tenantId);
        IPage<DataSyncMetaDataReturnDTO> param = new Page<>(dataBaseDTO.getPage(), dataBaseDTO.getLimit());
        IPage<DataSyncMetaDataReturnDTO> page = dataSyncInfoMapper.selectCollectionTableInfo(param, dataBaseDTO);
        if (CollectionUtil.isEmpty(page.getRecords())) {
            return new PageUtils<>(new ArrayList<>(), 0, 10, 1);
        }
        // Fills actualTableNames with the table names that exist in the source database.
        getJDBCQueryTables(dbName, actualTableNames, confConnect, page.getRecords());
        Map<String, List<DataSyncFieldDTO>> tableMap = new HashMap<>();
        Map<String, Set<String>> tableKeys = new HashMap<>();
        //从数据源获取表的字段 类型，长度,主键信息
        for (String tableName : actualTableNames) {
            tableKeys.put(tableName, getMysqlPrimaryKey(dbName, tableName, confConnect));
            tableMap.put(tableName, getMysqlFieldInfo(dataBaseDTO, tableName, confConnect));
        }
        List<Integer> tableIds = page.getRecords().stream().map(DataSyncMetaDataReturnDTO::getId).collect(Collectors.toList());
        List<DataSyncMetaDataReturnDTO> metaFieldsAllList = dataSyncInfoMapper.selectCollectionTableFieldInfo(tableIds);
        Map<Integer, List<DataSyncMetaDataReturnDTO>> groupByMaps = metaFieldsAllList.stream()
                .collect(Collectors.groupingBy(DataSyncMetaDataReturnDTO::getId));
        //获取元数据的表字段 类型 长度 和主键信息
        for (DataSyncMetaDataReturnDTO returnDTO : page.getRecords()) {
            returnDTO.setCompareResult("已是最新元数据");
            returnDTO.setStatus(1);
            List<DataSyncMetaDataReturnDTO> metaFieldsList = groupByMaps.get(returnDTO.getId());
            if (CollectionUtil.isEmpty(metaFieldsList)) {
                // No recorded metadata fields for this table: nothing to compare.
                continue;
            }
            //元数据中的字段
            Map<String, DataSyncMetaDataReturnDTO> metaFieldMap = new HashMap<>();
            Set<String> metaPrimaryKeys = Sets.newHashSet();
            for (DataSyncMetaDataReturnDTO dataReturnDTO : metaFieldsList) {
                if (dataReturnDTO.getFieldIsPrk() != null && dataReturnDTO.getFieldIsPrk().equals(1)) {
                    metaPrimaryKeys.add(dataReturnDTO.getFieldName());
                }
                metaFieldMap.put(dataReturnDTO.getFieldName(), dataReturnDTO);
            }
            if (!tableMap.containsKey(returnDTO.getTableName())) {
                returnDTO.setStatus(2);
                returnDTO.setCompareResult("真实数据中没有该表:" + returnDTO.getTableName());
                continue;
            }
            //实际库中字段
            List<DataSyncFieldDTO> actualFieldInfo = tableMap.get(returnDTO.getTableName());
            if (CollectionUtil.isEmpty(actualFieldInfo)) {
                returnDTO.setStatus(2);
                returnDTO.setCompareResult("真实数据中没有字段:" + returnDTO.getTableName());
                continue;
            }
            //实际库中字段按名索引
            Map<String, DataSyncFieldDTO> actualFieldMap = new HashMap<>();
            for (DataSyncFieldDTO dataSyncFieldDTO : actualFieldInfo) {
                actualFieldMap.put(dataSyncFieldDTO.getFieldName(), dataSyncFieldDTO);
            }
            Set<String> actualPrimaryKeys = tableKeys.get(returnDTO.getTableName());
            // Equal-size sets with one containsAll are equal; the original's second
            // containsAll check was redundant.
            if (actualPrimaryKeys.size() != metaPrimaryKeys.size()
                    || !actualPrimaryKeys.containsAll(metaPrimaryKeys)) {
                returnDTO.setStatus(2);
                String metaKey = CollectionUtil.isNotEmpty(metaPrimaryKeys) ? String.join(",", metaPrimaryKeys) : "空";
                String actualKey = CollectionUtil.isNotEmpty(actualPrimaryKeys) ? String.join(",", actualPrimaryKeys) : "空";
                returnDTO.setCompareResult(String.format("表%s中的主键为%s,元数据中主键为%s",
                        returnDTO.getTableName(), actualKey, metaKey));
                continue;
            }
            //正向对比：库中字段在元数据中是否存在且类型一致
            for (DataSyncFieldDTO field : actualFieldInfo) {
                DataSyncMetaDataReturnDTO fieldMeta = metaFieldMap.get(field.getFieldName());
                if (fieldMeta == null) {
                    returnDTO.setStatus(2);
                    returnDTO.setCompareResult(returnDTO.getTableName()
                            + "元数据表没有该字段:" + field.getFieldName());
                    break;
                }
                if (!fieldMeta.getFieldType().equalsIgnoreCase(field.getFieldType())) {
                    returnDTO.setStatus(2);
                    // Fixed typo: the original message said "原书中" instead of "元数据中".
                    returnDTO.setCompareResult(String.format("表%s的字段类型对比,库中为%s,元数据中为%s",
                            returnDTO.getTableName(), field.getFieldType(), fieldMeta.getFieldType()));
                    break;
                }
            }
            //反向对比：元数据字段在实际库中是否存在且类型一致
            for (DataSyncMetaDataReturnDTO field : metaFieldsList) {
                DataSyncFieldDTO actualField = actualFieldMap.get(field.getFieldName());
                if (actualField == null) {
                    returnDTO.setStatus(2);
                    returnDTO.setCompareResult(returnDTO.getTableName()
                            + "实际库中没有该字段:" + field.getFieldName());
                    break;
                }
                if (!actualField.getFieldType().equalsIgnoreCase(field.getFieldType())) {
                    returnDTO.setStatus(2);
                    returnDTO.setCompareResult(String.format("表%s的字段类型对比,库中为%s,元数据中为%s",
                            returnDTO.getTableName(), actualField.getFieldType(), field.getFieldType()));
                    break;
                }
            }
        }
        return new PageUtils<>(page);
    }


    /**
     * 通过JDBC元数据获取指定表的主键列名集合。
     *
     * @param dbName      database name; when empty, the type's database-less URL template is used
     * @param tableName   table to inspect
     * @param confConnect connection info (password already decrypted by the caller)
     * @return primary-key column names; empty set on any lookup failure (best effort)
     */
    private Set<String> getMysqlPrimaryKey(String dbName, String tableName, ConfConnect confConnect) {
        Set<String> primaryKeys = Sets.newHashSet();
        Connection conn = null;
        Statement statement = null;
        ResultSet rs = null;
        String url = "";
        try {
            String lookupName = tableName;
            if (ConnectTypeEnum.Oracle.getCode().equals(confConnect.getTypeId())) {
                // 对于oracle要将表名转为大写才能正确取出主键来
                lookupName = tableName.toUpperCase();
            }
            ConnectTypeEnum t = ConnectTypeEnum.ObjOf(confConnect.getTypeId());
            Class.forName(t.getDriverClass());
            if (StringUtils.isNotEmpty(dbName)) {
                url = t.getUrl().replace("<host>", confConnect.getHost()).replace("<port>", confConnect.getPort()).replace("<db_name>", dbName);
            } else {
                url = t.getUrl2().replace("<host>", confConnect.getHost()).replace("<port>", confConnect.getPort());
            }
            conn = DriverManager.getConnection(url, confConnect.getUsername(), confConnect.getPassword());
            // Assigned to rs so the finally block closes it; the original kept this
            // ResultSet in a separate "pks" variable that was never closed (leak).
            rs = conn.getMetaData().getPrimaryKeys(null, null, lookupName);
            while (rs.next()) {
                // Column 4 of the getPrimaryKeys result set is COLUMN_NAME.
                primaryKeys.add(rs.getString(4));
            }
        } catch (Exception e) {
            // Best effort: callers treat an empty set as "no primary key found".
            log.warn("获取主键异常:{},\n jdbc url:{}", e.getMessage(), url);
        } finally {
            DBUtils.close(conn, statement, rs);
        }
        return primaryKeys;
    }


    /**
     * 查询指定表在真实数据源中的字段名/类型/长度。
     * Supported for MySQL and Oracle connection types only.
     *
     * @param dataBaseDTO carries the database name used by the MySQL column query
     * @param tableName   source table to inspect
     * @param confConnect connection info (password already decrypted by the caller)
     * @return field descriptors in result-set order
     * @throws RRException when the JDBC connection or query fails
     */
    private List<DataSyncFieldDTO> getMysqlFieldInfo(DataBaseDTO dataBaseDTO, String tableName, ConfConnect confConnect) {
        List<DataSyncFieldDTO> fields = new ArrayList<>();
        Connection conn = null;
        Statement statement = null;
        ResultSet rs = null;
        try {
            String sql = "";
            if (ConnectTypeEnum.MySQL.getCode().equals(confConnect.getTypeId())) {
                sql = String.format(DbSql.MYSQL_COLUMN_SQL, dataBaseDTO.getDbName(), tableName);
            }
            if (ConnectTypeEnum.Oracle.getCode().equals(confConnect.getTypeId())) {
                // Oracle dictionary views store the owner upper-cased.
                sql = String.format(DbSql.ORACLE_COLUMN_LENGTH_SQL, tableName, confConnect.getUsername().toUpperCase());
            }
            ConnectTypeEnum t = ConnectTypeEnum.ObjOf(confConnect.getTypeId());
            Class.forName(t.getDriverClass());
            String url = warpUrl(confConnect, t);
            conn = DriverManager.getConnection(url, confConnect.getUsername(), confConnect.getPassword());
            if (conn != null && !conn.isClosed()) {
                statement = conn.createStatement();
                rs = statement.executeQuery(sql);
                while (rs.next()) {
                    DataSyncFieldDTO dto = new DataSyncFieldDTO();
                    dto.setFieldName(rs.getString("column_name"));
                    String dataType = rs.getString("data_type");
                    String dataLength = rs.getString("data_length");
                    dto.setFieldType(dataType);
                    if (ConnectTypeEnum.Oracle.getCode().equals(confConnect.getTypeId())) {
                        dto.setFieldLength(dataLength);
                    }
                    if (ConnectTypeEnum.MySQL.getCode().equals(confConnect.getTypeId())) {
                        // MySQL reports e.g. "varchar(64)": extract the "64" part.
                        if (dataLength.contains("(")) {
                            dataLength = dataLength.replace("(", ",").replace(")", "");
                            String[] split = StringUtils.split(dataLength, ",");
                            dto.setFieldLength(split[1]);
                        }
                    }
                    fields.add(dto);
                }
            }
        } catch (Exception e) {
            // Preserve the stack trace in the log; the original only printed it to stderr.
            log.error("数据源连接或查询异常, table={}", tableName, e);
            throw new RRException("数据源连接异常..........");
        } finally {
            DBUtils.close(conn, statement, rs);
        }
        return fields;
    }

    /**
     * 同步
     * Validates the task, its table records and its connection, then runs the actual
     * table sync asynchronously on a dedicated single-thread executor.
     *
     * @param id        sync task id
     * @param syncQueue dispatch queue to run the sync on
     * @return R.ok() when the async sync was started (or skipped because a target table
     *         already exists and tableExistRule == 1); R.error(...) for validation failures
     */
    @Override
    public R syncDataOrTable(Integer id, String syncQueue) {
        //更新同步任务队列
        long startTime = System.currentTimeMillis();  // no need to allocate a java.util.Date
        DataSyncInfo dataSyncInfo = dataSyncInfoMapper.selectById(id);
        if (Objects.isNull(dataSyncInfo)) {
            return R.error("该同步任务不存在!");
        }
        LambdaQueryWrapper<DataSyncTableRecord> lambdaQueryWrapper = new LambdaQueryWrapper<>();
        lambdaQueryWrapper.eq(DataSyncTableRecord::getDataInfoId, id);
        List<DataSyncTableRecord> dataSyncTableRecords = dataSyncTableRecordMapper.selectList(lambdaQueryWrapper);
        if (CollectionUtils.isEmpty(dataSyncTableRecords)) {
            return R.error("该任务下未配置同步表信息!");
        }
        //参数设值
        ConfConnect confConnect = confConnectMapper.selectById(dataSyncInfo.getSourceId());
        if (Objects.isNull(confConnect)) {
            return R.error("指定的数据源信息不存在!");
        }
        if (StringUtils.isBlank(confConnect.getPassword())) {
            return R.error("指定的连接信息的密码为空!");
        }
        if (!confConnect.getTypeId().equals(ConnectTypeEnum.MySQL.getCode()) &&
                !confConnect.getTypeId().equals(ConnectTypeEnum.Oracle.getCode())) {
            return R.error("仅支持MYSQL和ORACLE的数据同步");
        }
        //若目标表存在，则不进行整库同步 (tableExistRule == 1: stop when any target table already exists)
        if (Objects.equals(dataSyncInfo.getTableExistRule(), 1)) {
            for (DataSyncTableRecord dataSyncTableRecord : dataSyncTableRecords) {
                boolean exists = tableInfoService.checkExistByName(dataSyncInfo.getToHiveDatabase(), dataSyncTableRecord.getPreviewTable());
                if (exists) {
                    dataSyncInfo.setTaskStatus(DataSyncTaskEnum.TASK_STATUS_STOP.getCode());
                    dataSyncInfoMapper.updateById(dataSyncInfo);
                    return R.ok();
                }
            }
        }
        try {
            confConnect.setPassword(securityUtil.decrypt(confConnect.getPassword()));
        } catch (Exception e) {
            // Best effort: continue with the stored password. The original logged the
            // password value itself — never log secrets; log the connection id instead.
            log.warn("密码解密错误, connectId={}", confConnect.getId(), e);
        }
        updateDataSyncQueue(id, syncQueue);
        ExecutorService pool = Executors.newSingleThreadExecutor();
        pool.execute(() -> {
            try {
                synTable(startTime, confConnect, dataSyncInfo, syncQueue, dataSyncTableRecords);
            } catch (Exception e) {
                // Include the stack trace; the original dropped it entirely.
                log.warn("任务运行异常:{},{}", dataSyncInfo.getTaskName(), dataSyncInfo.getId(), e);
            }
        });
        // The original only shut the pool down on failure, leaking one non-daemon thread
        // per invocation. shutdown() lets the submitted task finish, then ends the thread.
        pool.shutdown();
        return R.ok();
    }

    /**
     * Executes the sync for every configured table, according to the task's sync method:
     * SYNC_METHOD_1 syncs table structure only; SYNC_METHOD_2 syncs structure and data
     * (via the flinkx shell job). One DataSyncTableLog row is written per table.
     *
     * @param startTime           task start millis, used for register batch/cost accounting
     * @param confConnect         source connection (password already decrypted)
     * @param dataSyncInfo        the sync task; its status is updated as the sync progresses
     * @param syncQueue           dispatch queue name recorded on each log row
     * @param dataSyncTableRecords tables to sync
     */
    private void synTable(long startTime, ConfConnect confConnect,
                          DataSyncInfo dataSyncInfo, String syncQueue,
                          List<DataSyncTableRecord> dataSyncTableRecords) {
        String dbName = dataSyncInfo.getDataLibrary();
        String storageFormat = dataSyncInfo.getStorageFormat();
        List<TableRegisterEntity> registerList = new ArrayList<>();
        if (Objects.equals(SyncMethodEnum.SYNC_METHOD_1.getCode(), dataSyncInfo.getSyncMethod())) {
            try {
                for (DataSyncTableRecord dataSyncTableRecord : dataSyncTableRecords) {
                    // A fresh log object per table: the previous code reused one instance,
                    // so a failReason/status from an earlier table leaked into later rows.
                    DataSyncTableLog tableLog = new DataSyncTableLog();
                    tableLog.setDbName(dbName);
                    tableLog.setDbUrl(dataSyncInfo.getDataSource());
                    tableLog.setDispatchQueue(syncQueue);
                    tableLog.setDataInfoId(dataSyncInfo.getId());
                    tableLog.setSyncStatus(0);
                    tableLog.setTableName(dataSyncTableRecord.getOriginTable());
                    List<TableFieldInfo> list = Lists.newArrayList();
                    getDataSyncTableLog(list, dataSyncInfo, storageFormat,
                            dbName, confConnect, tableLog, dataSyncTableRecord, registerList);
                    dataSyncTableLogMapper.insert(tableLog);
                }
                // All structures synced: mark the task successful
                dataSyncInfo.setTaskStatus(DataSyncTaskEnum.TASK_STATUS_SUCCESS.getCode());
                dataSyncInfoMapper.updateById(dataSyncInfo);
            } catch (Exception e) {
                log.error("同步表结构失败", e);
            }
        }
        try {
            // Structure + data sync
            if (Objects.equals(SyncMethodEnum.SYNC_METHOD_2.getCode(), dataSyncInfo.getSyncMethod())) {
                Long userId = ShiroUtils.getUserId();
                for (DataSyncTableRecord dataSyncTableRecord : dataSyncTableRecords) {
                    DataSyncTableLog tableLog = new DataSyncTableLog();
                    tableLog.setDbName(dbName);
                    tableLog.setDbUrl(dataSyncInfo.getDataSource());
                    tableLog.setDispatchQueue(syncQueue);
                    tableLog.setSyncStatus(0);
                    tableLog.setDataInfoId(dataSyncInfo.getId());
                    tableLog.setTableName(dataSyncTableRecord.getOriginTable());
                    tableLog.setCreatePer(dataSyncTableRecord.getCreatePer());
                    List<TableFieldInfo> filedList = Lists.newArrayList();
                    getDataSyncTableLog(filedList, dataSyncInfo,
                            storageFormat, dbName, confConnect, tableLog, dataSyncTableRecord, registerList);
                    // Keep the task in "starting" state while the flinkx job runs
                    dataSyncInfo.setTaskStatus(DataSyncTaskEnum.TASK_STATUS_STARTING.getCode());
                    dataSyncInfoMapper.updateById(dataSyncInfo);
                    if (!DataSyncStatusEnum.FAIL.getCode().equals(tableLog.getSyncStatus())) {
                        // Table creation succeeded — continue with the data sync
                        tableLog.setSyncStatus(0);
                        ConvertJsonFile(confConnect, dataSyncTableRecord, storageFormat, filedList, syncQueue, dataSyncInfo, userId);
                    }
                    dataSyncTableLogMapper.insert(tableLog);
                }
            }
        } catch (Exception e) {
            dataSyncInfo.setTaskStatus(DataSyncTaskEnum.TASK_STATUS_FAIL.getCode());
            dataSyncInfoMapper.updateById(dataSyncInfo);
            log.error("同步表结构和数据失败", e);
        }
        addRegisters(startTime, registerList);
    }


    /**
     * Persists the metadata register entries for every table synced in this run,
     * stamping each with the batch number (end-time millis), run time and cost.
     */
    private void addRegisters(long startTime, List<TableRegisterEntity> registerList) {
        if (CollectionUtil.isEmpty(registerList)) {
            return;
        }
        final Date finishedAt = new Date();
        final long batchNum = finishedAt.getTime();
        final long elapsedMillis = batchNum - startTime;
        for (TableRegisterEntity entry : registerList) {
            entry.setBatchNum(batchNum);
            entry.setRunTime(finishedAt);
            entry.setCost(elapsedMillis);
        }
        tableRegisterService.saveBatch(registerList);
    }

    /**
     * Marks the sync task as "starting" and records the dispatch queue it runs on.
     */
    private void updateDataSyncQueue(Integer id, String syncQueue) {
        DataSyncInfo update = new DataSyncInfo();
        update.setId(id);
        update.setDispatchQueue(syncQueue);
        update.setTaskStatus(DataSyncTaskEnum.TASK_STATUS_STARTING.getCode());
        dataSyncInfoMapper.updateById(update);
    }

    /**
     * Reads the source table's structure, validates field names, builds the target
     * TableInfo and creates the hive table. The outcome (success / failure + reason)
     * is written onto {@code tableLog}; fetched fields are appended to
     * {@code tableStructure} for the caller.
     *
     * @param tableStructure out-param: receives the source table's field list
     * @param tableLog       out-param: sync status and failure reason are set here
     * @param registerList   out-param: a register entry is appended on successful creation
     * @return an always-empty map, kept only for signature compatibility with callers
     */
    private Map<String, Object> getDataSyncTableLog(List<TableFieldInfo> tableStructure, DataSyncInfo dataSyncInfo, String storageFormat,
                                                    String dbName, ConfConnect confConnect,
                                                    DataSyncTableLog tableLog,
                                                    DataSyncTableRecord dataSyncTableRecord,
                                                    List<TableRegisterEntity> registerList) {
        Map<String, Object> hashMap = new HashMap<>();
        try {
            tableStructure.addAll(getTableStructure(confConnect, dataSyncTableRecord.getOriginTable(), dbName));
            if (CollectionUtil.isEmpty(tableStructure)) {
                tableLog.setSyncStatus(DataSyncStatusEnum.FAIL.getCode());
                String msg = String.format("用户%s在库%s中无法获取表%s字段信息", confConnect.getUsername(),
                        dbName, dataSyncTableRecord.getOriginTable());
                tableLog.setFailReason(msg);
                return hashMap;
            }
            for (TableFieldInfo tableFieldInfo : tableStructure) {
                // byte-length != char-length means non-ASCII (e.g. Chinese) characters
                if (tableFieldInfo.getFieldName().length() != tableFieldInfo.getFieldName().getBytes().length) {
                    tableLog.setSyncStatus(DataSyncStatusEnum.FAIL.getCode());
                    String msg = String.format("库%s中的表%s字段名%s包含中文或者特殊字符,不支持同步", dbName,
                            dataSyncTableRecord.getOriginTable(), tableFieldInfo.getFieldName());
                    tableLog.setFailReason(msg);
                    return hashMap;
                }
            }
            TableInfo tableInfo = new TableInfo();
            // Use the source table comment as alias/description, falling back to its name
            String tableComment = getTableCommentInfo(confConnect, dataSyncTableRecord);
            tableInfo.setDbName(dataSyncInfo.getToHiveDatabase());
            tableInfo.setTableName(dataSyncTableRecord.getPreviewTable());

            if (StringUtils.isNotBlank(tableComment)) {
                tableInfo.setTableAlias(tableComment);
            } else {
                tableInfo.setTableAlias(dataSyncTableRecord.getOriginTable());
            }
            tableInfo.setCreatePer(dataSyncInfo.getCreatePer());
            tableInfo.setFormat(storageFormat);
            tableInfo.setTableFieldInfos(tableStructure);
            tableInfo.setDelimitId(6);
            tableInfo.setUpdateTime(new Date());
            tableInfo.setCreateTime(new Date());
            if (StringUtils.isNotBlank(tableComment)) {
                tableInfo.setDesc(tableComment);
            } else {
                tableInfo.setDesc(dataSyncTableRecord.getOriginTable());
            }
            // Create the target table (hive + metadata)
            R result = createTable(tableInfo, dataSyncInfo, registerList, confConnect, dataSyncTableRecord);
            if (null != result && result.get("code").equals(0)) {
                tableLog.setSyncStatus(DataSyncStatusEnum.SUCCESS.getCode());
            } else {
                tableLog.setSyncStatus(DataSyncStatusEnum.FAIL.getCode());
                // result may be null here — the old code dereferenced it unconditionally (NPE)
                tableLog.setFailReason(null == result ? "建表返回结果为空" : String.valueOf(result.get("msg")));
            }

        } catch (Exception e) {
            tableLog.setSyncStatus(DataSyncStatusEnum.FAIL.getCode());
            dataSyncInfo.setTaskStatus(DataSyncTaskEnum.TASK_STATUS_FAIL.getCode());
            dataSyncInfoMapper.updateById(dataSyncInfo);
            tableLog.setFailReason("同步表结构失败!" + e.getMessage());
            log.error("同步表结构失败", e);
        }
        return hashMap;
    }

    /**
     * Fetches the table-level comment of the source table via JDBC.
     * Supports MySQL and Oracle sources; returns an empty string when no comment exists.
     *
     * @throws RRException when the driver cannot be loaded or the query fails
     */
    private String getTableCommentInfo(ConfConnect confConnect, DataSyncTableRecord dataSyncTableRecord) {
        Connection conn = null;
        Statement statement = null;
        ResultSet rs = null;
        String tableComment = "";
        try {
            String tableStructureSql = "";
            if (confConnect.getTypeId().equals(ConnectTypeEnum.MySQL.getCode())) {
                tableStructureSql = String.format(DbSql.MYSQL_TABLE_COMMENT_SQL, confConnect.getDbname(), dataSyncTableRecord.getOriginTable());
            }
            if (confConnect.getTypeId().equals(ConnectTypeEnum.Oracle.getCode())) {
                tableStructureSql = String.format(DbSql.ORACLE_TABLE_COMMENT_SQL, dataSyncTableRecord.getOriginTable());
            }
            ConnectTypeEnum t = ConnectTypeEnum.ObjOf(confConnect.getTypeId());
            Class.forName(t.getDriverClass());
            String URL = warpUrl(confConnect, t);
            // Pooled connection via Druid (name says "Mysql" but the URL decides the dialect)
            conn = DruidUtil.getInstance().getMysqlConnection(URL, confConnect.getUsername(), confConnect.getPassword());
            if (conn != null && !conn.isClosed()) {
                statement = conn.createStatement();
                rs = statement.executeQuery(tableStructureSql);
                while (rs.next()) {
                    tableComment = rs.getString("table_comment");
                }
            }
            log.info("查询成功=================================");
        } catch (Exception e) {
            XxlJobLogger.log(e);
            // Log through slf4j instead of printStackTrace so the stack trace is retained
            log.error("查询表描述失败", e);
            throw new RRException("数据连接异常..............");
        } finally {
            DBUtils.close(conn, statement, rs);
        }
        return tableComment;
    }

    /**
     * Builds the JDBC URL for a connection, choosing the template with or without
     * a database name depending on whether one is configured.
     */
    private String warpUrl(ConfConnect confConnect, ConnectTypeEnum t) {
        String dbName = confConnect.getDbname();
        String template = StringUtils.isNotEmpty(dbName) ? t.getUrl() : t.getUrl2();
        String url = template
                .replace("<host>", confConnect.getHost())
                .replace("<port>", confConnect.getPort());
        if (StringUtils.isNotEmpty(dbName)) {
            url = url.replace("<db_name>", dbName);
        }
        return url;
    }

    /**
     * Builds the log/status view of a sync task: task metadata, the list of synced
     * tables, and the aggregated log text — table-level failure reasons for failed
     * tasks, or the remote flinkx log files when the task also syncs data.
     *
     * @param id sync task id
     * @return a DTO that is empty (no log) when the task does not exist
     */
    @Override
    public DataSyncTableStructDTO getLogInfo(Integer id) {
        DataSyncTableStructDTO dataSyncTableStructDTO = new DataSyncTableStructDTO();
        DataSyncLogDTO dataSyncLogDTO = new DataSyncLogDTO();
        DataSyncInfo dataSyncInfo = dataSyncInfoMapper.selectById(id);
        if (Objects.isNull(dataSyncInfo)) {
            return dataSyncTableStructDTO;
        }
        // Failed task: concatenate every table-level failure reason
        if (DataSyncTaskEnum.TASK_STATUS_FAIL.getCode().equals(dataSyncInfo.getTaskStatus())) {
            LambdaQueryWrapper<DataSyncTableLog> logWrapper = new LambdaQueryWrapper<>();
            logWrapper.eq(DataSyncTableLog::getDataInfoId, id);
            List<DataSyncTableLog> logList = dataSyncTableLogMapper.selectList(logWrapper);
            StringBuilder msg = new StringBuilder();
            if (CollectionUtil.isNotEmpty(logList)) {
                for (DataSyncTableLog dataSyncTableLog : logList) {
                    if (StringUtils.isNotEmpty(dataSyncTableLog.getFailReason())) {
                        msg.append(dataSyncTableLog.getFailReason()).append("\n");
                    }
                }
            }
            dataSyncLogDTO.setLog(msg.toString());
        }
        if (DataSyncTaskEnum.TASK_STATUS_STOP.getCode().equals(dataSyncInfo.getTaskStatus())) {
            dataSyncLogDTO.setLog("目标表存在,不进行同步,任务中止.......................");
        }
        // Source table records (needed to locate per-table log files below)
        LambdaQueryWrapper<DataSyncTableRecord> recordWrapper = new LambdaQueryWrapper<>();
        recordWrapper.eq(DataSyncTableRecord::getDataInfoId, id);
        List<DataSyncTableRecord> dataSyncTableRecords = dataSyncTableRecordMapper.selectList(recordWrapper);

        // Task meta + tables that already have a structure-sync log row
        List<String> tables = dataSyncTableLogMapper.selectByInfoId(id);
        dataSyncTableStructDTO.setTables(tables);
        dataSyncTableStructDTO.setDispatchQueue(dataSyncInfo.getDispatchQueue());
        dataSyncTableStructDTO.setTaskName(dataSyncInfo.getTaskName());
        dataSyncTableStructDTO.setTaskStatusInfo(DataSyncTaskEnum.getMsg(dataSyncInfo.getTaskStatus()));
        dataSyncTableStructDTO.setId(id);
        try {
            // Structure+data sync: pull each table's flinkx log file from the hive host,
            // unless a failure reason was already collected above
            if (Objects.equals(SyncMethodEnum.SYNC_METHOD_2.getCode(), dataSyncInfo.getSyncMethod())) {
                StringBuilder builder = new StringBuilder();
                if (StringUtils.isNotEmpty(dataSyncLogDTO.getLog())) {
                    builder.append(dataSyncLogDTO.getLog());
                } else {
                    ExecuteShellUtil shell = ExecuteShellUtil.getInstance();
                    shell.init(config.getHiveHost(), config.getHivePort(),
                            config.getHiveUserName(), config.getHivePwd());
                    String dataSyncDir = config.getDataSyncDir();
                    for (DataSyncTableRecord dataSyncTableRecord : dataSyncTableRecords) {
                        String logFile = dataSyncDir + "/" + dataSyncInfo.getDataLibrary() + "_" + dataSyncTableRecord.getOriginTable() + "_" + dataSyncTableRecord.getId() + ".log";
                        log.info("logFile path:{}", logFile);
                        // Renamed from "log": the old local shadowed the class logger
                        String remoteLog = shell.getFile(logFile);
                        builder.append(remoteLog);
                    }
                }
                dataSyncLogDTO.setLog(builder.toString());
                dataSyncLogDTO.setId(dataSyncInfo.getId());
            }
        } catch (JSchException e) {
            // Keep the stack trace in the application log instead of stderr
            log.error("读取同步日志失败", e);
        }
        dataSyncTableStructDTO.setDataSyncLogDTO(dataSyncLogDTO);
        return dataSyncTableStructDTO;
    }

    /**
     * Returns the distinct task creators of the current tenant, for the
     * "created by" dropdown selector.
     *
     * @return set of creator DTOs
     */
    @Override
    public Set<ApplyUserListDTO> getCreateUserList() {
        return dataSyncInfoMapper.getCreateUserList(ShiroUtils.getTenantId());
    }

    /**
     * Pages over connection configs filtered by source type, restricted to the
     * current user/tenant and row-level data-permission rules.
     *
     * @param dataSyncDataSourceDTO 1:mysql 2:oracle
     * @return paged connection list
     */
    @Override
    public PageUtils getDataSourceByType(DataSyncDataSourceDTO dataSyncDataSourceDTO) {
        IPage<ConfConnect> page = new Page<>(dataSyncDataSourceDTO.getCurrent(), dataSyncDataSourceDTO.getLimit());
        Map<String, Object> params = new HashMap<>();
        params.put("create_per", ShiroUtils.getUserId().intValue());
        params.put("typeId", dataSyncDataSourceDTO.getTypeId());
        params.put("typeIds", dataSyncDataSourceDTO.getTypeIds());
        params.put("tenant_id", ShiroUtils.getTenantId());
        // Apply the data-permission filter for the current principal
        DataPermissionUtil.putDataFilter(params,
                PermissionTypeEnum.RESOURCE.getType(), PermissionTypeEnum.QUERY.getType());
        IPage<ConfConnect> result = dataSyncInfoMapper.getDataSourceByType(page, params);
        return new PageUtils(result);
    }


    /**
     * Builds the flinkx JSON job definition for one table, uploads it to the hive
     * server and triggers the shell script that runs it.
     *
     * @param userId id of the user launching the sync; an empty user is substituted
     *               when the id cannot be resolved
     */
    private R ConvertJsonFile(ConfConnect confConnect, DataSyncTableRecord dataSyncTableRecord, String storageFormat, List<TableFieldInfo> list, String syncQueue, DataSyncInfo dataSyncInfo,  Long userId ) {
        SysUserEntity operator = sysUserMapper.selectOne(
                new LambdaQueryWrapper<SysUserEntity>().eq(SysUserEntity::getUserId, userId));
        // Fall back to an empty user so downstream JSON building never sees null
        if (Objects.isNull(operator)) {
            operator = new SysUserEntity();
        }
        JSONObject jobJson = jsonDataProcess(confConnect, dataSyncTableRecord, storageFormat, list, dataSyncInfo, operator);
        return shellOperation(dataSyncTableRecord, syncQueue, dataSyncInfo, jobJson);
    }

    /**
     * Uploads the flinkx job JSON and a generated launcher script to the hive host,
     * then runs the script (in the background, output redirected to a per-table log).
     *
     * @return R.ok() when the script was launched; R.error(...) on any SSH/upload failure
     */
    private R shellOperation(DataSyncTableRecord dataSyncTableRecord, String syncQueue, DataSyncInfo dataSyncInfo, JSONObject root) {
        ExecuteShellUtil shell = null;
        try {
            String now = DateUtils.format(new Date(), "yyyy_MM_dd_HH_mm_ss");
            shell = ExecuteShellUtil.getInstance();
            shell.init(config.getHiveHost(), config.getHivePort(),
                    config.getHiveUserName(), config.getHivePwd());
            String dataSyncDir = config.getDataSyncDir();
            String cmdHome = config.getCmdHome();
            // Upload the job definition JSON
            String sqlFileName = dataSyncDir + "/" + dataSyncInfo.getDataLibrary() + "_" + dataSyncTableRecord.getOriginTable() + "_" + dataSyncTableRecord.getId() + ".json";
            String jsonString = root.toJSONString();
            shell.uploadFile(jsonString, sqlFileName);
            // Generate and upload the launcher script; stdout+stderr go to a per-table log
            String logFile = dataSyncDir + "/" + dataSyncInfo.getDataLibrary() + "_" + dataSyncTableRecord.getOriginTable() + "_" + dataSyncTableRecord.getId() + ".log";
            StringBuilder command = new StringBuilder();
            command.append(cmdHome).append(" -mode ").append(config.getModelAppend())
                    .append(" -job ").append(sqlFileName)
                    .append(" -pluginRoot ").append(config.getPluginPath()).append("  -flinkconf ").append(config.getFlinkConf())
                    .append(" -yarnconf ").append("/etc/hadoop/conf")
                    .append(" -flinkLibJar ").append("/opt/cloudera/parcels/FLINK-1.10.1-BIN-SCALA_2.11/lib/flink/lib ")
                    .append(" -queue ").append(syncQueue)
                    .append(" -pluginLoadMode classpath ")
                    .append(">").append(logFile)
                    .append(" 2>&1 ").append(" & \n");
            String startFile = dataSyncDir + "/" + dataSyncInfo.getDataLibrary() + "_" + dataSyncTableRecord.getOriginTable() + "_" + dataSyncTableRecord.getId() + "start.sh";
            shell.uploadFile(command.toString(), startFile);
            shell.execCmd("cd " + dataSyncDir + " && sh " + startFile);
        } catch (Exception e) {
            dataSyncInfo.setTaskStatus(DataSyncTaskEnum.TASK_STATUS_FAIL.getCode());
            dataSyncInfoMapper.updateById(dataSyncInfo);
            log.error("hive运行失败 ", e);
            return R.error("hive运行失败" + e.getMessage());
        } finally {
            // Close the SSH session on every path (it leaked on failure before)
            if (shell != null) {
                try {
                    shell.close();
                } catch (Exception ignored) {
                    // closing failures must not mask the real outcome
                }
            }
        }
        return R.ok();
    }

    /**
     * Lists the tables of the source database via JDBC, optionally restricted to
     * the given records, and appends matching names to {@code list}. Tables whose
     * names match {@link DataSyncTaskConstant#REGEX_RULE} are filtered out.
     *
     * @param dbName      database name
     * @param list        out-param: receives the resulting table names
     * @param confConnect connection info
     * @param records     optional subset of tables to query for
     * @throws RRException when the driver cannot be loaded or the query fails
     */
    private void getJDBCQueryTables(String dbName, List<String> list, ConfConnect confConnect,
                                    List<DataSyncMetaDataReturnDTO> records) {
        Connection conn = null;
        Statement statement = null;
        ResultSet rs = null;
        try {
            String sql = "";
            if (ConnectTypeEnum.MySQL.getCode().equals(confConnect.getTypeId())) {
                sql = String.format(DbSql.MYSQL_TABLES_SQL, dbName);
            }
            if (ConnectTypeEnum.Oracle.getCode().equals(confConnect.getTypeId())) {
                sql = DbSql.ORACLE_TABLES_SQL.replaceAll("#\\{userName}", confConnect.getUsername().toUpperCase());
            }
            // NOTE(review): table names are concatenated into the SQL text; they come
            // from internal metadata records, but a PreparedStatement would be safer.
            List<String> filterTables = new ArrayList<>();
            if (CollectionUtil.isNotEmpty(records)) {
                for (DataSyncMetaDataReturnDTO record : records) {
                    filterTables.add("'" + record.getTableName() + "'");
                }
                sql = String.format("%s and t.table_name in (%s)", sql, String.join(",", filterTables));
            }
            ConnectTypeEnum t = ConnectTypeEnum.ObjOf(confConnect.getTypeId());
            if (t == null) {
                return;
            }
            Class.forName(t.getDriverClass());
            String URL = warpUrl(confConnect, t);
            conn = DriverManager.getConnection(URL, confConnect.getUsername(), confConnect.getPassword());
            if (conn != null && !conn.isClosed()) {
                statement = conn.createStatement();
                rs = statement.executeQuery(sql);
                while (rs.next()) {
                    String table_name = rs.getString("table_name");
                    // Skip tables matching the exclusion pattern
                    if (!Pattern.matches(DataSyncTaskConstant.REGEX_RULE, table_name)) {
                        list.add(table_name);
                    }
                }
            }
        } catch (Exception e) {
            log.error("数据源连接异常", e);
            throw new RRException("数据源连接异常..........");
        } finally {
            // Same helper the sibling JDBC methods use; replaces 20+ lines of manual closing
            DBUtils.close(conn, statement, rs);
        }
    }

    /**
     * Reads the column metadata of the source table via JDBC and maps each column
     * to a TableFieldInfo with a hive-compatible type. Columns whose names contain
     * non-ASCII characters are skipped.
     *
     * @param confConnect connection info (MySQL or Oracle)
     * @param tableName   source table name
     * @param dbName      source database name (used by the MySQL column query)
     * @return ordered field list; empty when the table has no readable columns
     * @throws RRException when the driver cannot be loaded or the query fails
     */
    private List<TableFieldInfo> getTableStructure(ConfConnect confConnect, String tableName, String dbName) {
        List<TableFieldInfo> tableStructureDTOS = new ArrayList<>();
        Connection conn = null;
        Statement statement = null;
        ResultSet rs = null;
        try {
            int i = 1; // 1-based field order
            String tableStructureSql = "";
            if (confConnect.getTypeId().equals(ConnectTypeEnum.MySQL.getCode())) {
                tableStructureSql = String.format(DbSql.MYSQL_COLUMN_SQL, dbName, tableName);
            }
            if (confConnect.getTypeId().equals(ConnectTypeEnum.Oracle.getCode())) {
                tableStructureSql = String.format(DbSql.ORACLE_COLUMN_SQL, tableName, confConnect.getUsername().toUpperCase());
            }
            log.info("SQL:{}", tableStructureSql);
            ConnectTypeEnum t = ConnectTypeEnum.ObjOf(confConnect.getTypeId());
            Class.forName(t.getDriverClass());
            String URL = warpUrl(confConnect, t);
            conn = DriverManager.getConnection(URL, confConnect.getUsername(), confConnect.getPassword());
            if (conn != null && !conn.isClosed()) {
                statement = conn.createStatement();
                rs = statement.executeQuery(tableStructureSql);
                while (rs.next()) {
                    TableFieldInfo tableFieldInfo = new TableFieldInfo();
                    tableFieldInfo.setDbName(confConnect.getDbname());
                    tableFieldInfo.setTableName(tableName);
                    String field = rs.getString("column_name");
                    // Skip columns with non-ASCII (e.g. Chinese) names
                    if (field.length() != field.getBytes().length) {
                        continue;
                    }
                    tableFieldInfo.setFieldName(field);
                    String type = rs.getString("data_type");
                    String comment = rs.getString("comments");
                    if (StringUtils.isNotBlank(comment)) {
                        tableFieldInfo.setFieldAlias(comment);
                    } else {
                        tableFieldInfo.setFieldAlias(field);
                    }
                    // Strip a length/precision suffix such as "varchar(255)"
                    if (type.contains("(")) {
                        String[] split = StringUtils.split(type, "(");
                        type = split[0];
                    }
                    // Map source types onto hive-supported types:
                    // unknown -> string, datetime -> timestamp, timestamp/date -> string
                    if (!TableDataType.containsName(type)) {
                        type = TableDataType.STRING.getName();
                    } else if (TableDataType.DATETIME.getName().equalsIgnoreCase(type)) {
                        type = TableDataType.TIMESTAMP.getName();
                    } else if (TableDataType.TIMESTAMP.getName().equalsIgnoreCase(type)) {
                        type = TableDataType.STRING.getName();
                    } else if (TableDataType.DATE.getName().equalsIgnoreCase(type)) {
                        type = TableDataType.STRING.getName();
                    }
                    type = MysqlToHiveTypeConvertUtil.convertType(type);
                    tableFieldInfo.setFieldType(type);
                    tableFieldInfo.setFieldOrder(i);
                    tableFieldInfo.setIsPartition(0);
                    tableStructureDTOS.add(tableFieldInfo);
                    i++;
                }
            }
            log.info("查询成功=================================");
        } catch (Exception e) {
            XxlJobLogger.log(e);
            // Log through slf4j instead of printStackTrace so the stack trace is retained
            log.error("获取表结构失败", e);
            throw new RRException("数据连接异常...............");
        } finally {
            DBUtils.close(conn, statement, rs);
        }
        return tableStructureDTOS;
    }

    /**
     * Persists the new table's metadata: applies default table/column/encryption
     * permission levels, replaces any existing TableInfo with the same db+name,
     * saves the field list, rebuilds the source-table relationships and queues a
     * register entry.
     *
     * @return true when every metadata write succeeded; false otherwise
     */
    private Boolean createDbTable(TableInfo tableInfo, List<TableRegisterEntity> registerList, DataSyncInfo dataSyncInfo, ConfConnect confConnect, DataSyncTableRecord dataSyncTableRecord) {
        List<AllPermissionConfig> allPermisssionList = iAllPermissionConfigService.list();
        Integer tableLevel = 0, culonm = 0, enType = 0;
        if (CollectionUtil.isNotEmpty(allPermisssionList)) {
            List<AllPermissionConfig> tableList = allPermisssionList.stream().filter(allPermissionConfig -> Objects.equals(PressionParamType.TABLE.getId(), allPermissionConfig.getPermissType())).collect(Collectors.toList());
            if (CollectionUtil.isNotEmpty(tableList)) {
                tableLevel = tableList.get(0).getDataPermissionId();
            }
            List<AllPermissionConfig> culonmList = allPermisssionList.stream().filter(allPermissionConfig -> Objects.equals(PressionParamType.CULONM.getId(), allPermissionConfig.getPermissType())).collect(Collectors.toList());
            // Fixed copy-paste bug: the old code checked tableList here, which threw
            // IndexOutOfBounds when the column-permission list was empty
            if (CollectionUtil.isNotEmpty(culonmList)) {
                culonm = culonmList.get(0).getDataPermissionId();
            }
            List<AllPermissionConfig> enStypeList = allPermisssionList.stream().filter(allPermissionConfig -> Objects.equals(PressionParamType.ENSTYPE.getId(), allPermissionConfig.getPermissType())).collect(Collectors.toList());
            // Same copy-paste fix as above, for the encryption-style list
            if (CollectionUtil.isNotEmpty(enStypeList)) {
                enType = enStypeList.get(0).getDataPermissionId();
            }
        }
        if (tableLevel != 0) {
            tableInfo.setTableLevelId(tableLevel);
        }
        try {
            SysSecurityDataLevelEntity defaultTableService = sysSecurityDataLevelService.getDefaultTableService();
            tableInfo.setSecurityDataLevel(defaultTableService.getName());
            // Replace any stale metadata row for the same db+table
            QueryWrapper<TableInfo> param = new QueryWrapper<TableInfo>().eq("db_name", tableInfo.getDbName())
                    .eq("table_name", tableInfo.getTableName());
            tableInfoService.remove(param);
            tableInfo.setTenantId(dataSyncInfo.getTenantId());
            Boolean re = tableInfoService.save(tableInfo);
            if (re) {
                TableInfo info = tableInfoService.getOne(param);
                if (info == null) {
                    return false;
                } else {
                    List<TableRelationshipEntity> shipEntityList = new ArrayList<>();
                    List<TableFieldInfo> list = tableInfo.getTableFieldInfos();
                    for (TableFieldInfo fieldInfo : list) {
                        fieldInfo.setTableId(info.getId());
                        fieldInfo.setCreatePer(tableInfo.getCreatePer());
                        fieldInfo.setCreateTime(DateTime.now());
                        fieldInfo.setTenantId(dataSyncInfo.getTenantId());
                        if (0 != culonm) {
                            fieldInfo.setColcumLevelId(culonm);
                        }
                        if (0 != enType) {
                            fieldInfo.setEncyStyleId(enType);
                        }
                        fieldInfo.setSecurityDataLevel(defaultTableService.getName());

                        // Field-level lineage back to the source table
                        TableRelationshipEntity shipEntity = new TableRelationshipEntity();
                        shipEntity.setSourceTableId(info.getId());
                        shipEntity.setSourceTableName(info.getTableName());
                        shipEntity.setSourceFieldName(fieldInfo.getFieldName());
                        shipEntity.setRelationSourceId(confConnect.getId());
                        shipEntity.setRelationSourceName(confConnect.getName());
                        shipEntity.setRelationDbname(dataSyncInfo.getDataLibrary());
                        shipEntity.setRelationTableName(dataSyncTableRecord.getOriginTable());
                        shipEntity.setRelationTableDesc(info.getDesc());
                        shipEntity.setRelationFieldName(fieldInfo.getFieldName());
                        shipEntity.setRelationFieldType(fieldInfo.getFieldType());
                        shipEntity.setRelationFieldDesc(fieldInfo.getFieldAlias());
                        shipEntity.setRelationCollTableId(dataSyncTableRecord.getTableId());
                        shipEntityList.add(shipEntity);
                    }
                    tableFieldInfoService.saveBatch(list);

                    if (CollectionUtil.isNotEmpty(shipEntityList)) {
                        tableRelationshipService.remove(new QueryWrapper<TableRelationshipEntity>().eq("source_table_id", info.getId()));
                        tableRelationshipService.saveBatch(shipEntityList);
                    }

                    addRegister(info, registerList);

                    return true;
                }
            } else {
                return false;
            }
        } catch (Exception e) {
            // Was silently swallowed; keep the false return but record the cause
            log.error("保存表元数据失败:{}", tableInfo.getTableName(), e);
            return false;
        }
    }

    /**
     * Creates an offline-sync metadata register entry for the given table and
     * appends it to the batch list (persisted later in one saveBatch call).
     */
    private void addRegister(TableInfo info, List<TableRegisterEntity> registerList) {
        TableRegisterEntity entry = new TableRegisterEntity();
        entry.setTableInfoId(info.getId());
        entry.setRegisterType(TableRegisterTypeEnum.OFFLINE_SYN.getCode());
        entry.setRunUser(info.getCreatePer().longValue());
        registerList.add(entry);
    }

    /**
     * Loads a table's metadata together with its field list.
     */
    private TableInfo getTableById(Integer tableId) {
        TableInfo tableInfo = tableInfoMapper.getTableById(tableId);
        List<TableFieldInfo> fields = tableFieldInfoMapper.selectListByTableIds(Collections.singleton(tableId));
        tableInfo.setTableFieldInfos(fields);
        return tableInfo;
    }

    /**
     * Creates the Hive table for {@code tableInfo}, then the DB-side metadata
     * records; initializes the creator's Ranger permissions on success.
     * If the DB-side step fails, the freshly created Hive table and its HDFS
     * location are rolled back (best effort) before an error is returned.
     *
     * @param tableInfo           table definition (must carry a non-empty field list)
     * @param dataSyncInfo        sync task carrying the target Hive database
     * @param registerList        accumulator for register records created downstream
     * @param confConnect         source connection config, passed through to the DB step
     * @param dataSyncTableRecord per-table sync record, passed through to the DB step
     * @return {@code R.okWithData(creatorId)} on success, {@code R.error(...)} otherwise
     */
    private R createTable(TableInfo tableInfo, DataSyncInfo dataSyncInfo, List<TableRegisterEntity> registerList, ConfConnect confConnect, DataSyncTableRecord dataSyncTableRecord) {
        if (com.baomidou.mybatisplus.core.toolkit.CollectionUtils.isEmpty(tableInfo.getTableFieldInfos())) {
            return R.error("表中字段不能为空！");
        }
        HiveTableUtil hiveTableUtil = null;
        try {
            hiveTableUtil = new HiveTableUtil(config.getHiveJdbcUrl(), dataSyncInfo.getToHiveDatabase(), "hdfs", "hdfs");
            R r = createHiveTable(tableInfo, dataSyncInfo.getToHiveDatabase());
            if (r.get("code").equals(0)) {
                if (createDbTable(tableInfo, registerList, dataSyncInfo, confConnect, dataSyncTableRecord)) {
                    TableInfo table = getTableById(tableInfo.getId());
                    // Initial Ranger permissions for the table creator.
                    Set<String> intiCreateTable = Sets.newHashSet();
                    intiCreateTable.add(table.getDbName() + "|" + table.getTableName()
                            + "|" + table.getId() + "|" + table.getCreatePer());
                    rangerDataService.intiCreateTable(intiCreateTable);
                    return R.okWithData(table.getCreatePer());
                } else {
                    // DB-side creation failed: roll back the Hive table and its HDFS dir.
                    HdfsUtil hdfsUtil = null;
                    try {
                        hdfsUtil = new HdfsUtil(bdpJobConfig);
                        hiveTableUtil.dropTable(tableInfo.getTableName());
                        hdfsUtil.delete(HiveTableUtil.genTableLocation(tableInfo.getDbName(), tableInfo.getTableName()));
                    } catch (Exception ee) {
                        // Best-effort rollback: the original swallowed this silently,
                        // leaving orphaned Hive/HDFS artifacts with no trace. Log it.
                        log.error("回滚Hive表失败, table={}", tableInfo.getTableName(), ee);
                    } finally {
                        if (null != hdfsUtil) {
                            hdfsUtil.close();
                        }
                    }
                    return R.error("数据库中创建表结构失败");
                }
            }
        } catch (Exception e) {
            return R.error("HIve建表失败，失败原因:" + e.getMessage());
        } finally {
            if (hiveTableUtil != null) {
                hiveTableUtil.close();
            }
        }
        return R.error("数据库新建表失败");
    }


    /**
     * Builds and executes the Hive DDL (drop-then-create external table) for
     * {@code tableInfo} in database {@code toHiveDataBase}.
     *
     * <p>Type remapping — Hive has no DATETIME type; STRING/DATE use yyyy-MM-dd,
     * TIMESTAMP uses yyyy-MM-dd hh:mm:ss. Therefore: unknown types → STRING,
     * DATETIME → TIMESTAMP, TIMESTAMP/DATE → STRING.
     *
     * @param tableInfo      table definition with ordered field list and storage format
     * @param toHiveDataBase target Hive database name
     * @return {@code R.ok()} on success
     * @throws RRException on missing/unknown delimiter (text format), DATE column
     *                     in parquet format, or any SQL failure while executing the DDL
     */
    private R createHiveTable(TableInfo tableInfo, String toHiveDataBase) {

        List<String> cols = new ArrayList<>();
        List<String> pars = new ArrayList<>();
        String format = tableInfo.getFormat();
        String tableName = tableInfo.getTableName();

        List<TableFieldInfo> listFields = tableInfo.getTableFieldInfos();
        listFields.sort(Comparator.comparingInt(TableFieldInfo::getFieldOrder));
        for (TableFieldInfo fieldInfo : listFields) {
            // Resolve the Hive-side type once, then route into columns vs partitions.
            String hiveType;
            if (!TableDataType.containsName(fieldInfo.getFieldType())) {
                // Types Hive doesn't know all degrade to STRING.
                hiveType = TableDataType.STRING.getName();
            } else if (TableDataType.DATETIME.getName().equalsIgnoreCase(fieldInfo.getFieldType())) {
                // SemanticException [Error 10099]: DATETIME type isn't supported;
                // use DATE or TIMESTAMP instead — we pick TIMESTAMP.
                hiveType = TableDataType.TIMESTAMP.getName();
            } else if (TableDataType.TIMESTAMP.getName().equalsIgnoreCase(fieldInfo.getFieldType())
                    || TableDataType.DATE.getName().equalsIgnoreCase(fieldInfo.getFieldType())) {
                hiveType = TableDataType.STRING.getName();
            } else {
                hiveType = fieldInfo.getFieldType();
            }
            String columnDef = "`" + fieldInfo.getFieldName() + "`" + " " + hiveType;
            if (fieldInfo.getIsPartition() == 0) {
                cols.add(columnDef);
            } else {
                pars.add(columnDef);
            }
        }
        String partitions = "";
        if (pars.size() > 0) {
            partitions = String.format("partitioned by(%s)", StringUtils.join(pars.toArray(), ","));
        }
        String statement = "";
        if (DataFormat.text.getName().equals(format)) {
            DelimitFormat delimitFormat;
            if (null != tableInfo.getDelimitId()) {
                delimitFormat = DelimitFormat.ObjOf(tableInfo.getDelimitId());
            } else {
                throw new RRException(DataDevelopmentBizExceptionEnum.NO_DELIMIT.getMessage());
            }
            if (null == delimitFormat) {
                throw new RRException(DataDevelopmentBizExceptionEnum.DELIMIT_NOT_EXITS.getMessage());
            }
            String express = delimitFormat.getExpress();
            statement = String.format("create EXTERNAL table IF NOT EXISTS %s (\n" +
                    "%s)\n" +
                    "%s\n" +
                    "%s\n" +
                    "row format delimited fields terminated by '%s'\n" +
                    "LINES TERMINATED BY '\\n'", tableName, StringUtils.join(cols.toArray(), ","), DataSyncTaskConstant.PARTITIONED_BY, partitions, express);
        }
        if (DataFormat.json.getName().equals(format)) {
            statement = String.format("create EXTERNAL table IF NOT EXISTS %s (\n" +
                    "%s)\n" +
                    "%s\n" +
                    "%s\n" +
                    "ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe'\n" +
                    "STORED AS TEXTFILE", tableName, StringUtils.join(cols.toArray(), ","), DataSyncTaskConstant.PARTITIONED_BY, partitions);
        }
        if (DataFormat.parquet.getName().equals(format)) {
            // Parquet writer used here cannot handle DATE columns — reject early.
            for (String col : cols) {
                if (col.split(" ")[1].contains("DATE")) {
                    throw new RRException(DataDevelopmentBizExceptionEnum.DATE_TYPE_NOT_SUPPORT.getMessage());
                }
            }
            statement = String.format("create EXTERNAL table IF NOT EXISTS %s (\n" +
                    "%s)\n" +
                    "%s\n" +
                    "%s\n" +
                    "stored as parquet", tableName, StringUtils.join(cols.toArray(), ","), DataSyncTaskConstant.PARTITIONED_BY, partitions);
        }
        HiveTableUtil hiveTableUtil = null;
        try {
            hiveTableUtil = new HiveTableUtil(config.getHiveJdbcUrl(), toHiveDataBase, "hdfs", "hdfs");
            log.info("-----------------------statment:{}", statement);
            // Drop any stale table first, then (re)create it.
            hiveTableUtil.execute(String.format("drop table  IF EXISTS  %s", tableInfo.getTableName()));
            hiveTableUtil.execute(statement);
            log.info("执行成功");
        } catch (SQLException e) {
            if (e.getMessage().contains("primary key")) {
                throw new RRException(DataDevelopmentBizExceptionEnum.PRIMARY_KEY_EXIST.getMessage());
            } else {
                throw new RRException(DataDevelopmentBizExceptionEnum.CREATE_TABLE_ERROR.getMessage() + "：" + e.getMessage());
            }
        } finally {
            // BUGFIX: the original closed unconditionally in both catch and finally —
            // NPE when the HiveTableUtil constructor threw, and a double close on
            // the SQLException path. Single, null-guarded close here.
            if (hiveTableUtil != null) {
                hiveTableUtil.close();
            }
        }
        return R.ok();
    }

    /**
     * Assembles the DataX-style job JSON for one table sync: a JDBC reader
     * (MySQL or Oracle, built from the source connection) and a "hivewriter"
     * targeting {@code dataSyncInfo.getToHiveDatabase()} over HA HDFS.
     *
     * @param dataSyncTableRecord source/preview table names for this sync
     * @param storageFormat       writer "fileType" (e.g. text/parquet)
     * @param list                source field metadata; first primary-key field becomes splitPK
     * @param sysUserEntity       user whose credentials the writer connects with
     * @return root JSON object with a single "job" entry
     */
    private JSONObject jsonDataProcess(ConfConnect confConnect, DataSyncTableRecord dataSyncTableRecord, String storageFormat, List<TableFieldInfo> list, DataSyncInfo dataSyncInfo, SysUserEntity sysUserEntity) {
        // Map of previewTable(lowercased) -> [{key, type}, ...]; also captures the
        // first field flagged primaryKey==1 as the reader's splitPK.
        JSONObject jsonObject = new JSONObject();
        JSONArray jsonArray = null;
        String splitPK="";
        if (!CollectionUtils.isEmpty(list)) {
            jsonArray = new JSONArray();
            for (TableFieldInfo tableFieldInfo : list) {
                JSONObject jsonObject1 = new JSONObject();
                jsonObject1.put("key", tableFieldInfo.getFieldName());
                jsonObject1.put("type", tableFieldInfo.getFieldType());
                if (Integer.valueOf(1).equals(tableFieldInfo.getPrimaryKey())) {
                    splitPK=tableFieldInfo.getFieldName();
                }
                jsonArray.add(jsonObject1);
            }
        }
        jsonObject.put(dataSyncTableRecord.getPreviewTable().toLowerCase(), jsonArray);

        // --- reader side: source DB connection ---
        JSONObject root = new JSONObject();
        JSONObject job = new JSONObject();
        JSONArray contentArr = new JSONArray();
        JSONObject setting = new JSONObject();
        JSONObject content1 = new JSONObject();
        JSONObject reader = new JSONObject();
        JSONObject readerParameter = new JSONObject();
        JSONArray dbList = new JSONArray();
        JSONObject dbList1 = new JSONObject();
        dbList1.put("dbName", confConnect.getDbname());
        dbList.add(dbList1);
        JSONArray connection = new JSONArray();
        JSONObject connection1 = new JSONObject();
        JSONArray jdbcUrl = new JSONArray();
        // NOTE(review): only MySQL and Oracle are handled; any other source type
        // leaves jdbcUrl empty — confirm upstream validation guarantees one of the two.
        if (StringUtils.equals(ConnectTypeEnum.MySQL.getName(), dataSyncInfo.getDataSourceType())) {
            jdbcUrl.add("jdbc:mysql://" + confConnect.getHost() + ":" + confConnect.getPort() + "/" + dataSyncInfo.getDataLibrary() + DataSyncTaskConstant.DB_CONSTANT);
        }
        if (StringUtils.equals(ConnectTypeEnum.Oracle.getName(), dataSyncInfo.getDataSourceType())) {
            jdbcUrl.add("jdbc:oracle:thin:@" + confConnect.getHost() + ":" + confConnect.getPort() + ":" + dataSyncInfo.getDataLibrary());
        }
        JSONArray table = new JSONArray();
        table.add(dataSyncTableRecord.getOriginTable());
        connection1.put("jdbcUrl", jdbcUrl);
        connection1.put("table", table);
        connection.add(connection1);
        // All columns selected; the writer resolves them via "tablesColumn" below.
        JSONArray column = new JSONArray();
        column.add("*");
        readerParameter.put("dbList", dbList);
        readerParameter.put("connection", connection);
        readerParameter.put("username", confConnect.getUsername());
        readerParameter.put("password", confConnect.getPassword());
        readerParameter.put("column", column);
        reader.put("parameter", readerParameter);
        reader.put("name", ConnectTypeEnum.ObjOf(confConnect.getTypeId()).getName().toLowerCase() + "reader");
        // --- writer side: HA HDFS + Hive target ---
        JSONObject writer = new JSONObject();
        JSONObject hadoopConfig = new JSONObject();
        String dfsHaNamenodeNs = config.getDfsHaNamenodeNs();
        log.info(">>>>>>>>>>>>>>>dfsHaNamenodeNs{}", dfsHaNamenodeNs);
        // NOTE(review): assumes dfsHaNamenodeNs contains at least two comma-separated
        // namenode ids — nodes[1] below throws AIOOBE otherwise; verify config.
        String[] nodes = StringUtils.split(dfsHaNamenodeNs, ",");
        hadoopConfig.put("dfs.nameservices", config.getNameService());
        hadoopConfig.put("dfs.ha.namenodes." + config.getNameService(), config.getDfsHaNamenodeNs());
        // NOTE(review): nodes[0] is paired with ...NsNn2 and nodes[1] with ...NsNn1 —
        // looks swapped; confirm against the cluster's actual nn1/nn2 addresses.
        hadoopConfig.put("dfs.namenode.rpc-address." + config.getNameService() + "." + nodes[0], config.getDfsRpcAddressNsNn2());
        hadoopConfig.put("dfs.client.failover.proxy.provider." + config.getNameService(), "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
        hadoopConfig.put("dfs.namenode.rpc-address." + config.getNameService() + "." + nodes[1], config.getDfsRpcAddressNsNn1());
        hadoopConfig.put("dfs.client.use.datanode.hostname", "true");
        hadoopConfig.put("fs.hdfs.impl.disable.cache", "true");
        hadoopConfig.put("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
        JSONObject parameter = new JSONObject();
        parameter.put("jdbcUrl", config.getHiveJdbcUrl() + dataSyncInfo.getToHiveDatabase());
        parameter.put("username", "user_" + sysUserEntity.getUserId());
        parameter.put("password", sysUserEntity.getPassword());
        parameter.put("fileType", storageFormat);
        parameter.put("fieldDelimiter", "&");
        parameter.put("writeMode", dataSyncInfo.getWriteRule());
        parameter.put("compress", "");
        parameter.put("charsetName", "UTF-8");
        parameter.put("tablesColumn", jsonObject.toString());
        // Daily "pt" partition on the Hive side.
        parameter.put("partition", "pt");
        parameter.put("partitionType", "DAY");
        parameter.put("defaultFS", "hdfs://" + bdpJobConfig.getNamespace());
        parameter.put("hadoopConfig", hadoopConfig);
        parameter.put("splitPK", splitPK);
        writer.put("name", "hivewriter");
        writer.put("parameter", parameter);
        content1.put("reader", reader);
        content1.put("writer", writer);
        contentArr.add(content1);
        // --- job settings: error tolerance and throughput ---
        JSONObject errorLimit = new JSONObject();
        errorLimit.put("record", 100);
        JSONObject speed = new JSONObject();
        // writeFlowControl == 1 means unlimited bytes/s; otherwise use the configured cap.
        if (dataSyncInfo.getWriteFlowControl().equals(1)) {
            speed.put("bytes", 0);
        } else {
            speed.put("bytes", dataSyncInfo.getWriteFlowControlNum());
        }
        speed.put("channel", dataSyncInfo.getConcurrencyLimit());
        // Without a primary key the reader cannot split; cap concurrency at 1.
        if (StringUtils.isEmpty(splitPK)) {
            speed.put("channel", 1);
        }
        setting.put("errorLimit", errorLimit);
        setting.put("speed", speed);
        job.put("content", contentArr);
        job.put("setting", setting);
        job.put("memorySize", dataSyncInfo.getMemorySize());
        job.put("taskId", dataSyncInfo.getId());
        root.put("job", job);
        return root;
    }
}
