package com.bst.etl.service.impl;

import com.alibaba.fastjson.JSON;
import com.bst.base.domain.BaseDb;
import com.bst.base.service.IBaseDbService;
import com.bst.common.constant.JobConstant;
import com.bst.common.core.text.Convert;
import com.bst.common.enums.DataSourceTypeEnum;
import com.bst.common.exception.base.BaseRuntimeException;
import com.bst.common.utils.LoginAPIUtil;
import com.bst.etl.domain.EtlDrawBaseDatax;
import com.bst.etl.mapper.EtlDrawBaseDataxMapper;
import com.bst.etl.service.IEtlDrawBaseDataxService;
import com.bst.etl.vo.DataxCreateParamVO;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Maps;
import com.google.common.collect.ImmutableMap;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Service implementation for DataX extraction jobs: CRUD over
 * {@link EtlDrawBaseDatax} records plus assembly of the DataX job-configuration
 * JSON (reader / writer / setting / core sections).
 *
 * @author ruoyi
 * @date 2024-02-01
 */
@Service
public class EtlDrawBaseDataxServiceImpl implements IEtlDrawBaseDataxService 
{
    /** 5 MiB — used both as the global speed cap and the per-channel byte limit. */
    private static final int FIVE_MB_BYTES = 5 * 1024 * 1024;

    /** Jackson ObjectMapper is thread-safe once configured; share one instance. */
    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

    /** Matches every line-break flavour so multi-line SQL can be collapsed. */
    private static final Pattern LINE_BREAKS = Pattern.compile("\r\n|\r|\n|\n\r");

    @Autowired
    private EtlDrawBaseDataxMapper etlDrawBaseDataxMapper;
    @Autowired
    private IBaseDbService baseDbService;
    /** Doris FE stream-load endpoint(s), comma separated. */
    @Value("${doris.feload.url}")
    private String dorisFeloadUrl;
    /** Doris BE stream-load endpoint(s); not referenced here — kept for config binding. */
    @Value("${doris.beload.url}")
    private String dorisBeloadUrl;
    /** Target Doris database used as the writer's selectedDatabase. */
    @Value("${doris.database}")
    private String dorisDataBase;

    /**
     * Query DataX records by template id and name.
     *
     * @param idTmplate template id
     * @param na        name filter
     * @return matching DataX records
     */
    @Override
    public List<EtlDrawBaseDatax> selectEtlDrawBaseDataxByIdTemplate(Integer idTmplate, String na) {
        return etlDrawBaseDataxMapper.selectEtlDrawBaseDataxByIdTemplate(idTmplate, na);
    }

    /**
     * Query a single DataX record by primary key.
     *
     * @param idDrawDatax DataX primary key
     * @return the record, or null when absent
     */
    @Override
    public EtlDrawBaseDatax selectEtlDrawBaseDataxByIdDrawDatax(Long idDrawDatax)
    {
        return etlDrawBaseDataxMapper.selectEtlDrawBaseDataxByIdDrawDatax(idDrawDatax);
    }

    /**
     * Query DataX records matching the non-null fields of the example entity.
     *
     * @param etlDrawBaseDatax example entity used as filter
     * @return matching DataX records
     */
    @Override
    public List<EtlDrawBaseDatax> selectEtlDrawBaseDataxList(EtlDrawBaseDatax etlDrawBaseDatax)
    {
        return etlDrawBaseDataxMapper.selectEtlDrawBaseDataxList(etlDrawBaseDatax);
    }

    /**
     * Insert a DataX record. The stored config JSON is inspected first: when the
     * reader carries no "where" clause and the extraction mode is not "all", the
     * source-table field is cleared. Creator, creation time and active flag are
     * stamped here.
     *
     * @param etlDrawBaseDatax record to insert
     * @return affected row count
     */
    @Override
    public int insertEtlDrawBaseDatax(EtlDrawBaseDatax etlDrawBaseDatax)
    {
        clearSourceTableIfNoWhere(etlDrawBaseDatax);
        etlDrawBaseDatax.setFgAct(1);
        etlDrawBaseDatax.setNaEmpCre(LoginAPIUtil.getLoginUsename());
        etlDrawBaseDatax.setDtSysCre(new Date());
        return etlDrawBaseDataxMapper.insertEtlDrawBaseDatax(etlDrawBaseDatax);
    }

    /**
     * Update a DataX record, applying the same config inspection as insert.
     *
     * @param etlDrawBaseDatax record to update
     * @return affected row count
     */
    @Override
    public int updateEtlDrawBaseDatax(EtlDrawBaseDatax etlDrawBaseDatax)
    {
        clearSourceTableIfNoWhere(etlDrawBaseDatax);
        return etlDrawBaseDataxMapper.updateEtlDrawBaseDatax(etlDrawBaseDatax);
    }

    /**
     * Best-effort inspection of the DataX config JSON, shared by insert and
     * update: when the reader has no "where" clause and the extraction mode is
     * not "all", the source-table field is cleared. Blank or malformed config is
     * tolerated and leaves the entity untouched (original lenient behaviour).
     *
     * @param etlDrawBaseDatax entity whose config is inspected; may be mutated
     */
    private void clearSourceTableIfNoWhere(EtlDrawBaseDatax etlDrawBaseDatax)
    {
        String config = etlDrawBaseDatax.getConfig();
        if (StringUtils.isBlank(config)) {
            return;
        }
        try {
            JsonNode rootNode = OBJECT_MAPPER.readTree(config);
            // path() never returns Java null, so a config missing "job",
            // "reader" or "parameter" no longer triggers an NPE /
            // IndexOutOfBoundsException that the catch below would not handle.
            List<JsonNode> readers = rootNode.path("job").findValues("reader");
            boolean whereMissing = readers.isEmpty()
                    || readers.get(0).path("parameter").path("where").isMissingNode();
            if (whereMissing && !"all".equals(etlDrawBaseDatax.getEuWhereType())) {
                etlDrawBaseDatax.setTbSour(null);
            }
        } catch (JsonProcessingException e) {
            // Deliberately non-fatal: an unparsable config still gets persisted
            // with its source table intact, as before.
            e.printStackTrace();
        }
    }

    /**
     * Delete DataX records by a comma-separated list of primary keys.
     *
     * @param idDrawDataxs comma-separated primary keys
     * @return affected row count
     */
    @Override
    public int deleteEtlDrawBaseDataxByIdDrawDataxs(String idDrawDataxs)
    {
        return etlDrawBaseDataxMapper.deleteEtlDrawBaseDataxByIdDrawDataxs(Convert.toStrArray(idDrawDataxs));
    }

    /**
     * Delete a single DataX record by primary key.
     *
     * @param idDrawDatax DataX primary key
     * @return affected row count
     */
    @Override
    public int deleteEtlDrawBaseDataxByIdDrawDatax(Long idDrawDatax)
    {
        return etlDrawBaseDataxMapper.deleteEtlDrawBaseDataxByIdDrawDatax(idDrawDatax);
    }

    /**
     * Assemble the complete DataX job JSON for the given parameters:
     * {@code job.setting}, {@code job.content[0]} (reader + writer) and the
     * {@code core.transport.channel.speed.byte} override.
     *
     * @param paramVO source/target description of the extraction job
     * @return serialized DataX job JSON
     */
    @Override
    public String buildJobJson(DataxCreateParamVO paramVO) {
        Map<String, Object> root = new LinkedHashMap<>();
        Map<String, Object> job = new LinkedHashMap<>();
        job.put("setting", buildSetting());
        job.put("content", ImmutableList.of(buildContent(paramVO)));
        job.put("jobMode", "1");
        root.put("job", job);

        // DataX rejects a job that sets a global bps limit while the
        // per-channel bps is empty or non-positive, so supply
        // core.transport.channel.speed.byte explicitly.
        Map<String, Object> byteLimit = Maps.newLinkedHashMap();
        byteLimit.put("byte", FIVE_MB_BYTES);
        Map<String, Object> speed = Maps.newLinkedHashMap();
        speed.put("speed", byteLimit);
        Map<String, Object> channel = Maps.newLinkedHashMap();
        channel.put("channel", speed);
        Map<String, Object> transport = Maps.newLinkedHashMap();
        transport.put("transport", channel);
        root.put("core", transport);
        return JSON.toJSONString(root);
    }

    /**
     * Global job settings: a single channel capped at 5 MiB/s, with an error
     * tolerance of 100 records or 20%.
     */
    private Map<String, Object> buildSetting() {
        Map<String, Object> setting = Maps.newLinkedHashMap();
        Map<String, Object> speed = Maps.newLinkedHashMap();
        speed.put("channel", 1);
        speed.put("byte", FIVE_MB_BYTES);
        Map<String, Object> errorLimit = Maps.newLinkedHashMap();
        errorLimit.put("record", 100);
        errorLimit.put("percentage", 0.2);
        setting.put("speed", speed);
        setting.put("errorLimit", errorLimit);
        return setting;
    }

    /**
     * Build one job.content entry: the reader (from the source datasource
     * referenced by the param) and the writer (the warehouse datasource).
     */
    private Map<String, Object> buildContent(DataxCreateParamVO paramVO) {
        Map<String, Object> content = Maps.newLinkedHashMap();
        BaseDb dbReader = baseDbService.selectBaseDbById(paramVO.getIdDbReader());
        content.put("reader", buildReader(paramVO, dbReader));
        content.put("writer", buildWriter(paramVO));
        return content;
    }

    /**
     * Build the reader section. Uses a free-form querySql when provided;
     * otherwise a column/table extraction with an optional where clause
     * (omitted when the extraction mode is "all").
     *
     * @param paramVO       job parameters
     * @param jobDatasource source datasource (type, url, credentials)
     */
    private Map<String, Object> buildReader(DataxCreateParamVO paramVO, BaseDb jobDatasource) {
        Map<String, Object> readerObj = Maps.newLinkedHashMap();
        // Plugin name convention: "<dbtype>reader", e.g. "mysqlreader".
        readerObj.put("name", jobDatasource.getSdDbtp() + "reader");
        Map<String, Object> parameterObj = Maps.newLinkedHashMap();
        Map<String, Object> connectionObj = Maps.newLinkedHashMap();

        parameterObj.put("username", jobDatasource.getNaUse());
        parameterObj.put("password", jobDatasource.getPwd());

        if (StringUtils.isNotBlank(paramVO.getQuerySql())) {
            // querySql mode: columns/table/where are embedded in the SQL itself.
            connectionObj.put("querySql", ImmutableList.of(paramVO.getQuerySql()));
        } else {
            parameterObj.put("column", paramVO.getColumns());
            // "all" means full extraction — no where filter even if one is set.
            if (StringUtils.isNotBlank(paramVO.getWhereSql()) && !"all".equals(paramVO.getEuWhereType())) {
                parameterObj.put("where", paramVO.getWhereSql());
            }
            connectionObj.put("table", new String[]{paramVO.getTableName()});
        }
        parameterObj.put("splitPk", paramVO.getSplitPk());
        connectionObj.put("jdbcUrl", ImmutableList.of(jobDatasource.getUrl()));

        parameterObj.put("connection", ImmutableList.of(connectionObj));

        readerObj.put("parameter", parameterObj);

        return readerObj;
    }

    /**
     * Build the writer section against the data-warehouse datasource
     * (sdBuscls = "dw"), with Doris-specific stream-load options when the
     * warehouse type is Doris.
     *
     * @param paramVO job parameters (target table, columns, pre-SQL)
     * @return writer section of the job JSON
     * @throws BaseRuntimeException when no warehouse datasource is configured
     */
    public Map<String, Object> buildWriter(DataxCreateParamVO paramVO) {
        BaseDb query = new BaseDb();
        query.setSdBuscls("dw");
        List<BaseDb> dbList = baseDbService.selectBaseDbList(query);
        if (dbList.isEmpty()) {
            // Message means "data warehouse not configured".
            throw new BaseRuntimeException("数据仓库未配置");
        }
        BaseDb jobDatasource = dbList.get(0);
        Map<String, Object> writerObj = Maps.newLinkedHashMap();
        writerObj.put("name", jobDatasource.getSdDbtp() + "writer");

        Map<String, Object> parameterObj = Maps.newLinkedHashMap();
        parameterObj.put("duplicateErrorSkip", true);
        parameterObj.put("username", jobDatasource.getNaUse());
        parameterObj.put("password", jobDatasource.getPwd());
        parameterObj.put("column", paramVO.getColumns());
        parameterObj.put("preSql", splitSql(paramVO.getBeforeSql()));
        Map<String, Object> connectionObj = Maps.newLinkedHashMap();
        connectionObj.put("table", new String[]{paramVO.getTarTableName()});
        parameterObj.put("connection", ImmutableList.of(connectionObj));
        // NOTE(review): if DataSourceTypeEnum is a true enum, equals() against a
        // String is always false and this Doris branch never executes — it would
        // need DataSourceTypeEnum.DORIS.name().equals(...). Left unchanged
        // pending confirmation of DataSourceTypeEnum's declaration.
        if (DataSourceTypeEnum.DORIS.equals(jobDatasource.getSdDbtp().toUpperCase())) {
            Map<String, Object> loadPropsMap = new LinkedHashMap<>();
            connectionObj.put("selectedDatabase", dorisDataBase);
            loadPropsMap.put("format", "json");
            loadPropsMap.put("strip_outer_array", true);
            parameterObj.put("loadProps", loadPropsMap);
            parameterObj.put("maxBatchRows", 200000);
            parameterObj.put("maxBatchByteSize", 104857600);
            parameterObj.put("lineDelimiter", "\n");
            connectionObj.put("jdbcUrl", jobDatasource.getUrl());
            parameterObj.put("loadUrl", dorisFeloadUrl.split(","));
        } else {
            connectionObj.put("jdbcUrl", jobDatasource.getUrl());
        }
        writerObj.put("parameter", parameterObj);
        return writerObj;
    }

    /**
     * Strip all line breaks from a SQL script and split it on ';' into
     * individual statements.
     *
     * @param sql raw SQL script, possibly multi-line
     * @return statement array, or {@code null} when the input is blank
     *         (callers treat a null preSql as "no statements")
     */
    private String[] splitSql(String sql) {
        if (StringUtils.isBlank(sql)) {
            return null;
        }
        String flattened = LINE_BREAKS.matcher(sql).replaceAll("");
        return flattened.split(";");
    }
}
