package avicit.bdp.dcs.json.service.reader;

import avicit.bdp.common.utils.CheckParaUtils;
import avicit.bdp.core.constant.Constants;
import avicit.bdp.core.util.json.JSONUtils;
import avicit.bdp.dcs.datasource.dto.JobDatasource;
import avicit.bdp.dcs.job.dto.JobDTO;
import avicit.bdp.dcs.plugin.BaseDataxParam;
import avicit.bdp.dcs.plugin.kafka.param.KafkaDataxParam;
import avicit.bdp.dcs.stream.dto.KafkaJsonDto;
import avicit.bdp.dcs.stream.dto.MsgTemplateDto;
import avicit.bdp.dcs.stream.service.MsgTemplateService;
import avicit.bdp.dcs.task.dto.ColumnMappingDto;
import avicit.bdp.dcs.task.dto.StreamColumnMappingDto;
import avicit.bdp.dcs.task.dto.TaskDTO;
import avicit.platform6.commons.utils.ComUtil;
import com.alibaba.fastjson2.JSONObject;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

@Component
public class StreamReaderBuilder {

  private static final Logger logger = LoggerFactory.getLogger(StreamReaderBuilder.class);

  @Autowired private MsgTemplateService msgTemplateService;

  private static final String KAFKA_CONSUMER_GROUP_ID_PREFIX = "console-consumer-";
  private static final String PREFIX = "prefix";
  private static final String VALUE = "value";
  private static final String TYPE = "type";
  private static final String NAME = "name";

  /**
   * Builds the streaming-collection (Kafka) reader parameters for a DataX job.
   *
   * <p>Parses the job's Kafka JSON config into connection/consumer settings, then builds one
   * {@code KafkaParam} per template id listed in {@code jobDTO.getSelectedFile()}
   * (comma-separated), resolving each template's device id and column mappings.
   *
   * @param taskDTO task carrying the column-mapping JSON ({@code getMappingColumn()})
   * @param readerDatasource datasource whose JDBC URL field holds the Kafka bootstrap servers
   * @param jobDTO job carrying the Kafka JSON config and the selected template ids
   * @return the populated {@link KafkaDataxParam} reader parameter object
   */
  public BaseDataxParam buildReaderByStream(
      TaskDTO taskDTO, JobDatasource readerDatasource, JobDTO jobDTO) {
    KafkaDataxParam dataxParam = new KafkaDataxParam();
    KafkaJsonDto kafkaJsonDto = JSONObject.parseObject(jobDTO.getKafkaJson(), KafkaJsonDto.class);
    dataxParam.setKafkaTopic(kafkaJsonDto.getTopics());
    // NOTE(review): the "JDBC URL" field is repurposed to carry the Kafka bootstrap servers.
    dataxParam.setBootstrapServers(readerDatasource.getJdbcUrl());
    dataxParam.setKafkaPartitions(kafkaJsonDto.getKafkaPartitions());
    dataxParam.setMsgId(kafkaJsonDto.getMsgId());
    dataxParam.setMsgSerializeType(kafkaJsonDto.getMsgAgreement());
    // Fall back to a generated consumer group id when none is configured.
    dataxParam.setGroupId(
        kafkaJsonDto.getGroupId() == null
            ? KAFKA_CONSUMER_GROUP_ID_PREFIX + ComUtil.getId()
            : kafkaJsonDto.getGroupId());
    dataxParam.setAutoOffestRest(kafkaJsonDto.getAutoOffestRest());

    // templateId -> column mappings, parsed from the task's mapping-column JSON.
    Map<String, List<ColumnMappingDto>> templateMap = buildTemplateColumnMap(taskDTO);

    // Build one KafkaParam entry per selected template id.
    List<KafkaDataxParam.KafkaParam> parasList = new ArrayList<>();
    String[] templateArr = jobDTO.getSelectedFile().split(Constants.COMMA);
    for (String templateId : templateArr) {
      // Single DB lookup per template, validated BEFORE any dereference.
      // (Previously the template was queried twice and getDeviceId() was read
      // before the validity check, which could NPE on a missing template.)
      MsgTemplateDto msgTemplateDto = msgTemplateService.selectByPrimaryKey(templateId);
      CheckParaUtils.checkDbObjectValidity(
          msgTemplateDto, logger, String.format("没有查到数据，请确保templateId：%s的模板是否存在", templateId));

      KafkaDataxParam.KafkaParam kafkaParam = new KafkaDataxParam.KafkaParam();
      Map<String, Object> deviceMap = new HashMap<>();
      deviceMap.put(PREFIX, kafkaJsonDto.getMsgTemplate());
      deviceMap.put(VALUE, msgTemplateDto.getDeviceId());
      kafkaParam.setDeviceId(deviceMap);
      kafkaParam.setColumn(buildColumns(templateMap.get(templateId)));
      parasList.add(kafkaParam);
    }

    dataxParam.setParas(parasList);
    return dataxParam;
  }

  /**
   * Parses the task's mapping-column JSON into a map of templateId -> column-mapping list.
   * Returns an empty map when no mappings are configured.
   */
  private static Map<String, List<ColumnMappingDto>> buildTemplateColumnMap(TaskDTO taskDTO) {
    Map<String, List<ColumnMappingDto>> templateMap = new HashMap<>();
    List<StreamColumnMappingDto> streamColumnMappingDtoList =
        JSONUtils.toList(taskDTO.getMappingColumn(), StreamColumnMappingDto.class);
    if (CollectionUtils.isNotEmpty(streamColumnMappingDtoList)) {
      for (StreamColumnMappingDto streamColumnMappingDto : streamColumnMappingDtoList) {
        templateMap.put(
            streamColumnMappingDto.getTemplateId(), streamColumnMappingDto.getColumnList());
      }
    }
    return templateMap;
  }

  /**
   * Converts a template's column mappings into the {@code name}/{@code type} map list expected by
   * the Kafka reader. The source column name is prefixed with {@code srcColumnPrefix + "."} when a
   * prefix is present.
   *
   * @param columnMappingDtoList mappings for one template; may be {@code null} or empty when the
   *     task's mapping JSON has no entry for the selected template (previously this caused an NPE)
   * @return the column descriptor list, empty when no mappings exist
   */
  private static List<Map<String, Object>> buildColumns(
      List<ColumnMappingDto> columnMappingDtoList) {
    List<Map<String, Object>> columns = new ArrayList<>();
    if (CollectionUtils.isEmpty(columnMappingDtoList)) {
      return columns;
    }
    for (ColumnMappingDto columnMappingDto : columnMappingDtoList) {
      Map<String, Object> map = new HashMap<>();
      if (StringUtils.isEmpty(columnMappingDto.getSrcColumnPrefix())) {
        map.put(NAME, columnMappingDto.getSrcColumnName());
      } else {
        map.put(
            NAME,
            columnMappingDto.getSrcColumnPrefix()
                + Constants.DOT
                + columnMappingDto.getSrcColumnName());
      }
      map.put(TYPE, columnMappingDto.getSrcColumnType());
      columns.add(map);
    }
    return columns;
  }
}
