package com.feidee.data.report.service.impl;

import com.feidee.data.report.model.CustomReportModel;
import com.feidee.data.report.model.SqlInfoModel;
import com.feidee.data.report.model.multidimensional_analysis.MultiDimAnalysisMetaReq;
import com.feidee.data.report.model.multidimensional_analysis.MultiDimAnalysisQueryReq;
import com.feidee.data.report.service.DataReporterService;
import com.feidee.data.report.service.MultidimensionalAnalysisService;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.datasource.DriverManagerDataSource;
import org.springframework.stereotype.Service;

import javax.annotation.Resource;
import java.sql.*;
import java.util.*;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

@Service
public class MultidimensionalAnalysisServiceImpl implements MultidimensionalAnalysisService {

  private static final Logger LOGGER = LoggerFactory.getLogger(MultidimensionalAnalysisServiceImpl.class);

  /** JDBC data source for the multi-dimensional analysis query store. */
  @Autowired
  private DriverManagerDataSource multidimQueryDataSource;

  /** Reporting service used by {@link #upsertMeta} to create and publish the report metadata. */
  @Resource(name = "reporterService")
  private DataReporterService reporterService;

  /**
   * Builds and executes an aggregated multi-dimensional query
   * ({@code select dims[,indicators] from table [where ...] group by dims [order by ...] limit n})
   * and returns each row as a column-name-to-value map.
   *
   * @param req query request carrying dimensions, optional indicators/filters,
   *            target table (only the first entry of {@code dbAndTable} is used),
   *            optional order-by clause, and a mandatory row limit
   * @return one map per result row, keyed by column name; empty list when no rows match
   * @throws SQLException if obtaining the connection or executing the query fails
   */
  @Override
  public List<Map<String, Object>> query(MultiDimAnalysisQueryReq req) throws SQLException {
    // NOTE(review): dimensions, filters and orderBy are concatenated into the SQL
    // text. Identifiers cannot be bound as PreparedStatement parameters, so these
    // values MUST be validated/whitelisted upstream to prevent SQL injection.
    StringBuilder builder = new StringBuilder("select ");
    builder.append(StringUtils.join(req.getDims(), ","));
    if (req.getIndicators() != null && !req.getIndicators().isEmpty()) {
      builder.append(",").append(StringUtils.join(req.getIndicators(), ","));
    }
    // 'from' part — only a single table is supported for now.
    builder.append(" from ").append(req.getDbAndTable().get(0));
    // Optional 'where' part; filters are combined with OR.
    if (req.getFilters() != null && !req.getFilters().isEmpty()) {
      builder.append(" where ").append(StringUtils.join(req.getFilters(), " or "));
    }
    // Group by every requested dimension.
    builder.append(" group by ").append(StringUtils.join(req.getDims(), ","));
    if (StringUtils.isNotEmpty(req.getOrderBy())) {
      builder.append(" order by ").append(req.getOrderBy());
    }
    // Always cap the result size for safety.
    builder.append(" limit ").append(req.getLimit());

    String sql = builder.toString();
    LOGGER.info("多维度查询SQL：{}", sql);

    List<Map<String, Object>> results = new ArrayList<>();
    // try-with-resources closes ResultSet, Statement and Connection in order even
    // on failure. The original closed only the Connection in a finally block,
    // leaking the statement/result set for the connection's lifetime.
    try (Connection connection = multidimQueryDataSource.getConnection();
         PreparedStatement statement = connection.prepareStatement(sql);
         ResultSet resultSet = statement.executeQuery()) {
      ResultSetMetaData metaData = resultSet.getMetaData();
      int columnCount = metaData.getColumnCount();
      while (resultSet.next()) {
        Map<String, Object> row = new HashMap<>(Math.max(columnCount * 2, 16));
        for (int j = 1; j <= columnCount; j++) {
          row.put(metaData.getColumnName(j), resultSet.getObject(j));
        }
        results.add(row);
      }
    }
    LOGGER.info("多维度查询结果：共计 {} 条数据", results.size());
    return results;
  }

  /**
   * Creates (or updates) the custom-report metadata backing a multi-dimensional
   * analysis of the given table, then publishes it via the reporter service.
   *
   * @param req request carrying the target {@code db.table} name and the
   *            dimension-column map used to build the distinct-values Hive SQL
   * @return the generated apply-sql name ({@code multidimAnalysis_<db>_<table>})
   * @throws InterruptedException if the wait for report creation is interrupted
   * @throws ExecutionException   if report creation fails
   * @throws TimeoutException     if report creation does not finish within 60 seconds
   */
  @Override
  public String upsertMeta(MultiDimAnalysisMetaReq req) throws InterruptedException, ExecutionException, TimeoutException {
    CustomReportModel customReportModel = new CustomReportModel();

    // Deterministic id per table: "mda_" + first 16 chars of base64(md5(dbAndTable)).
    // NOTE(review): getBytes() uses the platform default charset; for non-ASCII table
    // names the id would differ across hosts. Left as-is because changing it would
    // break ids already persisted — confirm table names are ASCII-only.
    customReportModel.setId("mda_" + Base64.getEncoder()
            .encodeToString(DigestUtils.md5(req.getDbAndTable().trim().getBytes()))
            .substring(0, 16));
    customReportModel.setPriority("中");
    customReportModel.setApplyname("多维分析" + req.getDbAndTable());
    customReportModel.setDataResources(req.getDbAndTable());
    customReportModel.setMysqlColumn(req.getDims());
    customReportModel.setApplysqlname("multidimAnalysis_" + req.getDbAndTable().replace(".", "_"));

    // Build the " col as col, col2 as col2," projection over the dimension names.
    StringBuilder distinctCols = new StringBuilder();
    for (String name : req.getDims().keySet()) {
      distinctCols.append(" ").append(name).append(" as ").append(name).append(",");
    }

    SqlInfoModel model = new SqlInfoModel();
    model.setSqlSource("hive");
    model.setSqlType("insert");
    model.setSqlContent("select distinct " + StringUtils.removeEnd(distinctCols.toString(), ",")
            + " from " + req.getDbAndTable());
    model.setSqlHistory(model.getSqlContent());

    Map<String, SqlInfoModel> querySqls = new HashMap<>();
    querySqls.put("sql1", model);
    customReportModel.setHivesql(querySqls);

    // Kick off report creation, wait up to 60s for it to finish, then publish.
    Future<?> future = reporterService.customReportOpr(customReportModel);
    future.get(60, TimeUnit.SECONDS);
    reporterService.crmPublishOpr(customReportModel); // publish result is not used by callers
    LOGGER.info("多维分析元数据发布结束");

    return customReportModel.getApplysqlname();
  }
}
