package cn.com.zetatech.loader.common.db;

import cn.com.zetatech.loader.common.enums.DbTypeEnums;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;

/**
 * Used for bulk insert/update into StarRocks: large batches are submitted over HTTP
 * (stream load); small batches are accumulated until either the size threshold or an
 * idle timeout triggers a flush.
 */
@Component
@Slf4j
public class StarRocksDbService {

    /** Flush threshold: once a table's buffered row count reaches this size, it is sent to StarRocks. */
    @Value("${zeta.store.starRocks.batchCombineDataMaxSize:10000}")
    private int starRockBatchCombineDataMaxSize;

    @Autowired
    private DefaultDBConfig defaultConfig;

    /**
     * Per-table row buffer, used to merge many small batches into fewer large ones.
     * Useful for databases that limit concurrent load jobs — e.g. StarRocks defaults
     * to 1000 concurrent batches — so growing the batch size reduces the batch count.
     * Key: table name; value: buffered rows (each row is column-name -> value).
     */
    ConcurrentHashMap<String, List<Map<String, Object>>> batchCombineData = new ConcurrentHashMap<>();

    /**
     * Last time each table's buffer was touched. A buffer that stays idle (typically the
     * tail of a load that never reaches the size threshold) is flushed by the scheduled task.
     */
    ConcurrentHashMap<String, Long> batchCombineDataUpdateTime = new ConcurrentHashMap<>();


    /**
     * Scheduled flush: writes any buffer that has been idle for at least 5 seconds,
     * even though it is below the size threshold.
     */
    @Scheduled(fixedRate = 5_000)
    private void batchInsertCombineData() {
        if (batchCombineDataUpdateTime.isEmpty()) {
            return;
        }
        long currentTime = System.currentTimeMillis();
        for (Map.Entry<String, Long> entry : batchCombineDataUpdateTime.entrySet()) {
            if (currentTime - entry.getValue() < 5_000) {
                continue;
            }
            // Claim the timestamp first, and only if it is still the value we examined.
            // A plain remove-after-flush races with a concurrent batchInsert(): the new
            // buffer's timestamp would be deleted and the data would sit un-flushed.
            if (!batchCombineDataUpdateTime.remove(entry.getKey(), entry.getValue())) {
                continue; // refreshed concurrently — buffer is not idle after all
            }
            List<Map<String, Object>> data = batchCombineData.remove(entry.getKey());
            if (CollectionUtils.isEmpty(data)) {
                continue;
            }
            batchInsertStarrocks(entry.getKey(), data);
            log.info("定时任务执行数据入库，业务ID：{}，条数：{}", entry.getKey(), data.size());
        }
    }


    /**
     * Returns the configured database type, defaulting to "oracle" when unset.
     */
    public String getDbType() {
        return StringUtils.isEmpty(defaultConfig.getDbType()) ? "oracle" : defaultConfig.getDbType();
    }


    /**
     * Buffers rows for the given table and flushes to StarRocks once the buffer
     * reaches {@link #starRockBatchCombineDataMaxSize} rows.
     *
     * @param tableName target table name
     * @param data      rows to insert; each map is column-name -> value
     */
    public void batchInsert(String tableName, List<Map<String, Object>> data) {
        if (CollectionUtils.isEmpty(data)) {
            return;
        }
        batchCombineData.compute(tableName, (key, cacheData) -> {
            if (cacheData == null) {
                // Defensive copy so later merges never mutate the caller's list.
                cacheData = new ArrayList<>(data);
            } else {
                cacheData.addAll(data);
                // NOTE: must NOT call batchCombineData.put(...) here — the
                // ConcurrentHashMap.compute contract forbids modifying the map from
                // inside the remapping function; the returned value is stored for us.
            }
            if (cacheData.size() >= starRockBatchCombineDataMaxSize) {
                // NOTE(review): this flush performs blocking HTTP while holding the
                // map's bin lock for this key — consider moving it outside compute().
                batchInsertStarrocks(key, cacheData);
                log.info("满足数据条数条件执行数据入库，业务ID：{}，条数：{}", key, cacheData.size());
                return null; // drop the flushed buffer
            }
            return cacheData;
        });
        batchCombineDataUpdateTime.put(tableName, System.currentTimeMillis());
    }


    /**
     * Bulk insert into StarRocks via HTTP stream load.
     * Serializes the rows as tab-separated columns with newline-separated rows
     * (the stream-load CSV format) and submits them in one request.
     *
     * @param tableName target table name
     * @param data      non-empty list of rows; the first row's key set defines the
     *                  column list, and all rows are assumed to share it
     * @throws RuntimeException wrapping any failure during serialization or sending
     */
    private void batchInsertStarrocks(String tableName, List<Map<String, Object>> data) {
        try {
            StarrocksConfigInfo starrocksConfigInfo = new StarrocksConfigInfo(defaultConfig);
            // The table is expected to contain every column defined by the fmt/row keys.
            List<String> dataColumns = new ArrayList<>(data.get(0).keySet());
            String columns = String.join(",", dataColumns);

            log.info("starrrocks start http stream loader data, tableName = {}, columns = {}", tableName, columns);

            StringBuilder stringBuilder = new StringBuilder();
            int columnNum = dataColumns.size();
            for (Map<String, Object> dataMap : data) {
                // Append directly to the builder — per-row String concatenation in the
                // original was accidentally quadratic.
                for (int j = 0; j < columnNum; j++) {
                    Object value = dataMap.get(dataColumns.get(j));
                    // "\N" is the stream-load marker for SQL NULL.
                    stringBuilder.append(value == null ? "\\N" : value);
                    stringBuilder.append(j == columnNum - 1 ? '\n' : '\t');
                }
            }
            if (stringBuilder.length() > 0) {
                // Drop the trailing newline (guard avoids an exception on zero columns).
                stringBuilder.deleteCharAt(stringBuilder.length() - 1);
            }
            String loadData = stringBuilder.toString();
            StarRocksStreamLoad starrocksStreamLoad = new StarRocksStreamLoad();
            starrocksStreamLoad.sendData(starrocksConfigInfo, tableName, columns, loadData);
        } catch (Exception e) {
            log.error(e.getMessage(), e);
            throw new RuntimeException(e);
        }
    }


}
