package com.sh.data.engine.domain.integration.job;

import com.google.common.collect.Lists;
import com.sh.data.engine.common.constants.Constants.RedisKey;
import com.sh.data.engine.common.enumDefinition.DSType;
import com.sh.data.engine.common.util.DataUtil;
import com.sh.data.engine.domain.base.model.enums.Database;
import com.sh.data.engine.domain.integration.datasource.service.DataSourceService;
import com.sh.data.engine.domain.shims.DbManagerFactory;
import com.sh.data.engine.domain.shims.db.BaseDbManager;
import com.sh.data.engine.domain.shims.db.DbOptions;
import com.sh.data.engine.domain.shims.hbase.util.HBaseUtil;
import com.sh.data.engine.domain.shims.hdfs.util.HdfsUtil;
import com.sh.data.engine.domain.shims.kafka.util.KafkaUtil;
import com.sh.data.engine.domain.shims.mongo.MongoOptions;
import com.sh.data.engine.domain.shims.mongo.manager.MongoManager;
import com.sh.data.engine.domain.shims.mqtt.util.MQTTUtil;
import com.sh.data.engine.job.core.handler.annotation.XxlJob;
import com.sh.data.engine.repository.dao.integration.datasource.entity.DataSourceEntity;
import com.sh.data.engine.repository.dao.integration.datasource.entity.DataSourceEntity.HDFSConfig;
import com.sh.data.engine.repository.dao.integration.datasource.entity.DataSourceEntity.HiveConfig;
import com.sh.data.engine.repository.dao.integration.datasource.entity.DataSourceEntity.MongoConfig;
import com.sh.data.engine.repository.dao.integration.datasource.entity.DataSourceEntity.RdbmsConfig;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.lucene.util.NamedThreadFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Component;

import java.util.Date;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * @author: mengzheng.mhc
 * @date: 2024/6/17 16:56
 */
@Component("dataSourceCheckStatusJob")
public class DataSourceCheckStatusJob {

    private static final Logger log = LoggerFactory.getLogger(DataSourceCheckStatusJob.class);

    /** Master switch; when false the handler logs a warning and returns immediately. */
    @Value("${data.engine.datasource-check.enabled:false}")
    private boolean enabled;

    /** Per-datasource connectivity-check timeout, in seconds (default 3). */
    @Value("${data.engine.datasource-check.time:3}")
    private int time;

    @Autowired
    private DataSourceService dataSourceService;

    @Autowired
    private RedisTemplate<String, String> redisTemplate;

    /**
     * Shared worker pool for connectivity probes.
     *
     * <p>Uses {@link ThreadPoolExecutor.CallerRunsPolicy} (not AbortPolicy): all probes are now
     * submitted up front, so with a bounded queue a large datasource list would otherwise hit
     * {@link RejectedExecutionException}; caller-runs merely throttles submission instead.
     */
    private static final ExecutorService pool =
        new ThreadPoolExecutor(
            Runtime.getRuntime().availableProcessors() + 1,
            Runtime.getRuntime().availableProcessors() + 1,
            0,
            TimeUnit.MICROSECONDS,
            new LinkedBlockingQueue<>(100),
            new NamedThreadFactory("datasource-check-pool"),
            new ThreadPoolExecutor.CallerRunsPolicy());

    /**
     * Job entry point: checks the connectivity of every active data source and batch-updates
     * each source's status flag (1 = reachable, 0 = unreachable).
     *
     * <p>Scheduling is driven by XXL-Job via the "dataSourceCheckJobHandler" handler name.
     * Also records the last-run timestamp in Redis under {@code RedisKey.DATASOURCE_LAST_UPDATE}.
     */
    @XxlJob("dataSourceCheckJobHandler")
    public void dataSourceCheckJobHandler() {
        if (!enabled) {
            log.warn("task checkDataSourceStatus is not enable, skip");
            return;
        }
        Date date = new Date();
        try {
            redisTemplate.opsForValue().set(RedisKey.DATASOURCE_LAST_UPDATE, DataUtil.format(date));
            run0();
        } catch (Exception e) {
            log.error("check dataSource status error", e);
        }
    }

    /**
     * Probes all active data sources concurrently and persists the aggregated result.
     *
     * <p>All probes are submitted to the pool first, then collected, so they genuinely run in
     * parallel. (Previously {@code Future.get} was invoked right after each submit, which
     * blocked the loop on every task and serialized the entire run, making the pool and the
     * accompanying CountDownLatch pointless.)
     *
     * @throws InterruptedException kept for signature compatibility; interruption during result
     *     collection is handled inline by restoring the interrupt flag
     */
    private void run0() throws InterruptedException {
        List<DataSourceEntity> dataSourceActive = dataSourceService.getDataSourceActive();
        if (CollectionUtils.isEmpty(dataSourceActive)) {
            log.info("got 0 data sources to check status");
            return;
        }

        int timeout = time;
        long startMillis = System.currentTimeMillis();
        int total = dataSourceActive.size();
        // Only the current thread mutates these now that collection is single-threaded,
        // but COW lists are kept to stay conservative about any future refactor.
        List<Long> succeedDS = Lists.newCopyOnWriteArrayList();
        List<Long> failedDS = Lists.newCopyOnWriteArrayList();

        // Phase 1: submit every probe so they run in parallel.
        List<Future<Integer>> futures = Lists.newArrayListWithCapacity(total);
        for (DataSourceEntity source : dataSourceActive) {
            futures.add(pool.submit(() -> runOnce(source)));
        }

        // Phase 2: collect results; a probe that exceeds the timeout is cancelled and
        // counted as failed.
        for (int i = 0; i < total; i++) {
            DataSourceEntity source = dataSourceActive.get(i);
            Future<Integer> future = futures.get(i);
            try {
                Integer status = future.get(timeout, TimeUnit.SECONDS);
                if (status == 0) {
                    failedDS.add(source.getId());
                } else {
                    succeedDS.add(source.getId());
                }
            } catch (InterruptedException | ExecutionException | TimeoutException e) {
                future.cancel(true);
                // Placeholders now match their arguments (the old message logged `total`
                // as the timeout and the timeout as milliseconds) and the exception is
                // passed as the final argument so SLF4J records the stack trace.
                log.error(
                    "update data source status failed/timeout({}s), dsType {} dsLink {} status {}",
                    timeout,
                    source.getDsType(),
                    source.getDsLink(),
                    0,
                    e);
                failedDS.add(source.getId());
                if (e instanceof InterruptedException) {
                    // Restore the interrupt flag so callers can observe the interruption;
                    // remaining futures will fail fast and be marked as failed.
                    Thread.currentThread().interrupt();
                }
            }
        }

        // Batch update: one write per status value instead of one per datasource.
        updateDataSourceStatus(succeedDS, failedDS);

        log.info(
            "check datasource status finished, size={}, cost={}ms.",
            total,
            (System.currentTimeMillis() - startMillis));
    }

    /**
     * Persists the probe outcome in two batch writes.
     *
     * @param successDS ids whose connectivity check passed (status set to 1)
     * @param failedDS ids whose connectivity check failed or timed out (status set to 0)
     */
    private void updateDataSourceStatus(List<Long> successDS, List<Long> failedDS) {
        if (CollectionUtils.isNotEmpty(successDS)) {
            dataSourceService.updateStatusByIds(1, successDS);
        }
        if (CollectionUtils.isNotEmpty(failedDS)) {
            dataSourceService.updateStatusByIds(0, failedDS);
        }
    }

    /**
     * Tests connectivity of a single data source.
     *
     * @param source the data source to probe
     * @return 1 when the connection test succeeds, 0 on unknown type or any failure
     */
    private Integer runOnce(DataSourceEntity source) {
        Long id = source.getId();
        String dsLink = source.getDsLink();

        String dsTypeString = source.getDsType();
        DSType dsType = DSType.from(dsTypeString);
        if (Objects.isNull(dsType)) {
            // Unknown datasource type: treat as failed.
            return 0;
        }
        try {
            // Relational databases share one JDBC-based check; non-RDBMS types are
            // dispatched by the switch below (an RDBMS type matches no case there).
            if (DSType.isRdbms(dsTypeString)) {
                RdbmsConfig rdbmsConfig = source.getRdbmsConfig();
                String password = rdbmsConfig.getPassword();
                String username = rdbmsConfig.getUsername();
                BaseDbManager dbManager = getDbManager(dsTypeString, username, password, dsLink);
                dbManager.testConnection();
            }
            switch (dsType) {
                case Hive:
                    HiveConfig hiveConfig = source.getHiveConfig();
                    String username = hiveConfig.getUsername();
                    String password = hiveConfig.getPassword();
                    String hiveSiteAddress = hiveConfig.getHiveSiteAddress();
                    BaseDbManager dbManager = getDbManager(dsTypeString, username, password, dsLink, hiveSiteAddress);
                    dbManager.testConnection();
                    break;
                case HBase:
                    HBaseUtil.testConnection(dsLink);
                    break;
                case Kafka:
                    KafkaUtil.testConnection(dsLink, 3);
                    break;
                case MQTT:
                    MQTTUtil.testConnection(dsLink, 3);
                    break;
                case Mongodb:
                    MongoConfig mongoConfig = source.getMongoConfig();
                    MongoOptions mongoOptions = new MongoOptions();
                    mongoOptions.setAddressList(Lists.newArrayList(dsLink));
                    mongoOptions.setAuthDbName(mongoConfig.getAuthDbName());
                    mongoOptions.setDbName(mongoConfig.getDbName());
                    mongoOptions.setUsername(mongoConfig.getUsername());
                    mongoOptions.setPassword(mongoConfig.getPassword());
                    MongoManager mongoManager = new MongoManager(mongoOptions);
                    mongoManager.testConnection();
                    break;
                case HDFS:
                    HDFSConfig hdfsConfig = source.getHdfsConfig();
                    String hdfsUser = hdfsConfig.getHdfsUser();
                    String connectionProperty = source.getConnectionProperty();
                    HdfsUtil.testConnection(hdfsUser, dsLink, connectionProperty);
                    break;
                default:
                    // RDBMS types (already checked above) and any other known type
                    // without a dedicated probe fall through as success.
                    break;
            }
        } catch (Exception e) {
            log.error(
                "update data source status error, id {} dsTypeString {} dsLink {}", id, dsTypeString, dsLink, e);
            return 0;
        }
        return 1;
    }

    /**
     * Builds a JDBC manager for a non-Hive relational data source.
     *
     * @param dsType datasource type string, mapped via {@link Database#from}
     * @param username database user
     * @param password database password
     * @param jdbcLink JDBC connection URL
     * @return a manager whose {@code testConnection()} validates the link
     */
    private static BaseDbManager getDbManager(
        String dsType, String username, String password, String jdbcLink) {
        Database database = Database.from(dsType);
        DbOptions dbOptions = new DbOptions(database, jdbcLink, username, password);
        return DbManagerFactory.getDbManager(database, dbOptions);
    }

    /**
     * Builds a JDBC manager for Hive, which additionally needs its hive-site.xml location.
     *
     * @param dsType datasource type string, mapped via {@link Database#from}
     * @param username database user
     * @param password database password
     * @param jdbcLink JDBC connection URL
     * @param hiveSiteXmlPath path/address of the hive-site.xml configuration
     * @return a manager whose {@code testConnection()} validates the link
     */
    private static BaseDbManager getDbManager(
        String dsType, String username, String password, String jdbcLink, String hiveSiteXmlPath) {
        Database database = Database.from(dsType);
        DbOptions dbOptions = new DbOptions(database, jdbcLink, username, password, hiveSiteXmlPath);
        return DbManagerFactory.getDbManager(database, dbOptions);
    }
}
