package com.uh.rds.tester.data;

import com.uh.console.domain.RdsNode;
import com.uh.console.domain.vo.RdsServiceNodesVo;
import com.uh.console.enums.NodeStatusEnum;
import com.uh.rds.testing.config.TestConfig;
import com.uh.rds.testing.config.TestConfigManager;
import com.uh.rds.testing.conn.RdsConnectInfo;
import com.uh.console.client.ConsoleWebClient;
import com.uh.console.client.RdsNodeActions;
import com.uh.console.client.RdsServiceHelper;
import com.uh.rds.testing.data.TestDataGenerator;
import com.uh.rds.testing.utils.BatchDataTestUtils;
import com.uh.rds.testing.utils.RdsConnectionUtils;
import com.uh.rds.testing.validator.DataValidatorArguments;
import com.uh.rds.testing.validator.DataValidatorResult;
import com.uh.rds.testing.validator.DataValidatorRunner;
import org.junit.jupiter.api.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.util.List;
import java.util.Map;

import static com.uh.rds.testing.config.TestConfigManager.getConsoleClientConf;
import static com.uh.rds.testing.base.DataValidatorStep.PROCESS;
import static com.uh.rds.testing.base.DataValidatorStep.VALIDATE;

/**
 * 测试大数据量（数据大于2GB）情况下，存盘和恢复的情况。
 * 测试环境：
 *   - 一个测试登录开启的控制台。
 *   - 具备至少1个中心节点。
 *   - 建立一个单分片的集群，1个主节点，至少1个备节点
 *   - 把服务备份存盘时间改为5分钟（cfg.xml中Server.DataDump设置为5)。
 *   - 服务节点JVM配置最大内存要配置15g或以上内存，建议值： -Xmx20g
 *   - 每一个测试的RDS节点最大耗用内存在15GB左右，所以被测主机要根据部署的节点数配置足够的空余内存。
 *   - 本测试用例运行时也建议加入 -Xmx5g 的JVM参数，保证测试过程中有足够的内存。
 * 测试过程说明：
 *   - 大数据量插入后，并验证数据是否正确。（分批插入）
 *   - 重启主节点，然后启动多个备节点。
 *   - 验证数据是否和插入的数据一致。
 *   - 清空数据。
 * 测试配置项说明：
 *       testServiceId: 48    //测试服务ID
 *       batchSize: 100000    //每个批次中每种数据的数据量（key的数量，不算子元素数量）
 *       batchCount: 30       //准备多少批次的数据
 *       usingBatchCount: 13   //使用多少批次的数据进行测试
 *       threadCount: 50      //并发线程数
 *
 */
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
public class BigDataTest {
    // One test-class instance per run (PER_CLASS lifecycle), so an instance logger is fine.
    private final Logger logger = LoggerFactory.getLogger(this.getClass());

    private ConsoleWebClient client;       // console web client, logged in during init()
    private RdsServiceNodesVo serviceNodesVo; // topology (master/slave nodes) of the service under test
    private RdsConnectInfo connection;     // client connection used by the data validators
    private TestConfig conf;               // "BigDataTest" section of the test configuration

    // Root directory holding the generated batch data; each batch lives in "batch-<i>".
    private final File baseDir = new File("data/BigDataTest");
    // One CSV file per Redis data type inside each batch directory.
    private final String stringDataFile = "string-data.csv";
    private final String setDataFile = "set-data.csv";
    private final String hashDataFile = "hash-data.csv";
    private final String listDataFile = "list-data.csv";
    private final String zsetDataFile = "zset-data.csv";
    private final String streamDataFile = "stream-data.csv";

    /**
     * Logs into the console and resolves the service topology and connection
     * for the configured {@code testServiceId}.
     */
    @BeforeAll
    public void init() {
        logger.info("BigDataTest init ...");
        conf = TestConfigManager.getTestConfig("BigDataTest");
        Long testServiceId = conf.getLong("testServiceId");

        client = ConsoleWebClient.getInstance(getConsoleClientConf());
        client.login();

        // Fetch service node info and open the connection used for data operations.
        serviceNodesVo = RdsServiceHelper.getServiceInfo(testServiceId, client, false);
        connection = RdsConnectionUtils.getRdsConnection(serviceNodesVo);
    }

    /**
     * Builds the per-data-type CSV file map for one batch directory.
     * Shared by data generation and validator preparation so the on-disk
     * layout is defined in exactly one place.
     *
     * @param batchDir directory of a single batch ("batch-&lt;i&gt;")
     * @return immutable map of data-type name to its CSV file
     */
    private Map<String, File> batchDataFiles(File batchDir) {
        return Map.of(
                "string", new File(batchDir, stringDataFile),
                "set", new File(batchDir, setDataFile),
                "hash", new File(batchDir, hashDataFile),
                "list", new File(batchDir, listDataFile),
                "zset", new File(batchDir, zsetDataFile),
                "stream", new File(batchDir, streamDataFile));
    }

    /**
     * Generates {@code batchCount} batches of test data on disk, each with
     * {@code batchSize} keys per data type. A batch whose directory already
     * exists is skipped, so reruns reuse previously generated data.
     */
    @Test
    @Order(1)
    public void generateTestData() {
        logger.info("Preparing testing data ...");

        int batchSize = conf.getInteger("batchSize");
        int batchCount = conf.getInteger("batchCount");

        TestDataGenerator dataGenerator = new TestDataGenerator();

        boolean hasDataTask = false;
        for (int i = 0; i < batchCount; i++) {
            File batchDir = new File(baseDir, "batch-" + i);
            if (!batchDir.exists()) { // only generate data for missing batch directories
                BatchDataTestUtils.addDataFiles(dataGenerator, batchDataFiles(batchDir), false, batchSize, 5);
                hasDataTask = true;
            }
        }

        if (hasDataTask) {
            dataGenerator.executeTasks();
        } else {
            logger.info("Test data already exists, skip generating ...");
        }
    }

    /**
     * Restores the expected pre-test state: database flushed, master node
     * running, all slave nodes stopped.
     */
    @Test
    @Order(2)
    public void restoreBeginStatus() {
        // Flush all existing data.
        RdsConnectionUtils.flushDb(serviceNodesVo);

        // Ensure only the master node is running; stop every slave node.
        RdsNodeActions nodeAct = client.getRdsNodeActions();
        List<RdsNode> rdsNodes = serviceNodesVo.getNodes();
        for (RdsNode node : rdsNodes) {
            if (node.getMasterNode()) {
                if (node.getNodeStatusEnum() == NodeStatusEnum.STOP) {
                    nodeAct.startNodeWait(node.getNodeId());  // start the master node
                }
            } else {
                if (node.getNodeStatusEnum() == NodeStatusEnum.START) {
                    nodeAct.stopNodeWait(node.getNodeId());  // stop the slave node
                }
            }
        }
    }

    /**
     * Inserts {@code usingBatchCount} batches of data through the validator
     * runner and asserts each batch round-trips correctly.
     *
     * @throws Exception if validator preparation or execution fails
     */
    @Test
    @Order(3)
    void insertData() throws Exception {
        int usingBatchCount = conf.getInteger("usingBatchCount");

        for (int i = 0; i < usingBatchCount; i++) {
            DataValidatorRunner tester = prepareTester(i);
            DataValidatorResult result = tester.runTest(PROCESS, VALIDATE);
            Assertions.assertTrue(result.isPassed());
            System.out.println(result.summary());
            System.out.println("Batch-" + i + " data inserted!");
        }
    }

    /**
     * Creates a multi-threaded validator runner wired to one batch's data files.
     *
     * @param batchIndex index of the batch directory ("batch-&lt;i&gt;") to use
     * @return a runner with threads and data prepared against {@link #connection}
     * @throws Exception if validator setup fails
     */
    private DataValidatorRunner prepareTester(int batchIndex) throws Exception {
        int threadCount = conf.getInteger("threadCount");

        DataValidatorArguments config = new DataValidatorArguments();
        config.setValidateFromSlave(false);
        config.setThreadCount(threadCount);

        DataValidatorRunner tester = new DataValidatorRunner(config);

        File batchDir = new File(baseDir, "batch-" + batchIndex);
        BatchDataTestUtils.addValidators(tester, batchDataFiles(batchDir));

        tester.prepareThreadsData(connection);
        logger.info("Tester-{} prepared.", batchIndex);
        return tester;
    }

    /**
     * Restarts the master node, then starts all slave nodes, then waits the
     * configured {@code waitTime} (seconds) for replication to settle.
     *
     * @throws InterruptedException if the wait is interrupted
     */
    @Test
    @Order(4)
    public void restartService() throws InterruptedException {
        // Restart the master node first.
        RdsNodeActions nodeAct = client.getRdsNodeActions();
        List<RdsNode> rdsNodes = serviceNodesVo.getNodes();
        for (RdsNode node : rdsNodes) {
            if (node.getMasterNode()) {
                nodeAct.stopNodeWait(node.getNodeId());
                Thread.sleep(5000);
                nodeAct.startNodeWait(node.getNodeId());
            }
        }

        Thread.sleep(10000);

        // Start the slave nodes so they sync from the restarted master.
        nodeAct.startNodesWait(rdsNodes.stream().filter(node -> !node.getMasterNode()).map(RdsNode::getNodeId).toArray(Long[]::new));

        // 1000L forces long arithmetic, avoiding int overflow before widening.
        long waitTimeInMs = conf.getInteger("waitTime") * 1000L;
        Thread.sleep(waitTimeInMs);
    }

    /**
     * Validates (from the slaves) that every inserted batch survived the
     * restart unchanged.
     *
     * @throws Exception if validator preparation or execution fails
     */
    @Test
    @Order(5)
    void validateData() throws Exception {
        int usingBatchCount = conf.getInteger("usingBatchCount");

        for (int i = 0; i < usingBatchCount; i++) {
            System.out.println("Batch-" + i + " data validating ...");
            DataValidatorRunner tester = prepareTester(i);
            tester.getConfig().setValidateFromSlave(true);
            DataValidatorResult result = tester.runTest(VALIDATE);
            System.out.println(result.summary());
            Assertions.assertTrue(result.isPassed());
            System.out.println("Batch-" + i + " data validated.");
        }
    }

    /**
     * Clears the test data with a flush, because deleting keys one by one is
     * far too slow at this data volume. Disabled by default.
     */
    @Test
    @Order(6)
    @Disabled
    void clearTestData() {
        RdsConnectionUtils.flushDb(serviceNodesVo);
    }

}
