"""
Case Type   : datakit录制回放工具
Case Name   : 创建录制回放任务时修改自定义配置，参数设置异常，执行时有合理报错
Create At   : 2025/3/27
Owner       : @lonely-dance
Description :
    1.导入服务器和实例
    2.创建源端目标端库
    3.创建录制回放任务
    4.下载配置录制回放工具，参数设置异常参数
    5.启动任务
    6.删除任务
    7.清理环境
Expect      :
    1.成功
    2.成功
    3.成功
    4.成功
    5.有合理报错
    6.删除任务成功
    7.成功
History     :
"""

import os
import re
import time
import unittest
from datetime import datetime, timedelta

from yat.test import Node
from yat.test import macro
from testcase.utils.CommonSH import CommonSH
from testcase.utils.Logger import Logger
from testcase.utils.datakit_api.CommonApi import CommonApi
from testcase.utils.datakit_api.DataMigration import DataMigrationApi
from testcase.utils.CommonDatakit import CommonDatakit
from testcase.utils.CommonMySQL import CommonMySQL


class Tools(unittest.TestCase):
    """Transcribe/replay tool test: a task created with deliberately invalid
    custom configuration must fail with a reasonable error when started.

    Flow: import hosts/instances -> create source/target databases ->
    create a transcribe_replay task -> download/configure the tool with
    bad parameters -> start the task and expect an ERROR -> clean up.
    """

    def setUp(self):
        """Prepare nodes, API clients and per-case task metadata."""
        self.log = Logger()
        self.log.info(f"-----{os.path.basename(__file__)} start-----")
        # Source side is MySQL, target side is the openGauss primary.
        self.mysql_node = Node('mysql5')
        self.mysql_root_node = Node('mysql5Root')
        self.primary_node = Node('PrimaryDbUser')
        self.primary_root_node = Node('PrimaryRoot')
        self.datakit_node = Node('Datakit')
        self.host_list = [self.primary_root_node, self.mysql_root_node]
        self.db_list = [self.mysql_node, self.primary_node]
        self.com_sh = CommonSH('PrimaryDbUser')
        self.token = self.com_sh.get_token_value(macro.DATAKIT_PASSWD)
        self.log.info(self.token)
        self.assertNotEqual("error", self.token)
        self.com_api = CommonApi(self.token)
        self.datakit_sh = CommonDatakit(self.token)
        self.data_migration_api = DataMigrationApi(self.token)
        self.com_mysql = CommonMySQL('mysql5')
        # Task name is derived from the case file name, e.g. "Case1234".
        self.task_name = re.search(r'Case\d+', os.path.basename(__file__)).group()
        self.source_db = 'transcribe'
        self.target_db = 'replay_db'
        # 0 means "no task created yet"; overwritten once the save API succeeds,
        # so tearDown can always issue a delete safely.
        self.task_id = 0

    def test_transcribe_replay(self):
        """Create a transcribe/replay task with broken config; starting it
        must surface an ERROR in the task's errorMsg within 10 minutes."""
        text = '----step1:添加服务器 expect:成功----'
        self.log.info(text)
        self.datakit_sh.import_host(self.host_list)
        self.datakit_sh.import_db(self.db_list)

        text = '----step2:创建源端和目标端库----'
        self.log.info(text)
        sql1 = f'drop database if exists {self.source_db}; create database {self.source_db};'
        self.log.info(sql1)
        res1 = self.com_mysql.execut_db_sql(sql1)
        self.log.info(res1)
        # Target database must be B-compatible; a schema named after the
        # source database receives the replayed objects.
        sql2 = f"""source {macro.DB_ENV_PATH};
        gsql -d postgres -p {self.primary_node.db_port} -c "drop database if exists {self.target_db};
                                      create database {self.target_db} with dbcompatibility 'B';"
        gsql -d {self.target_db} -p {self.primary_node.db_port} -c "CREATE SCHEMA {self.source_db}";
        """
        res2 = self.primary_node.sh(sql2).result()
        self.log.info(res2)

        text = '----step3:创建录制回放任务 expect:成功----'
        self.log.info(text)
        self.log.info('获取版本号')
        # Extract the datakit version from the install directory listing.
        # NOTE: backslashes doubled so the shell receives a literal \. in the
        # sed pattern without triggering Python's invalid-escape warning.
        cmd = f"cd {macro.DATAKIT_INSTALL_PATH};" \
              f"ls | grep openGauss | sed -nE 's/.*openGauss-datakit-([0-9]+\\.[0-9]+\\.[0-9]+(-[0-9A-Za-z]+)?).*/\\1/p'"
        version = self.datakit_node.sh(cmd).result()
        self.log.info(version)
        self.log.info('获取node_id')
        response_source = self.data_migration_api.get_data_migration_resource('sourceClusters')
        source_node_id = response_source.json()['data']['sourceClusters'][0]['nodes'][0]['clusterNodeId']
        response_target = self.data_migration_api.get_data_migration_resource('targetClusters')
        target_node_id = response_target.json()['data']['targetClusters'][0]['clusterNodes'][0]['nodeId']
        self.log.info('----创建任务----')
        data = {"taskName": f"{self.task_name}", "sourceDbType": "MySQL", "sourceIp": f"{self.mysql_node.ssh_host}",
                "sourcePort": f'{self.mysql_node.db_port}',
                "sourceInstallPath": f"{macro.DB_MY_MYPATH}1", "targetIp": f"{self.primary_node.ssh_host}",
                "targetPort": self.primary_node.db_port, "targetInstallPath": f"{macro.DB_MY_MYPATH}2",
                "sourceUser": f"{self.mysql_root_node.ssh_user}",
                "targetUser": f"{self.primary_root_node.ssh_user}", "dbMap": [f"{self.source_db}:{self.target_db}"],
                "toolVersion": f"{version}",
                "taskType": "transcribe_replay", "sourceNodeId": f"{source_node_id}",
                "targetNodeId": f"{target_node_id}"}
        response = self.data_migration_api.post_plugins_data_migration_transcribeReplay_save(data)
        self.log.info(response.json())
        self.assertIsNotNone(response.json()['data'], '执行失败' + text)
        self.log.info('----安装录制回放工具并配置错误的参数----')
        self.task_id = response.json()['data']
        # Deliberately broken configuration: e.g. "tcpdump.node.port" is set
        # to an IP address instead of a port so that starting the task fails.
        data = {"sql.transcribe.mode": "tcpdump", "tcpdump.network.interface": "lo", "tcpdump.capture.duration": 1,
                "tcpdump.file.name": "tcpdump-file", "tcpdump.file.size": 10,
                "tcpdump.database.ip": f"{self.mysql_root_node.ssh_host}",
                "tcpdump.database.port": f"{self.mysql_node.db_port}", "queue.size.limit": 10000,
                "packet.batch.size": 10000,
                "should.send.file": "true", "should.check.system": "false", "max.cpu.threshold": 0.85,
                "max.memory.threshold": 0.85, "max.disk.threshold": 0.85,
                "remote.receiver.name": f"{self.primary_root_node.ssh_user}",
                "remote.node.ip": f"{self.primary_root_node.ssh_host}",
                "remote.node.port": f"{self.primary_root_node.ssh_port}", "remote.retry.count": 1,
                "result.file.size": 10, "parse.select.result": "false",
                "tcpdump.node.port": f"{self.mysql_root_node.ssh_host}",
                "sql.storage.mode": "json", "sql.file.name": "sql-file", "sql.file.size": 0,
                "result.file.name": "select-result", "parse.max.time": 0, "file.count.limit": 0,
                "sql.replay.strategy": "parallel", "sql.replay.multiple": 1, "sql.replay.only.query": "false",
                "sql.replay.parallel.max.pool.size": 0, "sql.replay.slow.sql.rule": "3",
                "sql.replay.slow.time.difference.threshold": 1000, "sql.replay.slow.sql.duration.threshold": 1000,
                "sql.replay.slow.top.number": 5, "sql.replay.session.white.list": "[]",
                "sql.replay.session.black.list": "[]", "sql.replay.database.ip": f"{self.primary_root_node.ssh_host}",
                "sql.replay.database.port": f"{self.primary_root_node.db_port}",
                "sql.replay.database.schema.map": [f"{self.source_db}:{self.target_db}"],
                "sql.replay.database.username": f"{self.primary_root_node.db_user}",
                "sql.replay.database.password": f"{self.primary_root_node.db_password}",
                "sql.replay.draw.interval": 100, "replay.max.time": 0, "source.time.interval.replay": "false",
                "compare.select.result": "false", "tcpdump.file.id": ""}
        response = self.data_migration_api.post_plugins_data_migration_transcribeReplay_downloadAndConfig(self.task_id,
                                                                                                          data)
        self.log.info(response.json())
        self.assertEqual(200, response.json()['code'])
        time.sleep(30)
        self.log.info('查看任务是否正常创建')
        response = self.data_migration_api.get_plugins_data_migration_transcribeReplay_list()
        self.log.info(response.json())
        self.assertIn(f"\"id\":{self.task_id}", response.text, '任务未正常创建')
        for task in response.json()['rows']:
            if task['id'] == self.task_id:
                status = task['executionStatus']
                self.assertEqual(0, status, '工具未下载')
                break
        text = '----step4:执行任务 expect:合理报错----'
        self.log.info(text)
        self.data_migration_api.post_plugins_data_migration_transcribeReplay_start(self.task_id)

        self.log.info('----查看录制回放结果----')
        # Poll every 30s for up to 600s waiting for ERROR in errorMsg.
        time_start = datetime.now()
        task = {}
        while True:
            time.sleep(30)
            time_end = datetime.now()
            response = self.data_migration_api.get_plugins_data_migration_transcribeReplay_list()
            self.log.info(response.json())
            for item in response.json()['rows']:
                if item['id'] == self.task_id:
                    task = item
                    break
            # errorMsg may be None (or the task not yet listed) while the task
            # is still starting; normalise to '' so the membership test below
            # asserts/continues instead of raising TypeError.
            error_msg = task.get('errorMsg') or ''
            if time_end - time_start >= timedelta(seconds=600):
                self.assertIn('ERROR', error_msg, '执行失败：' + text)
                break
            if 'ERROR' in error_msg:
                break

    def tearDown(self):
        """Delete the task, drop both databases and deregister hosts/instances."""
        text = '----step5:清理环境----'
        self.log.info(text)
        self.log.info('----删除录制回放任务----')
        response = self.data_migration_api.post_plugins_data_migration_transcribeReplay_delete(f'{self.task_id}')
        self.log.info(response.json())

        self.log.info('----删除数据库----')
        sql = f'drop database if exists {self.source_db};'
        self.log.info(sql)
        res1 = self.com_mysql.execut_db_sql(sql)
        self.log.info(res1)
        res2 = self.com_sh.execut_db_sql(sql)
        self.log.info(res2)

        self.log.info('----删除服务器----')
        self.datakit_sh.delete_host()
        self.log.info('----删除实例----')
        self.datakit_sh.delete_db()

        # Assertions are deferred until after all cleanup calls so a single
        # failure cannot leave the environment half-cleaned.
        self.assertNotIn('ERROR', res1, '删除数据库失败')
        self.assertNotIn('ERROR', res2, '删除数据库失败')
        self.assertEqual(200, response.json()['code'], '执行失败：' + text)
        self.log.info(f"-----{os.path.basename(__file__)} finish-----")
