"""
Case Type   : portal执行流程优化（一）
Case Name   : Datakit进行反向迁移
Create At   : 2025/9/16
Owner       : lonely-dance
Description :
    1、修改参数;
    2、导入服务器和实例
    3、构造迁移数据
    4、安装迁移工具
    5、创建在线迁移任务
    6、启动迁移任务
    7、停止增量启动反向
    8、opengauss侧插入数据
    9、监控迁移进度
    10、清理环境
Expect      :
    1、修改成功
    2、导入成功
    3、成功
    4、成功
    5、创建成功
    6、启动成功
    7、成功
    8、成功
    9、成功
    10、清理环境成功
History     :
"""
import json
import os
import re
import time
import unittest
from datetime import datetime

from yat.test import Node
from yat.test import macro
from testcase.utils.ComThread import ComThread
from testcase.utils.CommonSH import CommonSH
from testcase.utils.Logger import Logger
from testcase.utils.datakit_api.CommonApi import CommonApi
from testcase.utils.datakit_api.DataMigration import DataMigrationApi
from testcase.utils.CommonDatakit import CommonDatakit
from testcase.utils.CommonMySQL import CommonMySQL
from testcase.utils.datakit_api.wss import Wss


class Tools(unittest.TestCase):
    """End-to-end Datakit migration case.

    Flow: register hosts/instances with Datakit, prepare source (MySQL) and
    target (openGauss) databases, install the portal migration tool, run a
    forward online migration (full + incremental), then stop incremental and
    start reverse migration (openGauss -> MySQL), verifying reverse progress
    over a websocket channel. tearDown removes the task/portal/hosts and
    restores the database configuration.
    """

    def setUp(self):
        """Build node handles and API clients, fetch an auth token, and back
        up the openGauss parameter files so tearDown can restore them."""
        self.log = Logger()
        self.log.info(f"-----{os.path.basename(__file__)} start-----")
        self.mysql_node = Node('mysql5')
        self.mysql_root_node = Node('mysql5Root')
        self.primary_node = Node('PrimaryDbUser')
        self.primary_root_node = Node('PrimaryRoot')
        self.datakit_node = Node('Datakit')
        # Hosts and DB instances to register with Datakit in step 1.
        self.host_list = [self.datakit_node, self.mysql_node]
        self.db_list = [self.mysql_node, self.primary_node]
        self.com_sh = CommonSH('PrimaryDbUser')
        # Token authenticates every subsequent Datakit REST call; the helper
        # returns the literal string "error" on failure.
        self.token = self.com_sh.get_token_value(macro.DATAKIT_PASSWD)
        self.log.info(self.token)
        self.assertNotEqual("error", self.token)
        self.com_api = CommonApi(self.token)
        self.datakit_sh = CommonDatakit(self.token)
        self.com_mysql = CommonMySQL('mysql5')
        self.data_migration_api = DataMigrationApi(self.token)
        # Migration task name is the "CaseNNN" prefix taken from this file's
        # name; raises AttributeError if the filename does not match.
        self.task_name = re.search(r'Case\d+', os.path.basename(__file__)).group()
        self.source_db = 'source_db'
        self.target_db = 'target_db'
        # IDs are filled in during the test and consumed by tearDown.
        self.task_id = 0
        self.subtask_id = 0
        self.og_host_id = ''
        self.log.info('----备份数据库参数文件----')
        # Back up postgresql.conf / pg_hba.conf before step 2 rewrites them
        # via gs_guc; tearDown moves the backups back in place.
        cmd = f'''cd {macro.DB_INSTANCE_PATH};
        cp postgresql.conf bak_postgresql.conf;
        cp pg_hba.conf bak_pg_hba.conf;
        '''
        res = self.com_sh.exec_gs_ssh(cmd)
        self.log.info(res)
        self.assertNotIn('ERROR', res, '执行失败')

    def test_migration_datakit(self):
        """Run the full forward-then-reverse migration scenario (steps 1-9)."""
        text = '----step1:添加服务器及实例 expect:成功----'
        self.log.info(text)
        self.datakit_sh.import_host(self.host_list)
        self.datakit_sh.import_db(self.db_list)

        # Resolve the openGauss-side host id and the host-user id matching
        # this node's SSH user; both are needed for the portal install call.
        res = self.com_api.get_host_page(name=self.datakit_node.db_host)
        self.log.info(res.json())
        self.og_host_id = res.json()['rows'][0]['hostId']
        res = self.com_api.get_hostUser_page(self.og_host_id)
        self.log.info(res.json())
        og_host_user_id = ''
        for item in res.json()['rows']:
            if item['username'] == self.datakit_node.ssh_user:
                og_host_user_id = item['hostUserId']

        res = self.com_api.get_host_page(name=self.mysql_node.db_host)
        self.log.info(res.json())
        mysql_host_id = res.json()['rows'][0]['hostId']

        text = '----step2:创建迁移库并修改数据库参数 expect:成功----'
        self.log.info(text)
        # Create the B-compatibility target database and enable logical
        # replication (wal_level=logical, enable_slot_log) plus remote
        # access, then restart the cluster to apply the settings.
        cmd = f'''source {macro.DB_ENV_PATH};
        gsql -d postgres -p {self.primary_node.db_port} -c"drop database if exists target_db;create database target_db with dbcompatibility ='B' encoding 'utf8';\c target_db;";
        gsql -d postgres -p {self.primary_node.db_port} -c"alter database {self.target_db} set dolphin.lower_case_table_names = 0;";
        gs_guc set -N all -I all -h "host all all 0.0.0.0/0 sha256";
        gs_guc set -N all -I all -h "host replication all 0.0.0.0/0 sha256";
        gs_guc set -N all -I all -c "listen_addresses='*'";
        gs_guc set -N all -I all -c "wal_level=logical";
        gs_guc set -N all -I all -c "enable_slot_log = on";
        gs_om -t restart
        '''
        res = self.primary_node.sh(cmd).result()
        self.log.info(res)
        self.assertNotIn('ERROR', res, '执行失败:' + text)

        text = '----step3:mysql端构造数据 expect:成功----'
        self.log.info(text)
        sql = f'drop database if exists {self.source_db}; create database {self.source_db};'
        self.log.info(sql)
        res = self.com_mysql.execut_db_sql(sql)
        self.log.info(res)
        self.assertNotIn('ERROR', res, '执行失败：' + text)

        # Seed two rows; these are what the forward full migration carries
        # over to openGauss.
        cmd = '''
            create table test1(id int primary key,name varchar(10));
            insert into test1 values(1,'a');
            insert into test1 values(2,'b');
        '''
        res = self.com_mysql.execut_db_sql(cmd, dbname=self.source_db, other_para='--ssl-mode=DISABLED')
        self.log.info(res)

        text = '----step4:安装迁移工具 expect:成功----'
        self.log.info(text)
        # installType 0 with pkgName/pkgDownloadUrl unset: install from the
        # server-side default package; thirdPartySoftwareConfigType 2 installs
        # a dedicated zookeeper/kafka/schema-registry stack on custom ports.
        data = {
            'runHostId': self.og_host_id,
            'installPath': macro.PORTAL_INSTALL_PATH,
            'hostUserId': og_host_user_id,
            'installType': 0,
            'pkgDownloadUrl': None,
            'jarName': 'portalControl-1.0-SNAPSHOT-exec.jar',
            'portalVersion': 'EXPERIMENTAL',
            'thirdPartySoftwareConfig.thirdPartySoftwareConfigType': 2,
            'thirdPartySoftwareConfig.zookeeperPort': 12181,
            'thirdPartySoftwareConfig.kafkaPort': 19092,
            'thirdPartySoftwareConfig.installDir': f'{macro.PORTAL_INSTALL_PATH}/portal/tools/debezium/',
            'thirdPartySoftwareConfig.schemaRegistryPort': 18081,
            'portalType': 'MYSQL_ONLY',
            'pkgName': None,
            'pkgUploadPath': {}
        }
        response = self.data_migration_api.post_plugins_data_migration_resource_installPortal(host_id=self.og_host_id,
                                                                                              data=data)
        self.log.info(response.json())
        self.assertEqual(200, response.json()['code'], '执行失败:' + text)

        # Poll install status every 60s for up to ~10 minutes.
        # installStatus: 2 = installed OK, 3 = install failed (fail fast).
        i = 0
        while True:
            response = self.data_migration_api.post_plugins_data_migration_resource_getHosts()
            self.log.info(response.json())
            i += 1
            if response.json()['data']['records'][0]['installInfo']['installStatus'] == 2:
                self.log.info('----portal安装成功----')
                break
            self.assertNotEqual(3, response.json()['data']['records'][0]['installInfo']['installStatus'],
                                'portal安装失败')
            if i > 10:
                self.assertEqual(2, response.json()['data']['records'][0]['installInfo']['installStatus'],
                                 'portal安装失败')
                break
            time.sleep(60)

        text = '----step5:创建在线迁移任务 expect:成功----'
        self.log.info(text)
        # migrationModelId "2" — presumably the "online" (full + incremental)
        # model; TODO(review) confirm against the Datakit model list.
        data = {"taskName": f"{self.task_name}", "globalParams": [], "hostIds": [f"{self.og_host_id}"],
                "tasks": [{"isAdjustKernelParam": False, "migrationModelId": "2", "sourceDb": f"{self.source_db}",
                           "sourceDbHost": f"{self.mysql_node.db_host}",
                           "sourceDbPass": f"{self.mysql_node.db_password}",
                           "sourceDbPort": self.mysql_node.db_port, "sourceDbUser": f"{self.mysql_node.db_user}",
                           "sourceNodeId": f"{mysql_host_id}",
                           "sourceSchemas": "", "seletedTbl": [], "sourceDbType": "MYSQL",
                           "targetDb": f"{self.target_db}",
                           "targetDbHost": f"{self.primary_node.db_host}",
                           "targetDbPass": f"{self.primary_node.db_password}",
                           "targetDbPort": self.primary_node.db_port, "targetDbUser": f"{self.primary_node.db_user}",
                           "targetDbVersion": None,
                           "targetNodeId": f"{self.og_host_id}", "isSystemAdmin": True, "sourceTables": "",
                           "taskParams": [{"paramKey": "rules.enable", "paramValue": "true",
                                           "paramDesc": "规则过滤，true代表开启，false代表关闭"}]}]}
        self.log.info(data)
        response = self.data_migration_api.post_plugins_data_migration_migration_save(data)
        self.log.info(response.json())
        self.assertEqual(200, response.json()['code'], '创建失败')

        # The save API does not return the task id; look it up by name.
        response = self.data_migration_api.get_plugins_data_migration_migration_list()
        self.log.info(response.json())
        for item in response.json()['rows']:
            if item['taskName'] == self.task_name:
                self.task_id = item['id']
                break

        text = '----step6:启动迁移任务 expect:成功----'
        self.log.info(text)
        response = self.data_migration_api.post_plugins_data_migration_migration(cmd='start', task_id=self.task_id)
        self.log.info(response.json())
        self.assertEqual(200, response.json()['code'], '启动失败')

        # Poll the subtask every 60s (max 10 tries). execStatus 8 means the
        # full migration finished and incremental is running (per the success
        # log below); also require zero alerts on the subtask.
        j = 0
        while True:
            time.sleep(60)
            j += 1
            response1 = self.data_migration_api.get_plugins_data_migration_migration_subTasks(self.task_id)
            self.log.info(response1.json())
            self.subtask_id = response1.json()['rows'][0]['id']
            response2 = self.data_migration_api.get_plugins_data_migration_migration_alert_count(self.subtask_id)
            self.log.info(response2.json())
            if response1.json()['rows'][0]['execStatus'] == 8:
                self.assertTrue(response2.json()['data']['total'] == 0, '迁移出现异常告警')
                self.log.info('全量迁移完成，增量迁移中')
                break
            if j >= 10:
                self.assertEqual(8, response1.json()['rows'][0]['execStatus'], f'迁移任务{self.task_id}执行超时')
                break

        text = '----step7:停止增量迁移，启动反向迁移 expect:成功----'
        self.log.info(text)
        self.log.info('停止增量')
        response = self.data_migration_api.post_plugins_data_migration_migration_subTask_stop_incremental(
            self.subtask_id)
        self.log.info(response.json())

        # Wait (30s x up to 11 polls) for execStatus 10 = incremental stopped.
        # NOTE(review): the log line below says "反向" (reverse) but this loop
        # is waiting for the *incremental* stop requested above.
        j = 0
        while j <= 10:
            j += 1
            response = self.data_migration_api.get_plugins_data_migration_migration_subTasks(self.task_id)
            self.log.info(response.json())
            if response.json()['rows'][0]['execStatus'] == 10:
                self.log.info('反向迁移已停止')
                break
            time.sleep(30)
        response = self.data_migration_api.get_plugins_data_migration_migration_subTasks(self.task_id)
        self.log.info(response.json())
        self.assertEqual(10, response.json()['rows'][0]['execStatus'], f'增量迁移任务{self.task_id}停止超时')

        self.log.info('启动反向')
        response = self.data_migration_api.post_plugins_data_migration_migration_subTask_start_reverse(self.subtask_id)
        self.log.info(response.json())
        self.assertEqual(200, response.json()['code'], '启动反向失败')

        # Wait (30s x up to 10 polls) for execStatus 12 = reverse running.
        j = 0
        while True:
            j += 1
            response = self.data_migration_api.get_plugins_data_migration_migration_subTasks(self.task_id)
            self.log.info(response.json())
            if response.json()['rows'][0]['execStatus'] == 12:
                self.log.info('反向迁移已启动')
                break
            if j >= 10:
                self.assertEqual(12, response.json()['rows'][0]['execStatus'], f'反向迁移{self.task_id}启动超时')
                break
            time.sleep(30)

        text = '----step8:opengauss插入数据 expect：成功----'
        self.log.info(text)
        # Insert two rows on the openGauss side so reverse migration has
        # something to replicate back. `use source_db` inside target_db —
        # presumably switches to the "source_db" schema that forward
        # migration created (dolphin B-compat); TODO(review) confirm.
        cmd = f'''use source_db;insert into test1 values(3,'c');insert into test1 values(4,'d');'''
        res = self.com_sh.execut_db_sql(cmd, dbname=f'{self.target_db}')
        self.log.info(res)
        self.assertNotIn('ERROR', res, '执行失败')

        text = '----step9:监控迁移进度，expect:迁移成功----'
        self.log.info(text)
        # Subscribe to the subtask progress websocket; the subTaskInfo API
        # call (in a parallel thread) triggers the server to push progress
        # messages onto that channel.
        task_time = datetime.now().strftime("%Y%m%d%f")[:-3]
        wss_url = f'wss://{macro.DATAKIT_HOST}:{macro.DATAKIT_PORT}/ws/data-migration/taskInfo_{self.subtask_id}_{task_time}'
        wss_client = Wss(wss_url)

        wss_thread = ComThread(wss_client.start_wss_connection, args=())
        wss_thread.setDaemon(True)
        wss_thread.start()

        thread = ComThread(self.data_migration_api.get_plugins_data_migration_migration_subTaskInfo,
                           args=(self.subtask_id, f'taskInfo_{self.subtask_id}_{task_time}'))
        thread.setDaemon(True)
        thread.start()

        response = self.data_migration_api.get_plugins_data_migration_migration_subTasks(self.task_id)
        self.log.info(response.json())

        # Poll the last websocket message (30s x 10). Success = the reverse
        # process reports successCount == 2, matching the two rows inserted
        # in step 8. NOTE(review): the failure message says "增量" but this
        # checks the *reverse* migration result.
        for i in range(10):
            time.sleep(30)
            last_msg = wss_client.get_last_message()
            if last_msg is None:
                self.log.info(f"第{i + 1}次检查：尚未收到消息")
                continue
            self.log.info(last_msg)
            msg = json.loads(last_msg)
            # execResultDetail is itself a JSON string nested in the message.
            reverse_detail = json.loads(msg['reverseProcess']['execResultDetail'])
            if reverse_detail['successCount'] == 2:
                self.log.info('----反向迁移----')
                wss_client.stop_connection()
                break
            if i == 9:
                self.assertEqual(2, reverse_detail['successCount'], '增量迁移出现异常')
                wss_client.stop_connection()
                break

    def tearDown(self):
        """Step 10: best-effort cleanup — finish and delete the migration
        task, uninstall portal, deregister hosts/instances, drop both test
        databases, restore the backed-up parameter files and restart.

        All assertions are deferred to the end so every cleanup action runs
        even if an earlier one failed.
        """
        text = f'----step10:清理环境 expect:成功----'
        self.log.info(text)
        response1 = self.data_migration_api.post_plugins_data_migration_migration(cmd='finish', task_id=self.task_id)
        self.log.info(response1.json())

        response2 = self.data_migration_api.delete_plugins_data_migration_migration(self.task_id)
        self.log.info(response2.json())

        response3 = self.data_migration_api.delete_plugins_data_migration_resource_deletePortal(self.og_host_id)
        self.log.info(response3.json())

        self.datakit_sh.delete_db()
        self.datakit_sh.delete_host()

        cmd = f'''
        source {macro.DB_ENV_PATH};
        gsql -d postgres -p {self.primary_node.db_port} -c"drop database if exists target_db;"
        '''
        res = self.primary_node.sh(cmd).result()
        self.log.info(res)

        sql = f'drop database if exists {self.source_db};'
        self.log.info(sql)
        res1 = self.com_mysql.execut_db_sql(sql)
        self.log.info(res1)

        # Restore the parameter files backed up in setUp, then restart the
        # cluster so the restored configuration takes effect.
        cmd = f'''cd {macro.DB_INSTANCE_PATH};
        mv bak_postgresql.conf postgresql.conf;
        mv bak_pg_hba.conf pg_hba.conf;
        '''
        res2 = self.com_sh.exec_gs_ssh(cmd)
        self.log.info(res2)

        res3 = self.com_sh.restart_db_cluster()
        self.assertTrue(res3)
        self.assertEqual(200, response1.json()['code'], '停止迁移任务失败')
        self.assertEqual(200, response2.json()['code'], '删除迁移任务失败')
        self.assertEqual(200, response3.json()['code'], '卸载portal失败')
        self.assertNotIn('ERROR', res, '执行失败:清理数据库失败')
        self.assertNotIn('ERROR', res1, '执行失败：' + text)
        self.assertNotIn('ERROR', res2, '恢复配置文件失败')

        self.log.info(f"-----{os.path.basename(__file__)} finsh-----")
