"""
Case Type   : reliability BM25 index
Case Name   : 并行重建索引过程注入故障，故障类型为主机数据库重启
Create At   : 2025/11/5
Owner       : chen
Description :
    1.创建表
    2.插入数据
    3.创建索引
    4.再次插入数据
    5.重建索引
    6.步骤5执行过程中，主机数据库stop
    7.恢复数据库
    8.再次重建索引
    9.主备均使用索引查询
    10.清理环境
Expect      :
    1.成功
    2.成功
    3.成功
    4.成功
    5.开始执行
    6.成功
    7.成功
    8.成功
    9.成功
    10.成功
History     :
"""

import os
import re
import time
import unittest

from yat.test import Node
from yat.test import macro

from testcase.utils.Common import Common
from testcase.utils.CommonSH import CommonSH
from testcase.utils.Constant import Constant
from testcase.utils.ComThread import ComThread
from testcase.utils.Logger import Logger

# Module-level logger shared by setUp/test/tearDown.
log = Logger()
# Shell/SQL helper bound to the primary database node's OS user.
primary_sh = CommonSH('PrimaryDbUser')

class reliabability_bm25_index_case0021(unittest.TestCase):
    """Reliability case: stop the primary database while a BM25 index is
    being rebuilt, recover it, rebuild again, and verify that primary and
    all standby nodes return identical index-scan query results.
    """

    def setUp(self):
        """Prepare helpers, node handle, object names and CSV data paths."""
        log.info(f'--------{os.path.basename(__file__)} start--------')
        self.constant = Constant()
        self.com = Common()
        self.pri_node = Node('PrimaryDbUser')
        self.table_name = 't_relia_bm25_index_021'
        self.index_name = 'idx_t_relia_bm25_index_021'
        # Tarball on the FTP server holding the two CSV fixtures below.
        self.pkg_csv = 'bm25_csv.tar.gz'
        self.csv1w_path = os.path.join(macro.DB_SCRIPT_PATH, 'copy_10000.csv')
        self.csv1k_path = os.path.join(macro.DB_SCRIPT_PATH, 'copy_1000.csv')

    @staticmethod
    def _strip_score_column(res):
        """Remove the BM25 score column from psql-style output.

        The similarity score presumably differs in formatting/precision
        between nodes (TODO confirm), so only the id column is kept for
        the primary-vs-standby comparison in step 9.
        """
        res = re.sub(r'(id\s*)\|.*', r'\1', res)
        return re.sub(r'^(\s*\d+)\s*\|.*$', r'\1', res, flags=re.MULTILINE)

    def test_index(self):
        """Run steps 1-9: build data/index, inject the stop fault during
        reindex, recover, reindex again, and compare query results."""
        text = '--------step1: 创建表; expect: 成功--------'
        log.info(text)
        cr_tab = f'''drop table if exists {self.table_name};
            create table {self.table_name} 
            (id bigserial primary key,val_text text);
            alter table {self.table_name} set (parallel_workers=32);'''
        res = primary_sh.execut_db_sql(cr_tab)
        log.info(res)
        self.assertIn(self.constant.CREATE_TABLE_SUCCESS, res)
        self.assertIn(self.constant.ALTER_TABLE_MSG, res)

        text = '--------step2: 插入数据; expect: 成功--------'
        log.info(text)
        # Download and unpack the CSV fixtures only when they are missing
        # on the primary node.
        check_cmd = f'if [ -f {self.csv1w_path} ]; '\
            'then echo "True"; else echo "False"; fi'
        log.info(check_cmd)
        res = self.pri_node.sh(check_cmd).result()
        log.info(res)
        if res == 'False':
            self.com.wget_file(node=self.pri_node, 
                put_path=macro.DB_SCRIPT_PATH, 
                get_path=os.path.join(macro.FTP_PLUGINS_PATH,
                    self.pkg_csv))
            cmd = f"cd {macro.DB_SCRIPT_PATH} && tar -zxf {self.pkg_csv}"
            log.info(cmd)
            res = self.pri_node.sh(cmd).result()
            log.info(res)
        # Bulk-load 10k rows; '|' delimiter matches the fixture format.
        copy_sql = f"""copy {self.table_name} (val_text) from 
                '{self.csv1w_path}' with(DELIMITER '|');"""
        res = primary_sh.execut_db_sql(copy_sql)
        log.info(res)
        self.assertIn(self.constant.copy_success_msg, res)

        text = '--------step3: 创建索引; expect:开始执行--------'
        log.info(text)
        cr_ix = f'create index {self.index_name} on ' \
            f'{self.table_name} using bm25(val_text);'
        res = primary_sh.execut_db_sql(cr_ix)
        log.info(res)
        self.assertIn(self.constant.CREATE_INDEX_SUCCESS, res)

        text = '--------step4: 再次插入数据; expect:开始执行--------'
        log.info(text)
        # Load another 1k rows on top of the existing index.
        copy_sql = f"""copy {self.table_name} (val_text) from 
                '{self.csv1k_path}' with(DELIMITER '|');"""
        res = primary_sh.execut_db_sql(copy_sql)
        log.info(res)
        self.assertIn(self.constant.copy_success_msg, res)

        text = '--------step5: 重建索引; expect:开始执行--------'
        log.info(text)
        # Run the reindex in a background thread so the fault in step 6
        # lands while it is still in progress.
        re_ix_sql = f'reindex index {self.index_name};'
        ix_thread = ComThread(primary_sh.execut_db_sql,
                                        args=(re_ix_sql,))
        # setDaemon() is deprecated since Python 3.10; assign the
        # attribute directly (identical behavior).
        ix_thread.daemon = True
        ix_thread.start()
        # Let the reindex get under way before injecting the fault.
        ix_thread.join(5)

        text = '--------step6: 步骤5执行过程中,主机数据库stop; expect:成功--------'
        log.info(text)
        res = primary_sh.stop_db_instance()
        log.info(res)
        self.assertIn(self.constant.GS_CTL_STOP_SUCCESS_MSG, res)

        text = '--------step7: 恢复数据库; expect:成功--------'
        log.info(text)
        res = primary_sh.start_db_instance()
        log.info(res)
        self.assertIn(self.constant.REBUILD_SUCCESS_MSG, res)

        text = '--------step8: 再次重建索引; expect:结果相同--------'
        log.info(text)
        # The interrupted reindex is retried; it must now complete.
        res = primary_sh.execut_db_sql(re_ix_sql)
        log.info(res)
        self.assertIn(self.constant.REINDEX_SUCCESS_MSG, res)

        text = '--------step9: 主备均使用索引查询; expect:结果相同--------'
        log.info(text)
        # Allow time for the standbys to replay the reindex.
        time.sleep(5)
        res_list = []
        # Force an index scan so the BM25 index is actually exercised.
        find_sql = 'set enable_indexscan=on;' \
            'set enable_seqscan=off;' \
            "select id,val_text <&> '《后汉书》与钢铁是怎样练成的文章内容比较' from " \
            f"{self.table_name} order by val_text <&> " \
            "'《后汉书》与钢铁是怎样练成的文章内容比较' desc limit 5;"
        node_num = self.com.get_node_num(self.pri_node)
        res = primary_sh.execut_db_sql(find_sql)
        log.info(f"Primary res: {res}")
        res_list.append(self._strip_score_column(res))
        for i in range(1, node_num):
            res = CommonSH(f'Standby{i}DbUser').execut_db_sql(find_sql)
            log.info(f"Standby {i} res: {res}")
            res_list.append(self._strip_score_column(res))
        # All nodes must agree once the score column is stripped.
        self.assertEqual(len(set(res_list)), 1)

    def tearDown(self):
        """Step 10: drop the test table."""
        text = '--------step10: 清理环境; expect:成功--------'
        log.info(text)
        clear_sql = f'drop table if exists {self.table_name};'
        res = primary_sh.execut_db_sql(clear_sql)
        log.info(res)
        self.assertIn(self.constant.DROP_TABLE_SUCCESS, res)