"""
Copyright (c) 2022 Huawei Technologies Co.,Ltd.

openGauss is licensed under Mulan PSL v2.
You can use this software according to the terms and conditions of the Mulan PSL v2.
You may obtain a copy of Mulan PSL v2 at:

          http://license.coscl.org.cn/MulanPSL2

THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
See the Mulan PSL v2 for more details.
"""
"""
Case Type   : reindex
Case Name   : 使用reindex在线重建非法索引
Create At   : 2021/12/13
Owner       : @zou_jialiang050
Description :
    1.建表并插入数据
    2.向表中插入重复数据
    3.使用concurrently创建索引
    4.查询表t_reindex_concurrency_0006存在的索引
    5.删除重复数据
    6.使用reindex在线重建索引
    7.清理环境
Expect      :
    1.建表并插入数据成功
    2.向表中插入重复数据成功
    3.执行失败，遗留非法索引
    4.返回表t_reindex_concurrency_0006存在的索引
    5.删除重复数据成功
    6.使用reindex在线重建索引成功
    7.清理环境成功
History     :
"""

import os
import unittest
from yat.test import Node
from testcase.utils.CommonSH import CommonSH
from testcase.utils.Constant import Constant
from testcase.utils.Logger import Logger


class SQL(unittest.TestCase):
    """Rebuild an invalid index online with REINDEX ... CONCURRENTLY.

    A CREATE UNIQUE INDEX CONCURRENTLY on a table holding duplicate values
    fails and leaves the index behind marked INVALID; the test then removes
    the duplicates and checks that REINDEX INDEX CONCURRENTLY rebuilds the
    index successfully.
    """

    def setUp(self):
        # Per-test fixtures: logger, primary-node handles, and the
        # table/index names shared by every step.
        self.logger = Logger()
        self.logger.info(f'-----{os.path.basename(__file__)} start-----')
        # NOTE(review): dbuserNode is not referenced elsewhere in this test.
        self.dbuserNode = Node('PrimaryDbUser')
        self.primary_sh = CommonSH('PrimaryDbUser')
        self.Constant = Constant()
        self.table = 't_reindex_concurrency_0006'
        self.index = 'i_reindex_concurrency_0006'

    def test_reindex(self):
        # Step 1: create the table and load 500k distinct rows.
        step1 = 'step1:建表并插入数据 expect:建表并插入数据成功'
        self.logger.info(step1)
        setup_result = self.primary_sh.execut_db_sql(f'''
            drop table if exists {self.table};
            create table {self.table}(c_int int);
            insert into {self.table} (select * from generate_series(1,500000));
            ''')
        self.logger.info(setup_result)
        for expected in (self.Constant.TABLE_DROP_SUCCESS,
                         self.Constant.CREATE_TABLE_SUCCESS,
                         self.Constant.INSERT_SUCCESS_MSG):
            self.assertIn(expected, setup_result, "执行失败" + step1)

        # Step 2: add 100 duplicate values so a unique index cannot be built.
        step2 = 'step2:向表中插入重复数据 expect:向表中插入重复数据成功'
        self.logger.info(step2)
        duplicate_result = self.primary_sh.execut_db_sql(
            f'''insert into {self.table} (select * from generate_series(1,100))
            ;''')
        self.logger.info(duplicate_result)
        self.assertIn(self.Constant.INSERT_SUCCESS_MSG, duplicate_result,
                      "向表中插入重复数据失败" + step2)

        # Step 3: the concurrent unique-index build must fail on the
        # duplicate rows, leaving the index behind in an INVALID state.
        step3 = 'step3:使用concurrently创建索引 expect:执行失败,' \
                '留下非法索引'
        self.logger.info(step3)
        concurrent_result = self.primary_sh.execut_db_sql(
            f'''drop index if exists {self.index};
            create unique index concurrently {self.index} on {self.table}
            (c_int);''')
        self.logger.info(concurrent_result)
        self.assertIn(self.Constant.DROP_INDEX_SUCCESS_MSG, concurrent_result,
                      "执行失败" + step3)
        self.assertIn(
            f'ERROR:  could not create unique index "{self.index}"',
            concurrent_result, "执行失败" + step3)

        # Step 4: \d must list the leftover index and flag it as INVALID.
        step4 = 'step4:查询表t_reindex_concurrency_0006存在的索引' \
                ' expect:返回表t_reindex_concurrency_0006存在的索引'
        self.logger.info(step4)
        describe_result = self.primary_sh.execut_db_sql(f'\\d {self.table} ')
        self.logger.info(describe_result)
        self.assertIn(
            f'"{self.index}" UNIQUE, btree (c_int) TABLESPACE '
            'pg_default INVALID',
            describe_result, "使用concurrently重建索引执行失败" + step4)

        # Step 5: remove the duplicated rows so a unique rebuild can succeed.
        step5 = 'step5:删除重复数据 expect:删除重复数据成功'
        self.logger.info(step5)
        delete_result = self.primary_sh.execut_db_sql(
            f'delete from {self.table} where c_int<101;')
        self.logger.info(delete_result)
        self.assertIn(self.Constant.DELETE_SUCCESS_MSG, delete_result,
                      "删除重复数据失败" + step5)

        # Step 6: the online rebuild of the invalid index must now succeed.
        step6 = 'step6:使用reindex在线重建索引' \
                'expect:使用reindex在线重建索引成功'
        self.logger.info(step6)
        rebuild_result = self.primary_sh.execut_db_sql(
            f'reindex index concurrently {self.index};')
        self.logger.info(rebuild_result)
        self.assertIn('REINDEX', rebuild_result, "执行失败" + step6)

    def tearDown(self):
        # Step 7: drop the test table (the index is removed with it).
        step7 = 'step7:清理环境 expect:清理环境成功'
        self.logger.info(step7)
        drop_result = self.primary_sh.execut_db_sql(
            f'drop table {self.table} cascade;')
        self.logger.info(drop_result)
        self.assertIn(self.Constant.DROP_TABLE_SUCCESS, drop_result,
                      "执行失败" + step7)
        self.logger.info(f'-----{os.path.basename(__file__)} end-----')
