"""
Case Type   : Query_Dop并行查询
Case Name   : 创建带压缩的列存表,设置query_dop=2，执行explain，查看是否启用并行查询
Create At   : 2022/11/03
Owner       : @peilinqian
Description :
    1、创建带压缩的列存表
    2、向列存表中插入数据
    3、对分区表执行analyze操作
    4、设置query_dop=2,work_mem=64kB,使用explain查看分区表是否启用并行查询
    5、清理环境
Expect      :
    1、创建带压缩的列存表成功
    2、插入数据成功
    3、对分区表执行analyze操作成功
    4、设置query_dop=2,work_mem=64kB,使用explain查看分区表，成功启用并行查询
    5、清理环境成功
History     :
    Modified by peilinqian at 2024-12-2:研发代码变更，并行串行代价进行比对，即使开了并行，
    串行代价小，仍会走串行。设置work_mem较小，以保障可以进入并行计划
"""

import os
import unittest

from testcase.utils.Common import Common
from testcase.utils.CommonSH import CommonSH
from testcase.utils.Constant import Constant
from testcase.utils.Logger import Logger


class QueryDopCase(unittest.TestCase):
    """Verify SMP parallel query on a compressed column-store table.

    Steps (see module docstring): create two compressed column-store
    tables, load data, ANALYZE them, then set ``query_dop=2`` with a
    small ``work_mem`` (64kB) so the parallel plan's cost wins, and
    check the EXPLAIN output contains a LOCAL GATHER streaming node.
    """

    def setUp(self):
        # Test scaffolding: logger, SQL runners and fixture table names.
        self.logger = Logger()
        self.logger.info(f'----{os.path.basename(__file__)}:start----')
        self.constant = Constant()
        self.commonsh = CommonSH("PrimaryDbUser")
        self.common = Common()
        # NOTE(review): recorded but never used or restored anywhere in
        # this case. The step-4 ``set query_dop`` is session-local, so a
        # restore may be unnecessary — confirm and either use or drop this.
        self.initial_query_dop = self.common.show_param("query_dop")
        self.t_name_sc = "t_score_0016"
        self.t_name_stu = "t_student_0016"

    def test_query_dop(self):
        # Step 1: create two compressed column-store tables (one via the
        # COMPRESS keyword, one via the COMPRESSION=HIGH storage option).
        step1_text = "---step1:创建带压缩的列存表;expect:建表成功---"
        self.logger.info(step1_text)
        sql_cmd1 = f'''drop table if exists {self.t_name_sc};
            create table {self.t_name_sc}(
            s_id int,
            s_score int,
            s_course char(8))
            with (orientation = column)
            compress;
            drop table if exists {self.t_name_stu};
            create table {self.t_name_stu}(
            s_id int,
            s_name char(8))
            with (ORIENTATION = COLUMN, COMPRESSION=HIGH);'''
        self.logger.info(sql_cmd1)
        sql_res1 = self.commonsh.execut_db_sql(sql_cmd1)
        self.logger.info(sql_res1)
        # Message now matches the "执行失败:" form used by the other steps.
        self.assertIn("CREATE TABLE", sql_res1, "执行失败:" + step1_text)

        # Step 2: load enough rows (1M / 100k) that the join below is
        # expensive enough for the parallel plan to be considered.
        step2_text = "---step2:为列存表插入数据;expect:插入数据成功---"
        self.logger.info(step2_text)
        sql_cmd2 = f'''insert into {self.t_name_sc} values(
            generate_series(1, 1000000), 
            random()*100,
            'course');
            insert into {self.t_name_stu} values(
            generate_series(1, 100000), 
            'name');'''
        self.logger.info(sql_cmd2)
        sql_res2 = self.commonsh.execut_db_sql(sql_cmd2)
        self.logger.info(sql_res2)
        self.assertIn("INSERT", sql_res2, "执行失败:" + step2_text)

        # Step 3: refresh optimizer statistics so plan costs are realistic.
        step3_text = "---step3:对列存表执行analyze操作;expect:操作成功---"
        self.logger.info(step3_text)
        analyse_res = self.commonsh.execut_db_sql(
            f'''analyze {self.t_name_sc};
                analyze {self.t_name_stu};''')
        self.logger.info(analyse_res)
        self.assertIn("ANALYZE", analyse_res, "执行失败:" + step3_text)

        # Step 4: with query_dop=2 and a deliberately tiny work_mem (per the
        # 2024-12-2 history note, the planner compares serial vs parallel
        # cost; small work_mem steers it to the parallel plan), EXPLAIN must
        # show a degree-2 LOCAL GATHER streaming operator.
        step4_text = "---step4:设置query_dop=2,work_mem=64kB," \
                     "explain查看是否启用并行查询; expect:并行查询启用成功---"
        self.logger.info(step4_text)
        explain_res = self.commonsh.execut_db_sql(
            f'''set work_mem = '64kB'; set query_dop=2;
            show work_mem;show query_dop;
            explain select count(*) from {self.t_name_stu} a 
            join {self.t_name_sc} b on a.s_id = b.s_id 
            group by a.s_id;''')
        self.logger.info(explain_res)
        assert_info = "Vector Streaming(type: LOCAL GATHER dop: 1/2)"
        self.assertIn(assert_info, explain_res, "执行失败:" + step4_text)

    def tearDown(self):
        # Drop both fixture tables; the assertion runs last so the
        # ":end" marker is always logged, even if cleanup failed.
        self.logger.info("---清理环境---")
        drop_text = "---删除列存表---"
        self.logger.info(drop_text)
        drop_cmd = f'''drop table if exists {self.t_name_sc};
            drop table if exists {self.t_name_stu};'''
        self.logger.info(drop_cmd)
        drop_res = self.commonsh.execut_db_sql(drop_cmd)
        self.logger.info(drop_res)

        self.logger.info(f'----{os.path.basename(__file__)}:end----')
        # Message now matches the "执行失败:" form used elsewhere.
        self.assertIn("DROP TABLE", drop_res, "执行失败:" + drop_text)
