import json
import re
import traceback
import unittest

import jsonpath_ng
from sqloxide import parse_sql

import utils
from base_parser import JsonParser
from log import LOGGER


class IndexInfo:
    """Describes one index on a table: its name, kind, and member fields."""

    TYP_PK = 0
    TYP_UNI = 1
    TYP_IDX = 2

    TYP_DESC_MAP = {TYP_PK: "primary key", TYP_UNI: "unique key", TYP_IDX: "key"}

    def __init__(self, name, typ, flds):
        # name may be None for anonymous indexes (e.g. "unique key (id2)")
        self.name = name
        self.typ = typ
        self.flds = flds

    def __repr__(self):
        kind = self.TYP_DESC_MAP.get(self.typ)
        return f"{{{kind} {self.name}{self.flds}}}"

    @staticmethod
    def type_desc(idx_type):
        """Return the human-readable description for an index type constant."""
        return IndexInfo.TYP_DESC_MAP.get(idx_type)


class DBDataType:
    """A column's SQL data type: a basic type name plus an optional scope
    (length for char/varchar, precision-and-scale for decimal)."""

    BTP_VARCHAR = "Varchar"
    BTP_CHAR = "Char"
    BTP_DECIMAL = "Decimal"
    BTP_TIMESTAMP = "Timestamp"
    BTP_DATETIME = "Datetime"
    BTP_TINY = "TinyInt"
    BTP_INT = "Int"
    BTP_BIGINT = "BigInt"

    def __init__(self, basic_type, scope):
        self.basic_type = basic_type
        self.scope = scope

    def __eq__(self, other):
        # Equality compares the basic type; the scope only matters for the
        # length-parameterized types (char/varchar/decimal).
        if not isinstance(other, DBDataType):
            return False
        if self.basic_type != other.basic_type:
            return False
        if self.basic_type not in (self.BTP_VARCHAR, self.BTP_CHAR, self.BTP_DECIMAL):
            return True
        return self.scope == other.scope


class ColumnInfo:
    """Definition of one table column extracted from the parser's JSON AST."""

    def __init__(self, name, comment, is_nullable, data_type, dft_value=None):
        self.name = name
        self.is_nullable = is_nullable
        self.comment = comment
        self.data_type = data_type
        self.is_auto_incr = False
        self.is_pk = False
        self.dft_value = dft_value

    def has_default(self):
        """True when the column declares an explicit DEFAULT value."""
        return self.dft_value is not None

    @staticmethod
    def from_alter_drop(drop_col_def):
        """Build a minimal ColumnInfo from an ALTER TABLE ... DROP COLUMN node."""
        col_name = drop_col_def.get("column_name").get("value")
        return ColumnInfo(col_name, None, False, None)

    @staticmethod
    def from_alter_add(add_col_def):
        """Build a ColumnInfo from an ALTER TABLE ... ADD COLUMN node."""
        flags, keyed = ColumnInfo.parse_create_options(add_col_def.get("options"))
        return ColumnInfo._assemble(add_col_def, "name", flags, keyed, with_flags=True)

    @staticmethod
    def from_alter_modify(modify_col_def):
        """Build a ColumnInfo from an ALTER TABLE ... MODIFY COLUMN node."""
        flags, keyed = ColumnInfo.parse_alter_modify_options(
            modify_col_def.get("options")
        )
        return ColumnInfo._assemble(modify_col_def, "col_name", flags, keyed, with_flags=False)

    @staticmethod
    def from_create(col_def):
        """Build a ColumnInfo from a CREATE TABLE column definition node."""
        flags, keyed = ColumnInfo.parse_create_options(col_def.get("options"))
        return ColumnInfo._assemble(col_def, "name", flags, keyed, with_flags=True)

    @staticmethod
    def _assemble(col_def, name_key, flags, keyed, with_flags):
        """Shared constructor logic for the factory methods above.

        with_flags controls whether auto-increment / primary-key markers are
        detected (MODIFY nodes do not carry them in this JSON shape).
        """
        info = ColumnInfo(
            col_def.get(name_key).get("value"),
            keyed.get("Comment"),
            "NotNull" not in flags,
            ColumnInfo.parse_data_type(col_def),
            keyed.get("Default"),
        )
        if with_flags:
            info.is_auto_incr = ColumnInfo.decide_auto_increment(keyed)
            info.is_pk = ColumnInfo.decide_pk(keyed)
        return info

    @staticmethod
    def decide_pk(opt_dict: dict) -> bool:
        """True when the option dict carries a Unique.is_primary marker."""
        return opt_dict.get('Unique', {}).get('is_primary', False)

    @staticmethod
    def decide_auto_increment(opt_dict: dict) -> bool:
        """True when a dialect-specific AUTO_INCREMENT keyword is present."""
        return any(
            spec.get('Word', {}).get("keyword") == 'AUTO_INCREMENT'
            for spec in opt_dict.get('DialectSpecific', [])
        )

    @staticmethod
    def parse_alter_modify_options(opts):
        """Split MODIFY-column option values into (flag list, keyed dict)."""
        return ColumnInfo._split_options(opts)

    @staticmethod
    def parse_create_options(opts):
        """Like parse_alter_modify_options, but each entry wraps its value
        under an "option" key."""
        return ColumnInfo._split_options(entry.get("option") for entry in opts)

    @staticmethod
    def _split_options(values):
        """Partition option values: dicts merge into a keyed map, the rest
        collect into a flat flag list."""
        flags = []
        keyed = {}
        for value in values:
            if isinstance(value, dict):
                keyed.update(value)
            else:
                flags.append(value)
        return flags, keyed

    @staticmethod
    def parse_data_type(col_def):
        """Translate the JSON "data_type" node into a DBDataType.

        The node is a single-key dict, e.g. {"Int": None} or
        {"Varchar": {"IntegerLength": {"length": 10}}}.
        """
        dt_json = col_def.get("data_type")
        basic_type = next(iter(dt_json), None)
        scope = None
        if basic_type in (DBDataType.BTP_CHAR, DBDataType.BTP_VARCHAR):
            detail = dt_json[basic_type]
            # a bare CHAR/VARCHAR with no length defaults to length 1
            scope = 1 if detail is None else detail.get("IntegerLength").get("length")
        elif basic_type == DBDataType.BTP_DECIMAL:
            scope = dt_json[basic_type].get("PrecisionAndScale")
        return DBDataType(basic_type, scope)


class DeltaInfo:
    """A schema-change record: the affected object plus the kind of ALTER."""

    # alteration kinds
    ALTER_TP_NONE = 0
    ALTER_TP_ADD = 1
    ALTER_TP_MOD = 2
    ALTER_TP_DROP = 3

    def __init__(self, obj, alter_type):
        self.obj = obj
        self.alter_type = alter_type


class TableInfo:
    """Schema of a single table: column definitions, index definitions and
    CREATE TABLE options accumulated while parsing DDL statements."""

    def __init__(self, name):
        self.name = name
        self.column_def = {}  # column name -> ColumnInfo
        self.index_def = {}   # tuple of field names -> IndexInfo
        # Lazily-built cache of lower-cased indexed field names, rebuilt on
        # demand by field_has_index and invalidated whenever indexes change.
        self.all_index_flds = None
        self.if_not_exists = False
        self.has_create_like = False
        self.create_like_source = None  # source table of CREATE TABLE ... LIKE

    def get(self, col_name) -> ColumnInfo:
        """Return the ColumnInfo for col_name, or None when undefined."""
        return self.column_def.get(col_name)

    def add_columns(self, col_dict: dict):
        """Merge new column definitions (name -> ColumnInfo) into the table."""
        self.column_def.update(col_dict)

    def modify_columns(self, col_dict: dict) -> dict:
        """Replace column definitions; return the prior definitions of the
        columns that already existed (for before/after comparison)."""
        before_mod = {
            name: self.column_def[name]
            for name in col_dict
            if name in self.column_def
        }
        self.column_def.update(col_dict)
        return before_mod

    def drop_columns(self, col_dict: dict):
        """Remove the given columns; unknown names are ignored."""
        for name in col_dict:
            self.column_def.pop(name, None)

    def collect_index(
            self, index_name, index_typ, index_flds, alter_type=DeltaInfo.ALTER_TP_NONE
    ):
        """Register (CREATE / ALTER ADD) or remove (ALTER DROP) an index.

        Bug fix: the field-name cache used by field_has_index was never
        invalidated, so indexes added or dropped after the first
        field_has_index call were not reflected in later answers.
        """
        self.all_index_flds = None
        if alter_type in (DeltaInfo.ALTER_TP_NONE, DeltaInfo.ALTER_TP_ADD):
            key = tuple(index_flds)
            if key in self.index_def:
                # keep the first definition; duplicates are ignored with a warning
                LOGGER.warning(
                    "fields:%s already defined to %s in table:%s, ignore"
                    % (
                        index_flds,
                        IndexInfo.type_desc(self.index_def[key].typ),
                        self.name,
                    )
                )
            else:
                self.index_def[key] = IndexInfo(index_name, index_typ, index_flds)
        elif alter_type == DeltaInfo.ALTER_TP_DROP:
            # DROP identifies the index by name, not by field tuple
            key = next(
                (k for k, v in self.index_def.items() if v.name == index_name),
                None,
            )
            if key is None:
                LOGGER.warning("drop index:%s NOT exists, skip" % index_name)
            else:
                del self.index_def[key]

    def has_pk_or_uniquekey(self):
        """True when any primary-key or unique-key index is defined."""
        return any(
            idx.typ in (IndexInfo.TYP_PK, IndexInfo.TYP_UNI)
            for idx in self.index_def.values()
        )

    def get_index_def(self):
        """Return the index map (tuple of field names -> IndexInfo)."""
        return self.index_def

    def field_has_index(self, fld):
        """True when fld (case-insensitive) appears in any index definition."""
        if self.all_index_flds is None:
            self.all_index_flds = set()
            for flds in self.index_def:
                self.all_index_flds.update(utils.str_collection_lower(flds))
        return utils.safe_lower(fld) in self.all_index_flds


class TableInfoMgr:
    """Registry of TableInfo objects keyed by lower-cased table name."""

    def __init__(self):
        self.tbl_info_map = {}

    def create_tbl_info(self, tbl_name) -> TableInfo:
        """Return the TableInfo for tbl_name, creating it on first use."""
        key = utils.safe_lower(tbl_name)
        if key not in self.tbl_info_map:
            self.tbl_info_map[key] = TableInfo(key)
        return self.tbl_info_map[key]

    def drop_table(self, tbl_name):
        """Forget the table; a no-op when it is unknown."""
        self.tbl_info_map.pop(utils.safe_lower(tbl_name), None)

    def __iter__(self):
        # enables `name in mgr` and iteration over registered table names
        return iter(self.tbl_info_map)

    def get(self, tbl_name: str) -> TableInfo:
        """Case-insensitive lookup; returns None for unknown tables."""
        return self.tbl_info_map.get(utils.safe_lower(tbl_name))

    def find_fields_by_cond(self, cond_func):
        """Return (table, column) pairs for which cond_func(col_def) is falsy.

        Checkers return False to flag a violation, so the result is the list
        of violating fields.
        """
        hits = []
        for tbl_name, tbl_info in self.tbl_info_map.items():
            for col_name, col_def in tbl_info.column_def.items():
                if not cond_func(col_def):
                    hits.append((tbl_name, col_name))
        return hits

    def not_null_ts_field_without_dft_checker(self, col_def: ColumnInfo):
        """Checker: False for a NOT NULL timestamp/datetime column that lacks
        a DEFAULT value."""
        is_ts = col_def.data_type.basic_type in (
            DBDataType.BTP_TIMESTAMP,
            DBDataType.BTP_DATETIME,
        )
        return not (is_ts and not col_def.is_nullable and not col_def.has_default())

    def find_tables_has_no_pk_or_uk(self):
        """Return names of tables lacking both a primary key and a unique key."""
        return [
            name for name in self.tbl_info_map
            if not self.check_table_has_pk_or_uk(name)
        ]

    def check_table_has_pk_or_uk(self, tbl_name):
        """True when the table (or, for CREATE ... LIKE, its source table)
        defines a primary or unique key."""
        tbl_info = self.get(tbl_name)
        if tbl_info is None:
            return False
        if tbl_info.has_create_like:
            # delegate to the source table of CREATE TABLE ... LIKE
            return self.check_table_has_pk_or_uk(tbl_info.create_like_source)
        return tbl_info.has_pk_or_uniquekey()


DYN_TBL_NAME = "$tbl"

# Patterns that strip MySQL-specific syntax the underlying parser rejects.
# Fix: use raw strings for regexes — "\s"/"\w" in plain literals are invalid
# escape sequences (DeprecationWarning today, SyntaxError in future Pythons).
PAT_ENGINE = re.compile(r"ENGINE\s*=\s*InnoDB.*?;", re.IGNORECASE | re.DOTALL)
PAT_USE_BTREE = re.compile(r"USING\s+BTREE", re.IGNORECASE)
PAT_ZEROFILL = re.compile(r"ZEROFILL", re.IGNORECASE)
PAT_DROP_IDX = re.compile(r"drop\s+(index|key)\s+", re.IGNORECASE)
PAT_ALTER_PART_BY = re.compile(r"alter\s+table\s+\w+\s+partition\s+by.*?;", re.I)
# (pattern, replacement) pairs applied in order before parsing
PAT_SUBS = [
    (PAT_ENGINE, ";"),
    (PAT_USE_BTREE, ""),
    (PAT_ZEROFILL, ""),
    (PAT_DROP_IDX, "DROP CONSTRAINT "),
    (PAT_ALTER_PART_BY, ""),
]


class DDLMetaInfo:
    """Summary of one parsed DDL statement: its kind, the table, and any
    column-level deltas."""

    DDL_CREATE_TBL = 0
    DDL_ALTER_TBL = 1
    DDL_DROP_TBL = 2
    DDL_UNKNOWN = 99

    def __init__(self, ddl_type, tbl_name):
        self.ddl_type = ddl_type
        self.tbl_name = tbl_name
        # DROP TABLE only: whether IF EXISTS was given
        self.if_exists = None
        # CREATE/ALTER only: ColumnInfo collections describing the change
        self.added_columns = None
        self.bef_mod_column_map = None
        self.modified_columns = None
        self.drop_columns = None


class DDLParser(JsonParser):
    """Parses MySQL DDL (via sqloxide's JSON AST) into TableInfo /
    DDLMetaInfo structures, using jsonpath expressions to locate nodes."""

    CREAT_TBLNAME_FINDER = jsonpath_ng.parse("CreateTable.name[0].value")
    CREAT_COL_FINDER = jsonpath_ng.parse("CreateTable.columns")
    CREAT_PK_FINDER = jsonpath_ng.parse("CreateTable.constraints[*].PrimaryKey")
    CREAT_UNI_FINDER = jsonpath_ng.parse("CreateTable.constraints[*].Unique")
    CREAT_IDX_FINDER = jsonpath_ng.parse("CreateTable.constraints[*].Index")
    CREAT_TBL_IFNOTEXISTS_FINDER = jsonpath_ng.parse("CreateTable.if_not_exists")
    CREAT_TBL_LIKE_FINDER = jsonpath_ng.parse("CreateTable.like")
    CREAT_TBL_LIKE_SOURCE_FINDER = jsonpath_ng.parse("CreateTable.like[0].value")

    UNI_NAME_FINDER = jsonpath_ng.parse("index_name.value")
    IDX_NAME_FINDER = jsonpath_ng.parse("name.value")
    IDX_FLDS_FINDER = jsonpath_ng.parse("columns[*].value")

    ALTER_TBLNAME_FINDER = jsonpath_ng.parse("AlterTable.name[0].value")
    ALTER_MOD_COL_FINDER = jsonpath_ng.parse("AlterTable.operations[*].ModifyColumn")
    ALTER_ADD_COL_FINDER = jsonpath_ng.parse(
        "AlterTable.operations[*].AddColumn.column_def"
    )
    ALTER_DROP_COL_FINDER = jsonpath_ng.parse("AlterTable.operations[*].DropColumn")
    ADD_PK_FINDER = jsonpath_ng.parse(
        "AlterTable.operations[*].AddConstraint.PrimaryKey"
    )
    ADD_UNI_FINDER = jsonpath_ng.parse("AlterTable.operations[*].AddConstraint.Unique")
    ADD_IDX_FINDER = jsonpath_ng.parse("AlterTable.operations[*].AddConstraint.Index")

    DROP_TBLNAME_FINDER = jsonpath_ng.parse("Drop.names[0][0].value")
    DROP_IDX_FINDER = jsonpath_ng.parse("AlterTable.operations[*].DropConstraint")
    DROP_IFEXISTS_FINDER = jsonpath_ng.parse("Drop.if_exists")

    def __init__(self):
        pass

    def parse(self, sql: str, tbl_map: TableInfoMgr, debug=False) -> list:
        """Parse every statement in sql, updating tbl_map in place.

        Returns one DDLMetaInfo per statement, or an empty list when the SQL
        cannot be parsed (stored procedures are skipped with a warning).
        """
        tbl_info_mgr = tbl_map if tbl_map is not None else TableInfoMgr()
        try:
            LOGGER.info(sql)
            stmt_list = parse_sql(
                sql=utils.substitute_by_patterns(sql, PAT_SUBS), dialect="mysql"
            )

            if debug:
                LOGGER.info(json.dumps(stmt_list, indent=2))

            return [self.parse_one_stmt(stmt, tbl_info_mgr) for stmt in stmt_list]
        except Exception as e:
            # DELIMITER-based statements (stored procedures) are expected to
            # fail; anything else is logged with a full traceback.
            if "found: delimiter" in str(e).lower():
                LOGGER.warning("parse ddl sql is store-procedure, skipped")
            else:
                LOGGER.error("parse catch err:%s" % traceback.format_exc())
            return []

    def parse_one_stmt(self, stmt, tbl_info_mgr) -> DDLMetaInfo:
        """Dispatch a single parsed statement node to the matching handler."""
        handlers = (
            ("CreateTable", self.parse_create_table),
            ("AlterTable", self.parse_alter_table),
            ("Drop", self.parse_drop_table),
        )
        for key, handler in handlers:
            if key in stmt:
                return handler(tbl_info_mgr, stmt)
        return DDLMetaInfo(DDLMetaInfo.DDL_UNKNOWN, None)

    def parse_drop_table(self, tbl_info_mgr: TableInfoMgr, stmt):
        """Handle DROP TABLE: forget the table and record IF EXISTS."""
        tbl_name = self.only_match_value(self.DROP_TBLNAME_FINDER, stmt)
        tbl_info_mgr.drop_table(tbl_name)
        meta = DDLMetaInfo(DDLMetaInfo.DDL_DROP_TBL, tbl_name)
        meta.if_exists = self.only_match_value(self.DROP_IFEXISTS_FINDER, stmt)
        return meta

    def parse_alter_table(self, tbl_info_mgr, stmt):
        """Handle ALTER TABLE: column add/modify/drop and index add/drop."""
        tbl_name = self.only_match_value(self.ALTER_TBLNAME_FINDER, stmt)
        tbl_info = tbl_info_mgr.create_tbl_info(tbl_name)

        modified_cols = [
            ColumnInfo.from_alter_modify(node)
            for node in self.find_match_values(self.ALTER_MOD_COL_FINDER, stmt)
        ]
        added_cols = [
            ColumnInfo.from_alter_add(node)
            for node in self.find_match_values(self.ALTER_ADD_COL_FINDER, stmt)
        ]
        drop_cols = [
            ColumnInfo.from_alter_drop(node)
            for node in self.find_match_values(self.ALTER_DROP_COL_FINDER, stmt)
        ]
        before_mod_dict = tbl_info.modify_columns({c.name: c for c in modified_cols})
        tbl_info.add_columns({c.name: c for c in added_cols})
        tbl_info.drop_columns({c.name: c for c in drop_cols})

        # a column-level PRIMARY KEY on an added column becomes an index
        self.process_column_pk(added_cols, tbl_info, DeltaInfo.ALTER_TP_ADD)

        self._collect_indexes(
            tbl_info,
            stmt,
            (
                (self.ADD_IDX_FINDER, self.IDX_NAME_FINDER, IndexInfo.TYP_IDX),
                (self.ADD_UNI_FINDER, self.UNI_NAME_FINDER, IndexInfo.TYP_UNI),
                (self.ADD_PK_FINDER, self.IDX_NAME_FINDER, IndexInfo.TYP_PK),
            ),
            DeltaInfo.ALTER_TP_ADD,
        )

        for drop_node in self.find_match_values(self.DROP_IDX_FINDER, stmt):
            tbl_info.collect_index(
                self.only_match_value(self.IDX_NAME_FINDER, drop_node),
                IndexInfo.TYP_IDX,
                [],
                DeltaInfo.ALTER_TP_DROP,
            )

        meta = DDLMetaInfo(DDLMetaInfo.DDL_ALTER_TBL, tbl_name)
        meta.added_columns = added_cols
        meta.bef_mod_column_map = before_mod_dict
        meta.modified_columns = modified_cols
        meta.drop_columns = drop_cols
        return meta

    def process_column_pk(self, added_cols, tbl_info: TableInfo, alter_type):
        """Promote a column-level PRIMARY KEY marker into an index entry."""
        pk_cols = [col.name for col in added_cols if col.is_pk]
        if len(pk_cols) > 1:
            LOGGER.error("multiple primary key defined in table:%s" % tbl_info.name)
        elif len(pk_cols) == 1:
            tbl_info.collect_index('pk', IndexInfo.TYP_PK, pk_cols, alter_type)

    def parse_create_table(self, tbl_info_mgr, stmt):
        """Handle CREATE TABLE: columns, options, and table constraints."""
        tbl_name = self.only_match_value(self.CREAT_TBLNAME_FINDER, stmt)
        tbl_info = tbl_info_mgr.create_tbl_info(tbl_name)

        all_cols = [
            ColumnInfo.from_create(node)
            for node in self.only_match_value(self.CREAT_COL_FINDER, stmt)
        ]
        tbl_info.add_columns({c.name: c for c in all_cols})

        tbl_info.if_not_exists = self.only_match_value(
            self.CREAT_TBL_IFNOTEXISTS_FINDER, stmt
        )
        tbl_info.has_create_like = self.only_match_value(self.CREAT_TBL_LIKE_FINDER, stmt)
        if tbl_info.has_create_like:
            tbl_info.create_like_source = self.only_match_value(
                self.CREAT_TBL_LIKE_SOURCE_FINDER, stmt
            )

        self.process_column_pk(all_cols, tbl_info, DeltaInfo.ALTER_TP_NONE)

        # a table-level PRIMARY KEY constraint is always registered as "pk"
        for pk_node in self.find_match_values(self.CREAT_PK_FINDER, stmt):
            tbl_info.collect_index("pk", IndexInfo.TYP_PK, self.find_idx_flds(pk_node))

        self._collect_indexes(
            tbl_info,
            stmt,
            (
                (self.CREAT_UNI_FINDER, self.UNI_NAME_FINDER, IndexInfo.TYP_UNI),
                (self.CREAT_IDX_FINDER, self.IDX_NAME_FINDER, IndexInfo.TYP_IDX),
            ),
            DeltaInfo.ALTER_TP_NONE,
        )

        meta = DDLMetaInfo(DDLMetaInfo.DDL_CREATE_TBL, tbl_name)
        meta.added_columns = all_cols
        return meta

    def _collect_indexes(self, tbl_info, stmt, specs, alter_type):
        """Collect indexes for each (node_finder, name_finder, type) spec,
        preserving the order of specs (duplicates keep the first definition)."""
        for node_finder, name_finder, idx_typ in specs:
            for node in self.find_match_values(node_finder, stmt):
                tbl_info.collect_index(
                    self.at_most_one_match_value(name_finder, node),
                    idx_typ,
                    self.find_idx_flds(node),
                    alter_type,
                )

    def find_idx_flds(self, idx_node):
        """Extract the ordered list of field names from an index node."""
        return self.find_match_values(self.IDX_FLDS_FINDER, idx_node)


def parse_ddl(s: str, debug=False):
    """Parse DDL text into a fresh TableInfoMgr and return it."""
    mgr = TableInfoMgr()
    DDLParser().parse(s, mgr, debug)
    return mgr


class DDLParserTest(unittest.TestCase):
    """Unit tests for DDLParser; uses the specific unittest assertion
    methods (assertIn/assertIsNotNone/...) for clearer failure messages."""

    def test_parse_ddl_when_create_table(self):
        """CREATE TABLE with named indexes, defaults and auto-increment."""
        m = parse_ddl(
            """
        create table t1 (id int not null AUTO_INCREMENT, id2 int, status varchar(1) default 'N',
        creation_date timestamp NOT NULL,
        last_update_date datetime NOT NULL,
        primary key (id) using btree,
        unique key idx_id2(id2) using btree,
        key idx_status(status)) ENGINE=InnoDB COMMENT='test table';
        """,
            True,
        )
        self.assertIsNotNone(m.get("T1"))
        self.assertFalse(m.get("t1").if_not_exists)
        self.assertTrue(m.get("t1").has_pk_or_uniquekey())

        fld_def = m.get("t1").get("id")
        self.assertTrue(fld_def.is_auto_incr)
        self.assertFalse(fld_def.has_default())

        self.assertFalse(m.get("t1").get("id2").is_auto_incr)
        self.assertTrue(m.get("t1").get("id2").is_nullable)
        self.assertFalse(m.get("t1").get("id2").has_default())
        self.assertTrue(m.get("t1").get("status").has_default())

        idx_def = m.get("t1").get_index_def()
        self.assertIn(("id",), idx_def)
        self.assertEqual(idx_def.get(("id",)).typ, IndexInfo.TYP_PK)

        self.assertIn(("id2",), idx_def)
        self.assertEqual(idx_def.get(("id2",)).typ, IndexInfo.TYP_UNI)
        self.assertEqual(idx_def.get(("id2",)).name, "idx_id2")

        self.assertIn(("status",), idx_def)
        self.assertEqual(idx_def.get(("status",)).typ, IndexInfo.TYP_IDX)
        self.assertEqual(idx_def.get(("status",)).name, "idx_status")

        res = m.find_fields_by_cond(m.not_null_ts_field_without_dft_checker)
        self.assertEqual(len(res), 2)
        self.assertEqual(res[0], ('t1', 'creation_date'))
        self.assertEqual(res[1], ('t1', 'last_update_date'))

    def test_parse_ddl_when_create_table_pk(self):
        """Column-level PRIMARY KEY is promoted to an index entry."""
        m = parse_ddl(
            """
        create table t1 (id int not null AUTO_INCREMENT primary key, id2 int, status varchar(1),
        unique key idx_id2(id2) using btree,
        key idx_status(status)) ENGINE=InnoDB COMMENT='test table';
        """,
            True,
        )
        self.assertIsNotNone(m.get("T1"))
        self.assertFalse(m.get("t1").if_not_exists)
        self.assertTrue(m.get("t1").has_pk_or_uniquekey())
        self.assertIn(('id2',), m.get("t1").get_index_def())
        self.assertIn(('id',), m.get("t1").get_index_def())

        fld_def = m.get("t1").get("id")
        self.assertTrue(fld_def.is_auto_incr)
        self.assertTrue(fld_def.is_pk)

        fld_def = m.get("t1").get("id2")
        self.assertFalse(fld_def.is_pk)

    def test_parse_ddl_when_create_table_no_idx_name(self):
        """Anonymous keys parse with name=None; index field lookup works."""
        m = parse_ddl(
            """
        create table if not exists t1 (id int not null, id1 int, id2 int, status varchar(1),
        primary key (id) using btree,
        unique key (id2) using btree,
        key (status)) ENGINE=InnoDB 
        ROW_FORMAT=DYNAMIC
        COMMENT='test table';
        """,
            True,
        )
        self.assertIsNotNone(m.get("t1"))
        self.assertTrue(m.get("t1").if_not_exists)
        self.assertFalse(m.get("t1").has_create_like)
        self.assertTrue(m.get("t1").has_pk_or_uniquekey())
        self.assertTrue(m.get("t1").field_has_index("id"))
        self.assertFalse(m.get("t1").field_has_index("id1"))
        self.assertTrue(m.get("t1").field_has_index("id2"))
        idx_def = m.get("t1").get_index_def()

        self.assertIn(("id",), idx_def)
        self.assertEqual(idx_def.get(("id",)).typ, IndexInfo.TYP_PK)

        self.assertIn(("id2",), idx_def)
        self.assertEqual(idx_def.get(("id2",)).typ, IndexInfo.TYP_UNI)
        self.assertIsNone(idx_def.get(("id2",)).name)

        self.assertIn(("status",), idx_def)
        self.assertEqual(idx_def.get(("status",)).typ, IndexInfo.TYP_IDX)
        self.assertIsNone(idx_def.get(("status",)).name)

    def test_parse_ddl_when_create_like_has_pk(self):
        """CREATE TABLE ... LIKE inherits the PK check from its source."""
        m = parse_ddl(
            """
        create table t_user(id int, name varchar(255), primary key (id), index idx_id(id));    
        create table t_user_copy like t_user;
        """,
            True,
        )
        tbl_info = m.get("t_user_copy")
        self.assertIsNotNone(tbl_info)
        self.assertTrue(tbl_info.has_create_like)
        self.assertEqual(tbl_info.create_like_source, "t_user")
        self.assertEqual(m.find_tables_has_no_pk_or_uk(), [])

    def test_parse_ddl_when_create_like_has_no_pk(self):
        """Both source and LIKE copy are reported when the source has no PK."""
        m = parse_ddl(
            """
        create table t_user(id int, name varchar(255));    
        create table t_user_copy like t_user;
        """,
            True,
        )
        tbl_info = m.get("t_user_copy")
        self.assertIsNotNone(tbl_info)
        self.assertTrue(tbl_info.has_create_like)
        self.assertEqual(tbl_info.create_like_source, "t_user")
        self.assertEqual(set(m.find_tables_has_no_pk_or_uk()), {"t_user", "t_user_copy"})

    def test_parse_ddl_when_alter_table_add_index(self):
        """ALTER TABLE ADD primary/unique/plain indexes on an unseen table."""
        m = parse_ddl(
            """
        alter table t2 add constraint pk_id primary key (eid);
        alter table t2 add unique index idx_order_id(order_id) using btree;
        alter table t2 add index idx_multi(order_id, time) using btree;
        """,
            True,
        )
        self.assertIsNotNone(m.get("t2"))
        self.assertTrue(m.get("T2").field_has_index("EID"))
        self.assertFalse(m.get("t2").field_has_index("eid1"))
        idx_def = m.get("t2").get_index_def()
        self.assertEqual(len(idx_def), 3)
        self.assertEqual(idx_def.get(("eid",)).typ, IndexInfo.TYP_PK)
        self.assertEqual(idx_def.get(("eid",)).flds, ["eid"])
        self.assertEqual(idx_def.get(("eid",)).name, "pk_id")

        self.assertEqual(idx_def.get(("order_id",)).typ, IndexInfo.TYP_UNI)
        self.assertEqual(idx_def.get(("order_id",)).flds, ["order_id"])
        self.assertEqual(idx_def.get(("order_id",)).name, "idx_order_id")

        self.assertEqual(idx_def.get(("order_id", "time")).typ, IndexInfo.TYP_IDX)
        self.assertEqual(idx_def.get(("order_id", "time")).flds, ["order_id", "time"])
        self.assertEqual(idx_def.get(("order_id", "time")).name, "idx_multi")

    def test_parse_ddl_when_alter_table_drop_index(self):
        """ALTER TABLE DROP INDEX removes by name; unknown drops are skipped."""
        m = parse_ddl(
            """
        alter table t2 add unique index idx_order_id(order_id) using btree;    
        alter table t2 drop index idx_order_id;
        alter table t2 drop key idx_multi;
        """,
            True,
        )
        self.assertIsNotNone(m.get("t2"))
        index_def = m.get("t2").get_index_def()
        self.assertNotIn(("order_id",), index_def)
        self.assertEqual(len(index_def), 0)

    def test_data_type_char(self):
        """A bare CHAR column defaults to length 1."""
        m = parse_ddl(
            """
        create table t1 (
        id int not null,
        desc char null
        );
        """
        )
        self.assertIsNotNone(m.get("t1"))
        self.assertFalse(m.get("t1").has_pk_or_uniquekey())
        col_def = m.get("t1").get("desc")
        self.assertEqual(col_def.data_type.basic_type, DBDataType.BTP_CHAR)
        self.assertEqual(col_def.data_type.scope, 1)

    def test_partition_by(self):
        """ALTER ... PARTITION BY is stripped and creates no table."""
        m = parse_ddl(
            """
        ALTER TABLE t1 PARTITION BY KEY(name) PARTITIONS 10;
        """
        )
        self.assertNotIn("t1", m)

    def test_drop_table1(self):
        """DROP TABLE IF EXISTS removes the table and records if_exists."""
        m = TableInfoMgr()
        res_list = DDLParser().parse(
            """
        CREATE TABLE T1(id int not null);
        DROP TABLE IF EXISTS T1;
        """,
            m,
        )
        self.assertNotIn("t1", m)
        self.assertEqual(len(res_list), 2)
        self.assertEqual(res_list[0].ddl_type, DDLMetaInfo.DDL_CREATE_TBL)
        self.assertEqual(res_list[1].ddl_type, DDLMetaInfo.DDL_DROP_TBL)
        self.assertEqual(res_list[0].tbl_name, "T1")
        self.assertEqual(res_list[1].tbl_name, "T1")
        self.assertTrue(res_list[1].if_exists)

    def test_drop_table2(self):
        """A DROP before CREATE leaves the recreated table in place."""
        m = TableInfoMgr()
        res_list = DDLParser().parse(
            """
        DROP TABLE T1;
        CREATE TABLE T1(id int not null);
        """,
            m,
            True,
        )
        self.assertIn("t1", m)
        self.assertEqual(len(res_list), 2)
        self.assertEqual(res_list[0].ddl_type, DDLMetaInfo.DDL_DROP_TBL)
        self.assertEqual(res_list[0].tbl_name, "T1")
        self.assertEqual(res_list[0].if_exists, False)

    def test_alter_add_column(self):
        """A sequence of ALTER add/modify/drop column statements."""
        m = TableInfoMgr()
        res_list = DDLParser().parse(
            """
            create table students(
            name varchar(100) NOT NULL,
            grade varchar(10),
            join_time timestamp);
        alter table students add id int not null auto_increment primary key;
        alter table students add sex varchar(10) not null comment '性别';
        alter table students modify name varchar(255) not null comment '姓名';
        alter table students add deleted_flag char(1) not null default 'N' comment '软删除标记';
        alter table students modify grade varchar(20) default 'grade1' comment '年级';
        alter table students drop join_time;
        """,
            m,
            True,
        )
        self.assertEqual(len(res_list), 7)
        self.assertEqual(res_list[0].ddl_type, DDLMetaInfo.DDL_CREATE_TBL)
        self.assertEqual(res_list[1].ddl_type, DDLMetaInfo.DDL_ALTER_TBL)
        self.assertIn("students", m)
        tbl_info = m.get("students")
        self.assertIsNotNone(tbl_info.get("name"))

        fld_def = tbl_info.get("id")
        self.assertEqual(
            fld_def.data_type, DBDataType(DBDataType.BTP_INT, None)
        )
        self.assertTrue(fld_def.is_auto_incr)
        self.assertEqual(fld_def.is_nullable, False)

        self.assertTrue(tbl_info.has_pk_or_uniquekey())
        self.assertIn(('id',), tbl_info.get_index_def())
        self.assertEqual(tbl_info.index_def.get(('id',)).typ, IndexInfo.TYP_PK)

        fld_def = tbl_info.get("name")
        self.assertEqual(
            fld_def.data_type, DBDataType(DBDataType.BTP_VARCHAR, 255)
        )
        self.assertFalse(fld_def.is_auto_incr)
        self.assertEqual(fld_def.is_nullable, False)
        self.assertEqual(fld_def.comment, "姓名")

        self.assertEqual(tbl_info.get("grade").is_nullable, True)
        self.assertIsNotNone(tbl_info.get("sex"))
        self.assertIsNone(tbl_info.get("join_time"))

        self.assertFalse(res_list[5].bef_mod_column_map.get('grade').has_default())
        self.assertTrue(tbl_info.get("grade").has_default())
        self.assertFalse(tbl_info.get("sex").has_default())
        self.assertTrue(tbl_info.get("deleted_flag").has_default())


# Run the unit tests when this module is executed as a script.
if __name__ == "__main__":
    unittest.main()
