import json
import re
import unittest

import jsonpath_ng
from sqloxide import parse_sql, restore_ast

import utils
from base_parser import JsonParser
from log import LOGGER


class ConditionInfo:
    """A case-insensitive (owner table, field) pair identifying one condition column.

    Either part may be None (e.g. when the owner cannot be resolved yet).
    Instances are hashable and compare equal on the normalized pair.
    """

    def __init__(self, owner, fld):
        # Lower-case both parts; falsy values (None, "") collapse to None.
        self.owner, self.fld = (
            part.lower() if part else None for part in (owner, fld)
        )

    def __eq__(self, other):
        return (
            isinstance(other, ConditionInfo)
            and (self.owner, self.fld) == (other.owner, other.fld)
        )

    def __repr__(self):
        return f"{self.owner}.{self.fld}"

    def __hash__(self):
        # Consistent with __eq__: hash on the normalized pair.
        return hash((self.owner, self.fld))


class DMLMetaInfo:
    """Metadata extracted from one parsed DML statement.

    Holds the statement type, the normalized alias->table mapping, the
    collected condition columns and the raw sqloxide AST node.
    """

    # Statement types.
    SQLTYPE_SEL = 0
    SQLTYPE_UPT = 1
    SQLTYPE_DEL = 2
    SQLTYPE_INS = 3
    SQLTYPE_TRUNC = 4

    # INSERT flavors.
    INS_NORMAL_MODE = 300
    INS_REPLACE_MODE = 301
    INS_ONDUP_UPT_MODE = 302

    def __init__(self, sql_type, source_mapping, grammar_json):
        self.sql_type = sql_type
        # INSERT flavor; may be refined later by the parser.
        self.insert_mode = self.INS_NORMAL_MODE
        self.grammar_json = grammar_json
        self.normalized_src_mapping = self.normalize(source_mapping)
        self.conds = []
        self.only_table_name = self.calc_only_table()

    def get_grammar_tree(self):
        """Return the raw AST node for this statement."""
        return self.grammar_json

    def get_origin_sql(self):
        """Re-render this statement's SQL text from its AST."""
        return restore_ast(ast=[self.grammar_json])[0]

    def normalize(self, source_mapping):
        """Return *source_mapping* with every alias and table lower-cased."""
        return {
            alias.lower(): table.lower()
            for alias, table in source_mapping.items()
        }

    def calc_only_table(self):
        """Return the single distinct table name, or None when there are
        zero or several distinct tables."""
        distinct_tables = set(self.normalized_src_mapping.values())
        if len(distinct_tables) == 1:
            return distinct_tables.pop()
        return None

    def get_table_alias_mapping(self):
        """Return the normalized alias->table mapping."""
        return self.normalized_src_mapping

    def has_only_table(self):
        """True when the statement references exactly one distinct table."""
        return self.only_table_name is not None

    def get_only_table(self):
        """Return the single table name, or None."""
        return self.only_table_name

    # owner may be alias or table-name
    def collect_condition(self, fld, owner):
        """Record *fld* as a condition column of *owner*'s table (deduplicated)."""
        owner = owner.lower()
        cond_info = ConditionInfo(self.normalized_src_mapping.get(owner), fld)

        if cond_info.owner is None:
            # check whether owner is table-name
            if owner not in self.normalized_src_mapping.values():
                # in subquery we may find alias pointing to a subquery, not a table
                LOGGER.warning(
                    "cannot find source mapping of owner:%s, fld:%s" % (owner, fld)
                )
                return
            cond_info.owner = owner

        if cond_info not in self.conds:
            self.conds.append(cond_info)

    def get_conditions(self):
        """Return the collected ConditionInfo list, in discovery order."""
        return self.conds

    def __repr__(self):
        return "conditions:%s, source mapping:%s" % (
            self.conds,
            self.normalized_src_mapping,
        )


DYN_TBL_NAME = "$tbl"
DYN_FLD_NAME = "$fld"
# Patterns rewriting "?" placeholders into parseable dynamic table/field
# names so that sqloxide can still build an AST for dynamic SQL.
# BUGFIX: use raw strings — "\s" and "\?" in plain string literals are
# invalid escape sequences (DeprecationWarning, SyntaxWarning in newer
# CPython versions); behavior is otherwise identical.
PAT_FROM_TBL = re.compile(r"from\s+\?\s+", re.IGNORECASE)
PAT_JOIN_TBL = re.compile(r"join\s+\?\s+", re.IGNORECASE)
PAT_UPT_DYN_TBL = re.compile(r"update\s+\?\s+", re.IGNORECASE)
PAT_INS_DYN_TBL = re.compile(r"insert\s+into\s+\?", re.IGNORECASE)
PAT_DYN_FLD_SET = re.compile(r"\?\s*=", re.IGNORECASE)
# (pattern, replacement) pairs, applied in order before parsing.
PAT_SUBS = [
    (PAT_FROM_TBL, "FROM %s " % DYN_TBL_NAME),
    (PAT_JOIN_TBL, "JOIN %s " % DYN_TBL_NAME),
    (PAT_INS_DYN_TBL, "INSERT INTO %s " % DYN_TBL_NAME),
    (PAT_UPT_DYN_TBL, "UPDATE %s " % DYN_TBL_NAME),
    (PAT_DYN_FLD_SET, " %s=" % DYN_FLD_NAME),
]


class DMLParser(JsonParser):
    """Parse DML statements (SELECT/INSERT/UPDATE/DELETE/TRUNCATE) into
    DMLMetaInfo objects.

    The SQL text is first run through PAT_SUBS so that dynamic "?"
    placeholders become parseable identifiers, then handed to sqloxide.
    Table sources and condition columns (WHERE / JOIN-ON / GROUP BY /
    ORDER BY) are extracted from the resulting AST with JSONPath queries.
    """

    # JSONPath finders over the sqloxide AST (nested dicts/lists).
    TBL_FINDER = jsonpath_ng.parse("$..Table")
    TRUNCATE_TBL_FINDER = jsonpath_ng.parse("Truncate.table_names[*]")
    TBL_ALIAS_FINDER = jsonpath_ng.parse("$.alias.name.value")
    TBL_NAME_FINDER = jsonpath_ng.parse("$.name[0].value")
    WHERE_FINDER = jsonpath_ng.parse("$..selection")
    LIMIT_FINDER = jsonpath_ng.parse("$..limit")
    JOIN_FINDER = jsonpath_ng.parse("$..joins..On")
    GROUPBY_FINDER = jsonpath_ng.parse("$..group_by.Expressions[*]")
    ORDERBY_FINDER = jsonpath_ng.parse("$..order_by")
    ID_FINDER = jsonpath_ng.parse("$..[Identifier,CompoundIdentifier]")
    FLD_FINDER = jsonpath_ng.parse("$..value")

    # Finders intended for INSERT-mode classification in parse_insert_mode.
    REPLACE_FINDER = jsonpath_ng.parse("Insert.replace_into")
    ONDUP_FINDER = jsonpath_ng.parse("Insert.on.DuplicateKeyUpdate")

    def __init__(self):
        pass

    def parse(self, sql):
        """Parse *sql* (possibly several statements) and return a list of
        DMLMetaInfo, one per statement, in order."""
        LOGGER.info(sql)
        parsed_stmts = parse_sql(
            sql=utils.substitute_by_patterns(sql, PAT_SUBS), dialect="mysql"
        )
        LOGGER.info(json.dumps(parsed_stmts, indent=2))
        res_list = []
        for stmt in parsed_stmts:
            meta_info = DMLMetaInfo(
                self.parse_sql_type(stmt), self.parse_sources(stmt), stmt
            )
            # BUGFIX: the original compared meta_info.insert_mode (an INS_*
            # constant, 300..302) with SQLTYPE_INS (3), which was never true,
            # so parse_insert_mode() was unreachable. Compare the sql_type.
            if meta_info.sql_type == DMLMetaInfo.SQLTYPE_INS:
                self.parse_insert_mode(stmt, meta_info)

            self.parse_conditions(stmt, meta_info)
            res_list.append(meta_info)
        return res_list

    def parse_insert_mode(self, stmt, meta_info: DMLMetaInfo):
        """Hook: classify an INSERT as normal / REPLACE / ON DUPLICATE KEY
        UPDATE and set meta_info.insert_mode accordingly.

        Not implemented here; REPLACE_FINDER / ONDUP_FINDER are provided for
        subclasses or a future implementation.
        """
        pass

    def parse_sql_type(self, stmt):
        """Map the AST's top-level key to a DMLMetaInfo.SQLTYPE_* constant.

        Unrecognized statements default to SQLTYPE_SEL.
        """
        if "Query" in stmt:
            return DMLMetaInfo.SQLTYPE_SEL

        if "Insert" in stmt:
            return DMLMetaInfo.SQLTYPE_INS

        if "Update" in stmt:
            return DMLMetaInfo.SQLTYPE_UPT

        if "Delete" in stmt:
            return DMLMetaInfo.SQLTYPE_DEL

        if "Truncate" in stmt:
            return DMLMetaInfo.SQLTYPE_TRUNC

        return DMLMetaInfo.SQLTYPE_SEL

    def parse_sources(self, stmt):
        """Build the alias->table mapping for every table node in *stmt*.

        Tables without an explicit alias map their own name to themselves.
        NOTE(review): duplicate aliases overwrite earlier entries — assumed
        not to occur in valid SQL.
        """
        m = {}
        tbl_nodes = []
        tbl_nodes.extend(self.find_match_values(self.TBL_FINDER, stmt))
        tbl_nodes.extend(self.find_match_values(self.TRUNCATE_TBL_FINDER, stmt))
        for tbl_node in tbl_nodes:
            tbl_alias, tbl_name = self.parse_source(tbl_node)
            m[tbl_alias] = tbl_name
        return m

    def parse_source(self, tbl_node):
        """Return (alias, table_name) for one table node; the alias falls
        back to the table name when none is declared."""
        tbl_name = self.only_match_value(self.TBL_NAME_FINDER, tbl_node)
        tbl_alias_list = self.find_match_values(self.TBL_ALIAS_FINDER, tbl_node)
        tbl_alias = tbl_alias_list[0] if tbl_alias_list else tbl_name
        return tbl_alias, tbl_name

    def find_condition_matches(self, stmt):
        """Collect all AST subtrees that may contain condition columns:
        WHERE, JOIN-ON, ORDER BY and GROUP BY clauses."""
        cond_matches = []
        for finder in [
            self.WHERE_FINDER,
            self.JOIN_FINDER,
            self.ORDERBY_FINDER,
            self.GROUPBY_FINDER,
        ]:
            cond_matches.extend(self.find_json_matches(finder, stmt))
        return cond_matches

    def parse_conditions(self, stmt, meta_info: DMLMetaInfo):
        """Extract condition columns from *stmt* into *meta_info*.

        For bare column names the owning table is resolved via the statement's
        single table, or by walking up the JSONPath to the closest enclosing
        query scope; unresolvable owners are logged and skipped.
        """
        cond_matches = self.find_condition_matches(stmt)
        for cond_match in cond_matches:
            if cond_match.value is None:
                continue
            id_nodes = self.find_json_matches(self.ID_FINDER, cond_match.value)
            for id_node in id_nodes:
                if id_node.value is None:
                    continue

                mediate_path = str(id_node.full_path)
                if self.in_projection(mediate_path):
                    # identifiers inside a SELECT projection are not conditions
                    LOGGER.info(
                        "jsonpath:%s in projection, filtered out" % mediate_path
                    )
                    continue

                flds = self.find_json_matches(self.FLD_FINDER, id_node.value)
                if not flds:
                    LOGGER.warning("find no condition fld in %s" % cond_match.value)
                    continue

                if len(flds) == 1:
                    # bare column name: resolve its owner table
                    if meta_info.has_only_table():
                        meta_info.collect_condition(
                            flds[0].value, meta_info.get_only_table()
                        )
                    else:
                        # try to find closest table
                        json_path = "%s.%s.%s" % (
                            cond_match.full_path,
                            id_node.full_path,
                            flds[0].full_path,
                        )
                        LOGGER.info("no owner node's path:%s" % json_path)
                        closest_tbls = self.find_closest_tables(json_path, stmt)
                        if closest_tbls and len(closest_tbls) == 1:
                            meta_info.collect_condition(
                                flds[0].value, closest_tbls[0][0]
                            )
                        else:
                            LOGGER.warning(
                                "cannot find out field:%s's owner,please set its owner table, candidates:%s"
                                % (flds[0].value, closest_tbls)
                            )
                else:
                    # CompoundIdentifier (owner.column): flds[0] is the owner
                    # alias/table, flds[1] the column name
                    meta_info.collect_condition(flds[1].value, flds[0].value)

    # Path fragments used to locate the query scope an identifier belongs to.
    SELECT_PART = ".Select."
    PROJ_PART = ".projection."

    SUBQRY1_PART = ".Subquery."
    SUBQRY2_PART = ".subquery."

    QRY_PART = "Query."

    def in_projection(self, json_path: str) -> bool:
        """True when *json_path* lies inside a SELECT projection list."""
        return json_path.rfind(self.PROJ_PART) != -1

    def find_closest_table_path(self, json_path: str):
        """Derive a JSONPath that finds the table nodes of the innermost
        query scope enclosing *json_path*, or None when no scope marker is
        present in the path."""
        for candidate_part in (
                self.SELECT_PART,
                self.SUBQRY1_PART,
                self.SUBQRY2_PART,
                self.QRY_PART,
        ):
            pos = json_path.rfind(candidate_part)
            if pos == -1:
                continue

            tbl_json_path = json_path[: pos + len(candidate_part)] + ".Table"
            LOGGER.info("find_closest_tables tbl_json_path:%s" % tbl_json_path)
            return tbl_json_path
        return None

    def find_closest_tables(self, json_path: str, stmt):
        """Return [(alias, table), ...] for the tables of the query scope
        closest to *json_path*, or None when no scope could be derived."""
        tbl_json_path = self.find_closest_table_path(json_path)
        if tbl_json_path is None:
            return None

        tbl_nodes = self.filter_same_level_tables(
            self.find_json_matches(jsonpath_ng.parse(tbl_json_path), stmt)
        )
        return [self.parse_source(tbl_node) for tbl_node in tbl_nodes]

    def filter_same_level_tables(self, raw_tbl_nodes):
        """Keep only the table nodes at the shallowest subquery nesting
        level among *raw_tbl_nodes*."""
        if not raw_tbl_nodes:
            return []

        tbl_nodes_with_qry_lvl = [
            (self.calc_query_lvl(str(x.full_path)), x.value) for x in raw_tbl_nodes
        ]
        LOGGER.info(
            "filter_same_level_tables tbl_nodes_with_qry_lvl:%s"
            % tbl_nodes_with_qry_lvl
        )
        tbl_nodes_with_qry_lvl.sort(key=lambda x: x[0])

        top_lvl = tbl_nodes_with_qry_lvl[0][0]
        # just keep same-level tables
        return [x[1] for x in tbl_nodes_with_qry_lvl if x[0] == top_lvl]

    def calc_query_lvl(self, json_path: str):
        """Nesting depth of *json_path*: number of subquery markers it crosses."""
        return json_path.count(self.SUBQRY1_PART) + json_path.count(self.SUBQRY2_PART)


def parse_single_sql(sql):
    """Parse *sql* and return the DMLMetaInfo of its first statement."""
    parser = DMLParser()
    return parser.parse(sql)[0]


class DMLParserTest(unittest.TestCase):
    """End-to-end tests: each case feeds real SQL through sqloxide and checks
    the extracted alias->table mapping, condition columns and statement type."""

    def test_simple_sel(self):
        """Conditions come from WHERE and ORDER BY of a single-table SELECT."""
        res = parse_single_sql(
            "select col1 from tbl1 where col2=1 and col3>2 order by col4,col5"
        )
        self.assertEqual(res.get_table_alias_mapping(), {"tbl1": "tbl1"})
        self.assertEqual(
            res.get_conditions(),
            [
                ConditionInfo("tbl1", "col2"),
                ConditionInfo("tbl1", "col3"),
                ConditionInfo("tbl1", "col4"),
                ConditionInfo("tbl1", "col5"),
            ],
        )
        self.assertEqual(res.sql_type, DMLMetaInfo.SQLTYPE_SEL)

    def test_simple_sel_no_cond(self):
        """A SELECT without WHERE/ORDER BY yields no conditions."""
        res = parse_single_sql("select col1 from tbl1")
        self.assertEqual(res.get_table_alias_mapping(), {"tbl1": "tbl1"})
        self.assertEqual(
            res.get_conditions(),
            [],
        )
        self.assertEqual(res.sql_type, DMLMetaInfo.SQLTYPE_SEL)

    def test_simple_sel_with_alias(self):
        """Aliased and bare columns both resolve to the real table name."""
        res = parse_single_sql("select col1 from tbl1 a where a.col2=1 and col3>2")
        self.assertEqual(res.get_table_alias_mapping(), {"a": "tbl1"})
        self.assertEqual(
            res.get_conditions(),
            [ConditionInfo("tbl1", "col2"), ConditionInfo("tbl1", "col3")],
        )
        self.assertEqual(res.sql_type, DMLMetaInfo.SQLTYPE_SEL)

    def test_complex_sel_with_join(self):
        """JOIN-ON columns are collected per owning table."""
        res = parse_single_sql("select a.col1 from tbl1 a join tbl2 b ON a.id=b.eid")
        self.assertEqual(res.get_table_alias_mapping(), {"a": "tbl1", "b": "tbl2"})
        self.assertEqual(
            res.get_conditions(),
            [ConditionInfo("tbl1", "id"), ConditionInfo("tbl2", "eid")],
        )

    def test_complex_sel_with_where(self):
        """Implicit (comma) joins behave like explicit joins."""
        res = parse_single_sql("select a.col1 from tbl1 a, tbl2 b where a.id=b.eid")
        self.assertEqual(res.get_table_alias_mapping(), {"a": "tbl1", "b": "tbl2"})
        self.assertEqual(
            res.get_conditions(),
            [ConditionInfo("tbl1", "id"), ConditionInfo("tbl2", "eid")],
        )

    def test_subquery_when_one_table(self):
        """Subquery over the same table: both scopes resolve to tbl1; the
        subquery alias 'b' maps to no table, so b.eid is dropped."""
        res = parse_single_sql(
            "select a.col1 from tbl1 a join (select eid from tbl1 where col2>10) b where a.id=b.eid"
        )
        self.assertEqual(res.get_table_alias_mapping(), {"a": "tbl1", "tbl1": "tbl1"})
        self.assertEqual(
            res.get_conditions(),
            [ConditionInfo("tbl1", "id"), ConditionInfo("tbl1", "col2")],
        )
        self.assertEqual(res.sql_type, DMLMetaInfo.SQLTYPE_SEL)

    def test_subquery_when_one_table_no_alias(self):
        """Bare 'id' in the outer WHERE still resolves: only one table exists."""
        res = parse_single_sql(
            "select a.col1 from tbl1 a join (select eid from tbl1 where col2>10) b where id=b.eid"
        )
        self.assertEqual(res.get_table_alias_mapping(), {"a": "tbl1", "tbl1": "tbl1"})
        self.assertEqual(
            res.get_conditions(),
            [ConditionInfo("tbl1", "id"), ConditionInfo("tbl1", "col2")],
        )

    def test_subquery_when_multi_tables(self):
        """Subquery over a different, unaliased table."""
        res = parse_single_sql(
            "select a.col1 from tbl1 a join (select eid from tbl2 where col2>10) b where a.id=b.eid"
        )
        self.assertEqual(res.get_table_alias_mapping(), {"a": "tbl1", "tbl2": "tbl2"})
        self.assertEqual(
            res.get_conditions(),
            [ConditionInfo("tbl1", "id"), ConditionInfo("tbl2", "col2")],
        )

    def test_subquery_when_multi_tables2(self):
        """Subquery table carries its own alias 't'."""
        res = parse_single_sql(
            "select a.col1 from tbl1 a join (select eid from tbl2 t where col2>10) b where a.id=b.eid"
        )
        self.assertEqual(res.get_table_alias_mapping(), {"a": "tbl1", "t": "tbl2"})
        self.assertEqual(
            res.get_conditions(),
            [ConditionInfo("tbl1", "id"), ConditionInfo("tbl2", "col2")],
        )

    def test_subquery_when_multi_tables_no_alias(self):
        """Bare 'eid' in the outer WHERE resolves to the closest (outer) table;
        names are case-normalized (TBL1 -> tbl1)."""
        res = parse_single_sql(
            "select a.col1 from TBL1 a join (select eid from tbl2 t where col2>10) b where a.id=eid"
        )
        self.assertEqual(res.get_table_alias_mapping(), {"a": "tbl1", "t": "tbl2"})
        self.assertEqual(
            res.get_conditions(),
            [
                ConditionInfo("tbl1", "id"),
                ConditionInfo("tbl1", "eid"),
                ConditionInfo("tbl2", "col2"),
            ],
        )

    def test_update(self):
        """UPDATE: SET columns are not conditions; WHERE columns are."""
        res = parse_single_sql("update tbl1 set desc='bb' where id=1")
        self.assertEqual(res.get_table_alias_mapping(), {"tbl1": "tbl1"})
        self.assertEqual(
            res.get_conditions(),
            [ConditionInfo("tbl1", "id")],
        )
        self.assertEqual(res.sql_type, DMLMetaInfo.SQLTYPE_UPT)

    def test_update_no_cond(self):
        res = parse_single_sql("update tbl1 set desc='bb'")
        self.assertEqual(res.get_table_alias_mapping(), {"tbl1": "tbl1"})
        self.assertEqual(
            res.get_conditions(),
            [],
        )
        self.assertEqual(res.sql_type, DMLMetaInfo.SQLTYPE_UPT)

    def test_delete(self):
        res = parse_single_sql("delete from tbl1 where id=1")
        self.assertEqual(res.get_table_alias_mapping(), {"tbl1": "tbl1"})
        self.assertEqual(
            res.get_conditions(),
            [ConditionInfo("tbl1", "id")],
        )
        self.assertEqual(res.sql_type, DMLMetaInfo.SQLTYPE_DEL)

    def test_delete_no_cond(self):
        res = parse_single_sql("delete from tbl1")
        self.assertEqual(res.get_table_alias_mapping(), {"tbl1": "tbl1"})
        self.assertEqual(
            res.get_conditions(),
            [],
        )
        self.assertEqual(res.sql_type, DMLMetaInfo.SQLTYPE_DEL)

    def test_group_by(self):
        """GROUP BY columns count as conditions."""
        res = parse_single_sql("select id1 from tbl1 t1 where time<10 group by t1.id1")
        self.assertEqual(res.get_table_alias_mapping(), {"t1": "tbl1"})
        self.assertEqual(
            res.get_conditions(),
            [ConditionInfo("tbl1", "time"), ConditionInfo("tbl1", "id1")],
        )

    def test_group_by_no_alias(self):
        res = parse_single_sql("select id1 from tbl1  where time<10 group by id1")
        self.assertEqual(res.get_table_alias_mapping(), {"tbl1": "tbl1"})
        self.assertEqual(
            res.get_conditions(),
            [ConditionInfo("tbl1", "time"), ConditionInfo("tbl1", "id1")],
        )

    def test_sub_query(self):
        """Scalar subquery in WHERE: inner conditions attach to the inner table."""
        res = parse_single_sql(
            """
        SELECT last_name,salary
FROM employees
WHERE salary > (
		SELECT old_salary
		FROM old_employees
		WHERE last_name = 'Abel'
		);
        """
        )
        self.assertEqual(
            res.get_table_alias_mapping(),
            {"employees": "employees", "old_employees": "old_employees"},
        )
        self.assertEqual(
            res.get_conditions(),
            [
                ConditionInfo("employees", "salary"),
                ConditionInfo("old_employees", "last_name"),
            ],
        )

    def test_sub_query2(self):
        """Subquery inside the projection: its WHERE still contributes, but the
        projected columns themselves do not."""
        res = parse_single_sql(
            """
        SELECT id1, IFNULL((select desc2 from tbl2 t1 where id2<10 limit 1), desc1) as des FROM tbl1 t where id1<100 order by sn;
        """
        )
        self.assertEqual(
            res.get_table_alias_mapping(),
            {"t": "tbl1", "t1": "tbl2"},
        )
        self.assertEqual(
            res.get_conditions(),
            [
                ConditionInfo("tbl1", "id1"),
                ConditionInfo("tbl2", "id2"),
                ConditionInfo("tbl1", "sn"),
            ],
        )

    def test_dyn_upt(self):
        """Dynamic UPDATE with '?' placeholders is still classified correctly."""
        res = parse_single_sql(
            """
        update ? set ?=?, date=now() where ?=?;
        """
        )
        self.assertEqual(res.sql_type, DMLMetaInfo.SQLTYPE_UPT)

    def test_parse_ddl_when_dyn_sql1(self):
        """A dynamic table placeholder becomes the synthetic $tbl name."""
        m = parse_single_sql(
            """
        delete from ? where ? < ? limit 100;
        """
        )
        self.assertTrue(m.get_only_table() == DYN_TBL_NAME)

    def test_parse_ddl_when_dyn_sql2(self):
        """Mixed concrete and dynamic tables in a join."""
        res = parse_single_sql(
            """
        delete from tbl t1 join ? t2 on t1.id=t2.id where ? < ? limit 100;
        """
        )
        self.assertEqual(
            res.get_table_alias_mapping(), {"t1": "tbl", "t2": DYN_TBL_NAME}
        )

    def test_parse_ddl_when_truncate(self):
        res = parse_single_sql(
            """
        truncate table t1;
        """
        )
        self.assertEqual(res.get_table_alias_mapping(), {"t1": "t1"})
        self.assertEqual(res.sql_type, DMLMetaInfo.SQLTYPE_TRUNC)

    def test_parse_ddl_when_insert(self):
        """INSERT target is not a $..Table node in the AST, so the mapping is
        empty — only the statement type is detected."""
        res = parse_single_sql(
            """
        insert into t1 values(1, 'a');
        """
        )
        self.assertEqual(res.get_table_alias_mapping(), {})
        self.assertEqual(res.sql_type, DMLMetaInfo.SQLTYPE_INS)


if __name__ == "__main__":
    # Run the test suite when this module is executed directly.
    unittest.main()
