import os
import json
import logging
import pymysql
import sys
from typing import List, Dict, Set, Tuple

# MySQL 8.0 reserved keywords (the common reserved words, upper-case).
# Identifiers (column/table names) are upper-cased and checked against this
# set; matches must be backtick-quoted before use in generated SQL.
MYSQL_KEYWORDS = {
    'ACCESSIBLE', 'ADD', 'ALL', 'ALTER', 'ANALYZE', 'AND', 'AS', 'ASC',
    'ASENSITIVE', 'BEFORE', 'BETWEEN', 'BIGINT', 'BINARY', 'BLOB', 'BOTH',
    'BY', 'CALL', 'CASCADE', 'CASE', 'CHANGE', 'CHAR', 'CHARACTER', 'CHECK',
    'COLLATE', 'COLUMN', 'CONDITION', 'CONSTRAINT', 'CONTINUE', 'CONVERT',
    'CREATE', 'CROSS', 'CURRENT_DATE', 'CURRENT_TIME', 'CURRENT_TIMESTAMP',
    'CURRENT_USER', 'CURSOR', 'DATABASE', 'DATABASES', 'DAY_HOUR', 'DAY_MICROSECOND',
    'DAY_MINUTE', 'DAY_SECOND', 'DEC', 'DECIMAL', 'DECLARE', 'DEFAULT', 'DELAYED',
    'DELETE', 'DESC', 'DESCRIBE', 'DETERMINISTIC', 'DISTINCT', 'DIV', 'DOUBLE',
    'DROP', 'DUAL', 'EACH', 'ELSE', 'ELSEIF', 'ENCLOSED', 'ESCAPED', 'EXISTS',
    'EXIT', 'EXPLAIN', 'FALSE', 'FETCH', 'FLOAT', 'FLOAT4', 'FLOAT8', 'FOR',
    'FORCE', 'FOREIGN', 'FROM', 'FULLTEXT', 'GENERATED', 'GET', 'GRANT', 'GROUP',
    'HAVING', 'HIGH_PRIORITY', 'HOUR_MICROSECOND', 'HOUR_MINUTE', 'HOUR_SECOND',
    'IF', 'IGNORE', 'IN', 'INDEX', 'INFILE', 'INNER', 'INOUT', 'INSENSITIVE',
    'INSERT', 'INT', 'INT1', 'INT2', 'INT3', 'INT4', 'INT8', 'INTEGER', 'INTERVAL',
    'INTO', 'IO_AFTER_GTIDS', 'IO_BEFORE_GTIDS', 'IS', 'ITERATE', 'JOIN', 'KEY',
    'KEYS', 'KILL', 'LEADING', 'LEFT', 'LIKE', 'LIMIT', 'LINEAR', 'LINES', 'LOAD',
    'LOCALTIME', 'LOCALTIMESTAMP', 'LOCK', 'LONG', 'LONGBLOB', 'LONGTEXT', 'LOOP',
    'LOW_PRIORITY', 'MASTER_BIND', 'MASTER_SSL_VERIFY_SERVER_CERT', 'MATCH',
    'MAXVALUE', 'MEDIUMBLOB', 'MEDIUMINT', 'MEDIUMTEXT', 'MIDDLEINT', 'MINUTE_MICROSECOND',
    'MINUTE_SECOND', 'MOD', 'MODIFIES', 'NATURAL', 'NOT', 'NO_WRITE_TO_BINLOG',
    'NULL', 'NUMERIC', 'ON', 'OPTIMIZE', 'OPTION', 'OPTIONALLY', 'OR', 'ORDER',
    'OUT', 'OUTER', 'OUTFILE', 'PARTITION', 'PRECISION', 'PRIMARY', 'PROCEDURE',
    'PURGE', 'RANGE', 'READ', 'READS', 'READ_WRITE', 'REAL', 'REFERENCES',
    'REGEXP', 'RELEASE', 'RENAME', 'REPEAT', 'REPLACE', 'REQUIRE', 'RESIGNAL',
    'RESTRICT', 'RETURN', 'REVOKE', 'RIGHT', 'RLIKE', 'SCHEMA', 'SCHEMAS',
    'SECOND_MICROSECOND', 'SELECT', 'SENSITIVE', 'SEPARATOR', 'SET', 'SHOW',
    'SIGNAL', 'SMALLINT', 'SPATIAL', 'SPECIFIC', 'SQL', 'SQLEXCEPTION', 'SQLSTATE',
    'SQLWARNING', 'SQL_BIG_RESULT', 'SQL_CALC_FOUND_ROWS', 'SQL_SMALL_RESULT',
    'SSL', 'STARTING', 'STORED', 'STRAIGHT_JOIN', 'TABLE', 'TERMINATED', 'THEN',
    'TINYBLOB', 'TINYINT', 'TINYTEXT', 'TO', 'TRAILING', 'TRIGGER', 'TRUE',
    'UNDO', 'UNION', 'UNIQUE', 'UNLOCK', 'UNSIGNED', 'UPDATE', 'USAGE', 'USE',
    'USING', 'UTC_DATE', 'UTC_TIME', 'UTC_TIMESTAMP', 'VALUES', 'VARBINARY',
    'VARCHAR', 'VARCHARACTER', 'VARYING', 'WHEN', 'WHERE', 'WHILE', 'WITH',
    'WRITE', 'XOR', 'YEAR_MONTH', 'ZEROFILL'
}

# Configure logging: INFO level, timestamped messages, emitted to the console.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s',
    handlers=[logging.StreamHandler()]
)
logger = logging.getLogger()  # root logger, shared by the whole script


def wrap_with_backticks(value: str) -> str:
    """Return *value* wrapped in backticks unless it is already wrapped.

    A value counts as "already wrapped" only when it is at least two
    characters long and both starts and ends with a backtick. The previous
    startswith/endswith test wrongly treated a lone '`' as wrapped because
    the single character satisfied both checks.
    """
    if len(value) >= 2 and value.startswith('`') and value.endswith('`'):
        return value
    return f'`{value}`'


def _quote_keyword_identifiers(values: List[str], issue_set: Set[str],
                               modifications: List[str], label: str) -> bool:
    """Backtick-quote MySQL reserved words found in *values*, in place.

    Each entry is compared against MYSQL_KEYWORDS after stripping any
    surrounding backticks/quotes. Matches are recorded in *issue_set*;
    actual rewrites are appended to *modifications* as "<label>: old → new"
    lines. Returns True if any entry was changed.
    """
    changed = False
    for i, value in enumerate(values):
        clean = str(value).strip('`\'"')
        if clean.upper() in MYSQL_KEYWORDS:
            issue_set.add(clean)
            # clean has had all backticks stripped, so quoting is a plain wrap
            fixed = f'`{clean}`'
            if value != fixed:
                values[i] = fixed
                modifications.append(f"{label}: {value} → {fixed}")
                changed = True
    return changed


def _fix_plugin_section(section: Dict, plugin_name: str,
                        issues: Dict[str, Set[str]],
                        modifications: List[str]) -> bool:
    """Fix keyword identifiers in one reader/writer section of a DataX job.

    Only sections whose "name" matches *plugin_name* (mysqlreader /
    mysqlwriter) are processed; columns are fixed first, then the table
    lists of every connection entry. Returns True if anything was changed.
    """
    if section.get('name') != plugin_name:
        return False
    params = section.get('parameter', {})
    changed = _quote_keyword_identifiers(
        params.get('column', []), issues['columns'], modifications, "Column")
    for conn in params.get('connection', []):
        if _quote_keyword_identifiers(
                conn.get('table', []), issues['tables'], modifications, "Table"):
            changed = True
    return changed


def check_and_fix_keywords_in_json(file_path: str) -> Tuple[Dict[str, Set[str]], bool, str]:
    """Check a DataX job JSON file for MySQL keywords and fix them in place.

    Scans the reader (mysqlreader) and writer (mysqlwriter) sections for
    column/table names that are MySQL reserved words and wraps them in
    backticks, rewriting the file only when something changed.

    Returns:
        (issues, modified, modification_log) where *issues* maps
        'columns'/'tables' to the set of keyword identifiers found,
        *modified* says whether the file was rewritten, and the log is a
        newline-joined list of rewrites (or "No modifications needed").
        On a parse error, returns (partial issues, False, "").
    """
    issues = {
        'columns': set(),
        'tables': set()
    }
    modifications: List[str] = []
    modified = False

    try:
        # Read first, then rewrite: the original reopened the file for
        # writing while the read handle was still open.
        with open(file_path, 'r', encoding='utf-8') as f:
            data = json.load(f)

        content = data.get('job', {}).get('content', [{}])[0]
        if _fix_plugin_section(content.get('reader', {}), 'mysqlreader',
                               issues, modifications):
            modified = True
        if _fix_plugin_section(content.get('writer', {}), 'mysqlwriter',
                               issues, modifications):
            modified = True

        # Write back only when something actually changed
        if modified:
            with open(file_path, 'w', encoding='utf-8') as f:
                json.dump(data, f, indent=2, ensure_ascii=False)

    except (json.JSONDecodeError, KeyError, IndexError) as e:
        logger.error(f"Error parsing file {file_path}: {str(e)}")
        return issues, False, ""

    # Generate modification log
    modification_log = "\n".join(modifications) if modifications else "No modifications needed"

    return issues, modified, modification_log


class DataXGenerator:
    """Generates one DataX job JSON file per table in the source database,
    then scans the generated files for MySQL reserved-word identifiers.
    """

    def __init__(self, config):
        """*config* is the parsed JSON configuration with 'source_db',
        'target_db' and 'datax' sections (see _build_jdbc_url / _build_setting_config
        for the keys read from each)."""
        self.config = config
        self.tables = self._get_source_tables()
        os.makedirs(config['datax']['job_dir'], exist_ok=True)
        os.makedirs(config['datax']['script_dir'], exist_ok=True)

    def _connect_source(self, dict_cursor=False):
        """Open a connection to the source database.

        Args:
            dict_cursor: when True, use utf8mb4 + DictCursor so rows come
                back as dicts (used by column introspection).

        Returns:
            An open pymysql connection; caller is responsible for closing it.
        """
        kwargs = {
            'host': self.config['source_db']['host'],
            'port': self.config['source_db']['port'],
            'user': self.config['source_db']['user'],
            'password': self.config['source_db']['password'],
            'database': self.config['source_db']['database'],
        }
        if dict_cursor:
            kwargs['charset'] = 'utf8mb4'
            kwargs['cursorclass'] = pymysql.cursors.DictCursor
        return pymysql.connect(**kwargs)

    def _get_source_tables(self):
        """Get all table names from the source database (raises on failure)."""
        conn = None
        try:
            conn = self._connect_source()
            with conn.cursor() as cursor:
                cursor.execute("SHOW TABLES")
                return [row[0] for row in cursor.fetchall()]
        except Exception as e:
            logger.error(f"Failed to get tables: {str(e)}")
            raise
        finally:
            if conn is not None:
                conn.close()

    def generate_all_artifacts(self):
        """Generate all artifacts (job files) and run the keyword check."""
        self._generate_job_files()
        self._check_keywords_in_jobs()
        logger.info(f"Generated {len(self.tables)} jobs")

    def _check_keywords_in_jobs(self):
        """Check all generated .json job files for MySQL keywords, fixing in place."""
        job_dir = self.config['datax']['job_dir']
        total_issues = 0
        modified_files = 0
        checked_files = 0

        for file_name in os.listdir(job_dir):
            if not file_name.endswith('.json'):
                continue
            checked_files += 1
            file_path = os.path.join(job_dir, file_name)
            issues, modified, _ = check_and_fix_keywords_in_json(file_path)

            current_issues = len(issues['columns']) + len(issues['tables'])
            total_issues += current_issues

            if current_issues > 0:
                logger.info(f"Found {current_issues} potential MySQL keyword issues in {file_name}")
                if modified:
                    modified_files += 1

        # Report the number of files actually scanned (the job dir may hold
        # files from earlier runs); the original reported len(self.tables).
        logger.info(f"Found {total_issues} potential MySQL keyword issues across {checked_files} files.")
        logger.info(f"Modified {modified_files} files to add backticks around MySQL keywords.")

    def _generate_job_files(self):
        """Generate one DataX job file per source table."""
        for table in self.tables:
            job_config = self._build_job_config(table)
            self._save_job_file(table, job_config)

    def _build_job_config(self, table):
        """Build the complete job configuration dict for one table."""
        return {
            "job": {
                "content": [{
                    "reader": self._build_reader_config(table),
                    "writer": self._build_writer_config(table)
                }],
                "setting": self._build_setting_config()
            }
        }

    def _get_table_columns(self, table):
        """
        Retrieve all column names for the specified table.

        Args:
            table (str): Table name to inspect

        Returns:
            list: Column names in ordinal order, original case
                (e.g., ["id", "app_name", "update_time"]),
                or ["*"] if column retrieval fails or yields nothing.
        """
        conn = None
        try:
            conn = self._connect_source(dict_cursor=True)
            with conn.cursor() as cursor:
                cursor.execute("""
                    SELECT COLUMN_NAME as column_name 
                    FROM information_schema.columns 
                    WHERE table_schema = %s 
                    AND table_name = %s
                    ORDER BY ordinal_position
                """, (self.config['source_db']['database'], table))

                results = cursor.fetchall()
                if not results:
                    logger.warning(f"Table {table} has no columns, falling back to wildcard")
                    return ["*"]

                # Key case varies with server/driver settings, so accept both
                columns = []
                for row in results:
                    column_name = row.get('column_name') or row.get('COLUMN_NAME')
                    if column_name:
                        columns.append(column_name)
                    else:
                        logger.warning(f"Could not determine column name from row: {row}")

                if not columns:
                    logger.warning(f"No valid columns found for table {table}, falling back to wildcard")
                    return ["*"]

                logger.debug(f"Retrieved columns for table {table}: {columns}")
                return columns

        except Exception as e:
            logger.error(f"Failed to get columns for table {table}: {str(e)}", exc_info=True)
            return ["*"]
        finally:
            if conn is not None:
                conn.close()

    def _build_reader_config(self, table):
        """Build the mysqlreader configuration for one table."""
        # _get_table_columns always returns a non-empty list (["*"] on
        # failure), so no None fallback is needed here.
        columns = self._get_table_columns(table)

        return {
            "name": "mysqlreader",
            "parameter": {
                "username": self.config['source_db']['user'],
                "password": self.config['source_db']['password'],
                "column": columns,
                "connection": [{
                    "jdbcUrl": [self._build_jdbc_url('source_db')],
                    "table": [table]
                }],
                "where": self.config['datax'].get('where', ""),
                "splitPk": self._get_split_pk(table)
            }
        }

    def _build_writer_config(self, table):
        """
        Build the mysqlwriter configuration, keeping columns consistent
        with the reader.

        Args:
            table (str): Target table name

        Returns:
            dict: Complete writer configuration
        """
        columns = self._get_table_columns(table)

        config = {
            "name": "mysqlwriter",
            "parameter": {
                "writeMode": self.config['datax'].get('write_mode', 'insert'),
                "username": self.config['target_db']['user'],
                "password": self.config['target_db']['password'],
                "column": columns,
                "connection": [{
                    "jdbcUrl": self._build_jdbc_url('target_db'),
                    "table": [table]
                }],
                "session": [
                    "SET SESSION sql_mode='ANSI_QUOTES'",
                    "SET SESSION time_zone='+8:00'"
                ]
            }
        }

        if config['parameter']['writeMode'] in ('update', 'replace'):
            # keyColumn must always be a list; _get_split_pk returns a single
            # column name (or None), so wrap it — the original could assign a
            # bare str when a primary key was found.
            pk = self._get_split_pk(table)
            config['parameter']['keyColumn'] = [pk] if pk else ['id']

        logger.info(f"Built writer config for table {table} with {len(columns)} columns")
        return config

    def _build_setting_config(self):
        """Build the speed/performance settings section."""
        return {
            "speed": {
                # DataX expects channel as a string; batchSize stays numeric
                "channel": str(self.config['datax'].get('channel', 16)),
                "batchSize": self.config['datax'].get('batch_size', 10240)
            }
        }

    def _build_jdbc_url(self, db_type):
        """Build a JDBC connection string for 'source_db' or 'target_db'."""
        db_config = self.config[db_type]
        params = {
            'useUnicode': 'true',
            'characterEncoding': 'utf-8',
            'useSSL': 'false'
        }
        param_str = '&'.join(f"{k}={v}" for k, v in params.items())
        return f"jdbc:mysql://{db_config['host']}:{db_config['port']}/{db_config['database']}?{param_str}"

    def _get_split_pk(self, table):
        """Get the table's primary-key column (for splitPk / keyColumn).

        Falls back to the first unique index column; returns None when the
        table has neither or the lookup fails.
        """
        conn = None
        try:
            conn = self._connect_source()
            with conn.cursor() as cursor:
                cursor.execute(f"SHOW KEYS FROM `{table}` WHERE Key_name = 'PRIMARY'")
                if result := cursor.fetchone():
                    return result[4]  # index 4 is Column_name in SHOW KEYS output

                cursor.execute(f"SHOW INDEX FROM `{table}` WHERE Non_unique = 0")
                if result := cursor.fetchone():
                    return result[4]

                return None
        except Exception as e:
            logger.warning(f"Cannot get splitPk for {table}: {str(e)}")
            return None
        finally:
            if conn is not None:
                conn.close()

    def _save_job_file(self, table, config):
        """Write one job configuration to <job_dir>/<table>.json."""
        path = os.path.join(self.config['datax']['job_dir'], f"{table}.json")
        # ensure_ascii=False emits raw non-ASCII chars, so pin the encoding
        # instead of relying on the platform default.
        with open(path, 'w', encoding='utf-8') as f:
            json.dump(config, f, indent=2, ensure_ascii=False)
        logger.debug(f"Generated job file: {path}")


def main(config_file):
    """Load the JSON config at *config_file* and generate all DataX artifacts.

    Exits with status 1 on any failure: missing config file, invalid JSON,
    or any error raised during generation.
    """
    try:
        with open(config_file) as f:
            config = json.load(f)

        generator = DataXGenerator(config)
        generator.generate_all_artifacts()

    except FileNotFoundError:
        logger.error(f"Config file not found: {config_file}")
        sys.exit(1)
    except json.JSONDecodeError:
        logger.error("Invalid JSON format in config file")
        sys.exit(1)
    except Exception as e:
        # logger.exception keeps the traceback for unexpected failures,
        # which the previous logger.error call silently dropped.
        logger.exception(f"Error: {str(e)}")
        sys.exit(1)


if __name__ == "__main__":
    # Expect exactly one argument: the path to the JSON config file.
    if len(sys.argv) != 2:
        print("Usage: python datax_mysql_keyword_checker.py config.json")
        sys.exit(1)
    main(sys.argv[1])