from flask import Flask, render_template, request, jsonify, session, redirect, url_for
import os
import re
import ssl
import psycopg2
from psycopg2 import sql
from functools import wraps
import logging
import sys
import argparse
from logging.handlers import RotatingFileHandler
import datetime
import configparser
import socket

# Logging system configuration
def setup_logger(debug_mode=False):
    """Configure the root logger with a console and a rotating file handler.

    Args:
        debug_mode: when True, log at DEBUG level; otherwise INFO.

    Returns:
        The configured root logger.
    """
    level = logging.DEBUG if debug_mode else logging.INFO
    logger = logging.getLogger()
    logger.setLevel(level)

    # Drop any pre-existing handlers so repeated calls don't duplicate output.
    for handler in logger.handlers[:]:
        logger.removeHandler(handler)

    # Console handler
    console_handler = logging.StreamHandler(sys.stdout)
    console_handler.setLevel(level)

    # File handler with rotation; exist_ok avoids the race between the
    # original existence check and the directory creation.
    log_dir = 'logs'
    os.makedirs(log_dir, exist_ok=True)

    log_file = os.path.join(log_dir, f"pgobjview_{datetime.datetime.now().strftime('%Y%m%d')}.log")
    file_handler = RotatingFileHandler(
        log_file,
        maxBytes=10 * 1024 * 1024,  # rotate after 10MB
        backupCount=5
    )
    file_handler.setLevel(level)

    # Shared log format for both handlers.
    formatter = logging.Formatter('[%(asctime)s] [%(levelname)s] %(message)s')
    console_handler.setFormatter(formatter)
    file_handler.setFormatter(formatter)

    logger.addHandler(console_handler)
    logger.addHandler(file_handler)

    return logger

# Parse command-line flags before creating the app so logging is configured first.
parser = argparse.ArgumentParser(description='PosgreSQL对象结构获取工具')
parser.add_argument('--debug', action='store_true', help='启用调试模式')
args, unknown = parser.parse_known_args()

# Module-level logger shared by the whole application.
logger = setup_logger(args.debug)

app = Flask(__name__)
# NOTE(review): hard-coded secret key; consider loading it from the environment.
app.secret_key = 'pgobjview-key'
app.config['SESSION_TYPE'] = 'filesystem'

# Browser session lifetime: after 1 hour the user must log in again.
# NOTE(review): Flask applies this only to sessions marked permanent — confirm
# that session.permanent is set when the session is created.
app.config['PERMANENT_SESSION_LIFETIME'] = 3600  # 1-hour session validity

# Request-logging decorator
def log_request(func):
    """Wrap a view so the request, its outcome, and any error are logged."""
    @wraps(func)
    def wrapper(*args, **kwargs):
        logger.debug(f'Request: {request.method} {request.path}')
        try:
            outcome = func(*args, **kwargs)
        except Exception as exc:
            logger.error(f'Error in {request.path}: {str(exc)}')
            raise
        logger.debug(f'Response: {request.path} - Success')
        return outcome
    return wrapper

def login_required(f):
    """Require a stored db_config in the session; otherwise send to login."""
    @wraps(f)
    def wrapper(*args, **kwargs):
        if 'db_config' in session:
            return f(*args, **kwargs)
        return redirect(url_for('index'))
    return wrapper

@app.route('/')
def index():
    """Render the login page."""
    login_page = 'login.html'
    return render_template(login_page)

@app.route('/main')
@login_required
def main():
    """Render the main page with the session's connection details."""
    config = session['db_config']
    return render_template('index.html', db_config=config)

@app.route('/connect', methods=['POST'])
def connect():
    """Validate the submitted database credentials and store them in the session.

    Opens (and immediately closes) a test connection; on success the
    connection parameters are saved in the session for later requests.

    Returns:
        JSON {'status': 'success'} or {'status': 'error', 'message': ...}.
    """
    # Collect the form fields once so the test connection and the session
    # store cannot drift apart.
    params = {
        'host': request.form['host'],
        'port': request.form['port'],
        'username': request.form['username'],
        'password': request.form['password'],
        'database': request.form['database']
    }
    try:
        conn = psycopg2.connect(
            host=params['host'],
            port=params['port'],
            user=params['username'],
            password=params['password'],
            database=params['database']
        )
        conn.close()
        # Mark the session permanent so the configured
        # PERMANENT_SESSION_LIFETIME (1 hour) actually applies;
        # non-permanent Flask sessions ignore that setting.
        session.permanent = True
        # NOTE(review): the raw password is kept in the session store;
        # consider a server-side secret store or re-prompting instead.
        session['db_config'] = params
        return jsonify({'status': 'success'})
    except Exception as e:
        return jsonify({'status': 'error', 'message': str(e)})

@app.route('/schemas', methods=['GET'])
@login_required
def get_schemas():
    """List all non-system schemas in the connected database.

    Returns:
        JSON {'status': 'success', 'schemas': [...]} or an error payload.
    """
    conn = None
    try:
        db_config = session['db_config']
        conn = psycopg2.connect(
            host=db_config['host'],
            port=db_config['port'],
            user=db_config['username'],
            password=db_config['password'],
            database=db_config['database']
        )
        cur = conn.cursor()
        # Exclude pg_* system schemas and information_schema.
        cur.execute("""
            SELECT nspname 
            FROM pg_namespace 
            WHERE nspname !~ '^pg_' AND nspname != 'information_schema'
            ORDER BY nspname
        """)
        schemas = [row[0] for row in cur.fetchall()]
        return jsonify({'status': 'success', 'schemas': schemas})
    except Exception as e:
        return jsonify({'status': 'error', 'message': str(e)})
    finally:
        # Release the connection even when the query raises (the original
        # leaked it on any exception).
        if conn is not None:
            conn.close()

@app.route('/tables', methods=['GET'])
@login_required
def get_tables():
    """List ordinary tables (relkind 'r') in the schema given by ?schema=.

    Returns:
        JSON {'status': 'success', 'tables': [...]} or an error payload.
    """
    conn = None
    try:
        db_config = session['db_config']
        conn = psycopg2.connect(
            host=db_config['host'],
            port=db_config['port'],
            user=db_config['username'],
            password=db_config['password'],
            database=db_config['database']
        )
        cur = conn.cursor()
        query = sql.SQL("""
            SELECT c.relname 
            FROM pg_class c
            JOIN pg_namespace n ON c.relnamespace = n.oid
            WHERE n.nspname = %s AND c.relkind = 'r'
            ORDER BY c.relname
        """)
        cur.execute(query, (request.args.get('schema'),))
        tables = [row[0] for row in cur.fetchall()]
        return jsonify({'status': 'success', 'tables': tables})
    except Exception as e:
        return jsonify({'status': 'error', 'message': str(e)})
    finally:
        # Release the connection even when the query raises (the original
        # leaked it on any exception).
        if conn is not None:
            conn.close()

@app.route('/views', methods=['GET'])
@login_required
def get_views():
    """List views (relkind 'v') in the schema given by ?schema=.

    Returns:
        JSON {'status': 'success', 'views': [...]} or an error payload.
    """
    conn = None
    try:
        db_config = session['db_config']
        conn = psycopg2.connect(
            host=db_config['host'],
            port=db_config['port'],
            user=db_config['username'],
            password=db_config['password'],
            database=db_config['database']
        )
        cur = conn.cursor()
        query = sql.SQL("""
            SELECT c.relname 
            FROM pg_class c
            JOIN pg_namespace n ON c.relnamespace = n.oid
            WHERE n.nspname = %s AND c.relkind = 'v'
            ORDER BY c.relname
        """)
        cur.execute(query, (request.args.get('schema'),))
        views = [row[0] for row in cur.fetchall()]
        return jsonify({'status': 'success', 'views': views})
    except Exception as e:
        return jsonify({'status': 'error', 'message': str(e)})
    finally:
        # Release the connection even when the query raises (the original
        # leaked it on any exception).
        if conn is not None:
            conn.close()

@app.route('/matviews', methods=['GET'])
@login_required
def get_matviews():
    """List materialized views (relkind 'm') in the schema given by ?schema=.

    Returns:
        JSON {'status': 'success', 'matviews': [...]} or an error payload.
    """
    conn = None
    try:
        db_config = session['db_config']
        conn = psycopg2.connect(
            host=db_config['host'],
            port=db_config['port'],
            user=db_config['username'],
            password=db_config['password'],
            database=db_config['database']
        )
        cur = conn.cursor()
        # relkind = 'm' already restricts pg_class to materialized views, so
        # the original extra join against pg_matviews was redundant; this now
        # mirrors the get_views query.
        query = sql.SQL("""
            SELECT c.relname 
            FROM pg_class c
            JOIN pg_namespace n ON c.relnamespace = n.oid
            WHERE n.nspname = %s AND c.relkind = 'm'
            ORDER BY c.relname
        """)
        cur.execute(query, (request.args.get('schema'),))
        matviews = [row[0] for row in cur.fetchall()]
        return jsonify({'status': 'success', 'matviews': matviews})
    except Exception as e:
        return jsonify({'status': 'error', 'message': str(e)})
    finally:
        # Release the connection even when the query raises (the original
        # leaked it on any exception).
        if conn is not None:
            conn.close()

@app.route('/search_objects', methods=['GET'])
@login_required
@log_request
def search_objects():
    """Search database objects by name, optionally within a single schema.

    Query params:
        search_term: substring to match, case-insensitive.
        schema: optional schema name; empty searches all non-system schemas.

    Searches tables, views, materialized views, sequences, enum/domain/range
    types, functions, procedures and triggers.

    Returns:
        JSON {'status': 'success', 'objects': [{'schema','name','type','oid'}]}
        or {'status': 'error', 'message': ...}.
    """
    conn = None
    try:
        db_config = session['db_config']
        conn = psycopg2.connect(
            host=db_config['host'],
            port=db_config['port'],
            user=db_config['username'],
            password=db_config['password'],
            database=db_config['database']
        )
        cur = conn.cursor()

        search_term = request.args.get('search_term', '')
        schema = request.args.get('schema', '')
        pattern = f'%{search_term}%'

        # pg_class.relkind values matched directly: table, view, matview,
        # sequence.  Enum/domain/range types live in pg_type, and functions,
        # procedures and triggers in pg_proc/pg_trigger — each has its own
        # UNION branch below, so they do not belong in this list (the
        # original list wrongly mixed in typtype codes 'e' and 'd' and a
        # duplicate 'r').
        relkinds = ['r', 'v', 'm', 'S']
        prokinds = ['f', 'p']  # function, procedure

        logger.info(f"Searching objects - Keyword: {search_term}, Schema: {schema or 'All schemas'}")

        if schema:
            # Search objects within one specific schema.
            query = """
            SELECT 
                schema_name, 
                object_name, 
                object_type, 
                oid, 
                table_name, 
                func_name, 
                type_name, 
                trigger_name, 
                table_kind, 
                func_kind
            FROM (
                -- 查询表、视图、物化视图、序列
                SELECT 
                    n.nspname::text AS schema_name,
                    c.relname::text AS object_name,
                    CASE c.relkind 
                        WHEN 'r' THEN 'table' 
                        WHEN 'v' THEN 'view' 
                        WHEN 'm' THEN 'matview' 
                        WHEN 'S' THEN 'sequence' 
                        ELSE 'unknown' 
                    END::text AS object_type,
                    NULL::oid AS oid,
                    c.relname::text AS table_name,
                    NULL::text AS func_name,
                    NULL::text AS type_name,
                    NULL::text AS trigger_name,
                    c.relkind::text AS table_kind,
                    NULL::text AS func_kind
                FROM pg_class c
                JOIN pg_namespace n ON c.relnamespace = n.oid
                WHERE n.nspname = %s
                AND c.relkind = ANY(%s)
                AND c.relname ILIKE %s
                
                UNION ALL
                
                -- 查询类型
                SELECT 
                    n.nspname::text AS schema_name,
                    t.typname::text AS object_name,
                    'type'::text AS object_type,
                    NULL::oid AS oid,
                    NULL::text AS table_name,
                    NULL::text AS func_name,
                    t.typname::text AS type_name,
                    NULL::text AS trigger_name,
                    NULL::text AS table_kind,
                    NULL::text AS func_kind
                FROM pg_type t
                JOIN pg_namespace n ON t.typnamespace = n.oid
                WHERE n.nspname = %s
                AND t.typtype IN ('e', 'd', 'r')
                AND t.typname ILIKE %s
                
                UNION ALL
                
                -- 查询函数和存储过程
                SELECT 
                    n.nspname::text AS schema_name,
                    (p.proname || '(' || pg_catalog.pg_get_function_arguments(p.oid) || ')')::text AS object_name,
                    CASE p.prokind 
                        WHEN 'f' THEN 'function' 
                        WHEN 'p' THEN 'procedure' 
                        ELSE 'unknown' 
                    END::text AS object_type,
                    p.oid::oid AS oid,
                    NULL::text AS table_name,
                    p.proname::text AS func_name,
                    NULL::text AS type_name,
                    NULL::text AS trigger_name,
                    NULL::text AS table_kind,
                    p.prokind::text AS func_kind
                FROM pg_proc p
                JOIN pg_namespace n ON p.pronamespace = n.oid
                WHERE n.nspname = %s
                AND p.prokind = ANY(%s)
                AND p.proname ILIKE %s
                
                UNION ALL
                
                -- 查询触发器
                SELECT 
                    n.nspname::text AS schema_name,
                    tr.tgname::text AS object_name,
                    'trigger'::text AS object_type,
                    NULL::oid AS oid,
                    NULL::text AS table_name,
                    NULL::text AS func_name,
                    NULL::text AS type_name,
                    tr.tgname::text AS trigger_name,
                    NULL::text AS table_kind,
                    NULL::text AS func_kind
                FROM pg_trigger tr
                JOIN pg_class c ON tr.tgrelid = c.oid
                JOIN pg_namespace n ON c.relnamespace = n.oid
                WHERE n.nspname = %s
                AND tr.tgname ILIKE %s
                AND NOT tr.tgisinternal
            ) AS combined_results
            ORDER BY schema_name, object_name;
            """
            logger.debug(f'Executing schema-specific query: {query}')
            logger.debug(f'Parameters: relkinds={relkinds}, search_term={search_term}, schema={schema}')
            # The argument tuple must follow placeholder order exactly.  The
            # query has ten placeholders; the original code passed only seven
            # values, mis-ordered, which broke every schema-scoped search.
            cur.execute(query, (
                schema, relkinds, pattern,   # relations (tables/views/matviews/sequences)
                schema, pattern,             # enum/domain/range types
                schema, prokinds, pattern,   # functions / procedures
                schema, pattern,             # triggers
            ))
        else:
            # Search objects across all non-system schemas.
            query = """
            SELECT 
                schema_name, 
                object_name, 
                object_type, 
                oid, 
                table_name, 
                func_name, 
                type_name, 
                trigger_name, 
                table_kind, 
                func_kind
            FROM (
                -- 查询表、视图、物化视图、序列
                SELECT 
                    n.nspname::text AS schema_name,
                    c.relname::text AS object_name,
                    CASE c.relkind 
                        WHEN 'r' THEN 'table' 
                        WHEN 'v' THEN 'view' 
                        WHEN 'm' THEN 'matview' 
                        WHEN 'S' THEN 'sequence' 
                        ELSE 'unknown' 
                    END::text AS object_type,
                    NULL::oid AS oid,
                    c.relname::text AS table_name,
                    NULL::text AS func_name,
                    NULL::text AS type_name,
                    NULL::text AS trigger_name,
                    c.relkind::text AS table_kind,
                    NULL::text AS func_kind
                FROM pg_class c
                JOIN pg_namespace n ON c.relnamespace = n.oid
                WHERE n.nspname !~ '^pg_|^information_schema'
                AND c.relkind = ANY(%s)
                AND c.relname ILIKE %s
                
                UNION ALL
                
                -- 查询类型
                SELECT 
                    n.nspname::text AS schema_name,
                    t.typname::text AS object_name,
                    'type'::text AS object_type,
                    NULL::oid AS oid,
                    NULL::text AS table_name,
                    NULL::text AS func_name,
                    t.typname::text AS type_name,
                    NULL::text AS trigger_name,
                    NULL::text AS table_kind,
                    NULL::text AS func_kind
                FROM pg_type t
                JOIN pg_namespace n ON t.typnamespace = n.oid
                WHERE n.nspname !~ '^pg_|^information_schema'
                AND t.typtype IN ('e', 'd', 'r')
                AND t.typname ILIKE %s
                
                UNION ALL
                
                -- 查询函数和存储过程
                SELECT 
                    n.nspname::text AS schema_name,
                    (p.proname || '(' || pg_catalog.pg_get_function_arguments(p.oid) || ')')::text AS object_name,
                    CASE p.prokind 
                        WHEN 'f' THEN 'function' 
                        WHEN 'p' THEN 'procedure' 
                        ELSE 'unknown' 
                    END::text AS object_type,
                    p.oid::oid AS oid,
                    NULL::text AS table_name,
                    p.proname::text AS func_name,
                    NULL::text AS type_name,
                    NULL::text AS trigger_name,
                    NULL::text AS table_kind,
                    p.prokind::text AS func_kind
                FROM pg_proc p
                JOIN pg_namespace n ON p.pronamespace = n.oid
                WHERE n.nspname !~ '^pg_|^information_schema'
                AND p.prokind = ANY(%s)
                AND p.proname ILIKE %s
                
                UNION ALL
                
                -- 查询触发器
                SELECT 
                    n.nspname::text AS schema_name,
                    tr.tgname::text AS object_name,
                    'trigger'::text AS object_type,
                    NULL::oid AS oid,
                    NULL::text AS table_name,
                    NULL::text AS func_name,
                    NULL::text AS type_name,
                    tr.tgname::text AS trigger_name,
                    NULL::text AS table_kind,
                    NULL::text AS func_kind
                FROM pg_trigger tr
                JOIN pg_class c ON tr.tgrelid = c.oid
                JOIN pg_namespace n ON c.relnamespace = n.oid
                WHERE n.nspname !~ '^pg_|^information_schema'
                AND tr.tgname ILIKE %s
                AND NOT tr.tgisinternal
            ) AS combined_results
            ORDER BY schema_name, object_name;
            """
            logger.debug(f'Executing global search query: {query}')
            logger.debug(f'Parameters: relkinds={relkinds}, search_term={search_term}')
            cur.execute(query, (relkinds, pattern, pattern, prokinds, pattern, pattern))

        results = []
        for row in cur.fetchall():
            # Row layout: schema_name, object_name, object_type, oid, ...
            if row and len(row) >= 4:
                results.append({
                    'schema': row[0],
                    'name': row[1],
                    'type': row[2],  # already mapped to a friendly name in SQL
                    'oid': row[3]
                })
            else:
                logger.debug(f'Ignoring invalid row: {row}')

        logger.info(f'Found {len(results)} matching objects')
        return jsonify({'status': 'success', 'objects': results})
    except Exception as e:
        logger.exception(f'Error searching objects: {str(e)}')
        return jsonify({'status': 'error', 'message': str(e)})
    finally:
        # Always release the connection, even on failure.
        if conn is not None:
            conn.close()

@app.route('/ddl', methods=['GET'])
@login_required
def get_ddl():
    """Return the DDL for the object named by ?schema=&table=&type=&oid=.

    Non-table object types are dispatched to dedicated helpers; tables are
    handled inline by reconstructing the CREATE TABLE statement, constraint
    ALTERs and COMMENT statements from the system catalogs.

    Returns:
        JSON {'status': 'success', 'ddl': ...} or an error payload.
    """
    schema = request.args.get('schema')
    table = request.args.get('table')
    object_type = request.args.get('type', 'table')  # object kind; defaults to table
    object_oid = request.args.get('oid')

    logger.debug(f"Fetching DDL - schema: {schema}, object: {table}, type: {object_type}, oid: {object_oid}")

    if not schema or not table:
        return jsonify({'status': 'error', 'message': '缺少必要参数'})

    # Dispatch non-table object types to their type-specific helpers.
    if object_type == 'view':
        return get_view_ddl(schema, table)
    elif object_type == 'matview':
        return get_matview_ddl(schema, table)
    elif object_type == 'sequence':
        return get_sequence_ddl(schema, table)
    elif object_type == 'type':
        return get_type_ddl(schema, table)
    elif object_type == 'index':
        return get_index_ddl(schema, table)
    elif object_type == 'function':
        return get_function_ddl(schema, table, object_oid)
    elif object_type == 'procedure':
        return get_procedure_ddl(schema, table, object_oid)
    elif object_type == 'trigger':
        return get_trigger_ddl(schema, table)

    # Default: reconstruct a table's DDL.  (The original file carried a
    # second, unreachable copy of this logic after the if/elif chain — every
    # branch above returns — so only this reachable copy is kept.)
    conn = None
    try:
        db_config = session['db_config']
        conn = psycopg2.connect(
            host=db_config['host'],
            port=db_config['port'],
            user=db_config['username'],
            password=db_config['password'],
            database=db_config['database']
        )
        cur = conn.cursor()

        query = """
        WITH table_info AS (
            SELECT
                c.oid AS table_oid,
                c.relname AS table_name,
                n.nspname AS schema_name,
                pg_catalog.obj_description(c.oid) AS table_description
            FROM pg_class c
            JOIN pg_namespace n ON c.relnamespace = n.oid
            WHERE n.nspname = %s AND c.relname = %s
              AND c.relkind = 'r'
        ),
        filtered_columns AS (
            SELECT
                t.table_oid,
                t.table_name,
                t.schema_name,
                t.table_description,
                a.attname AS column_name,
                pg_catalog.format_type(a.atttypid, a.atttypmod) AS data_type,
                a.attnotnull AS is_not_null,
                a.attnum,
                pg_catalog.col_description(a.attrelid, a.attnum) AS column_description,
                COALESCE(pg_catalog.pg_get_expr(ad.adbin, ad.adrelid), '') AS column_default
            FROM pg_attribute a
            JOIN table_info t ON a.attrelid = t.table_oid
            LEFT JOIN pg_attrdef ad ON (ad.adrelid = a.attrelid AND ad.adnum = a.attnum)
            WHERE
                a.attnum > 0
                AND NOT a.attisdropped
                AND a.attname NOT IN ('ctid', 'xmin', 'xmax', 'cmin', 'cmax', 'tableoid')
        ),
        constraints AS (
            SELECT
                con.conrelid AS table_oid,
                con.conname AS constraint_name,
                pg_catalog.pg_get_constraintdef(con.oid) AS constraint_def,
                con.contype
            FROM pg_constraint con
            JOIN table_info t ON con.conrelid = t.table_oid
        ),
        table_constraints AS (
            SELECT
                t.table_oid,
                t.table_name,
                t.schema_name,
                'ALTER TABLE ' || quote_ident(t.schema_name) || '.' || quote_ident(t.table_name) || 
                ' ADD CONSTRAINT ' || quote_ident(c.constraint_name) || ' ' || c.constraint_def || ';' AS constraint_statement
            FROM constraints c
            JOIN table_info t ON c.table_oid = t.table_oid
            WHERE c.contype IN ('p', 'u', 'f', 'c')
        ),
        column_definitions AS (
            SELECT
                schema_name,
                table_name,
                table_oid,
                string_agg(
                    '    ' || quote_ident(column_name) || ' ' || data_type ||
                    CASE WHEN is_not_null THEN ' NOT NULL' ELSE '' END ||
                    CASE WHEN column_default <> '' THEN ' DEFAULT ' || column_default ELSE '' END,
                    E',\n' ORDER BY attnum
                ) AS column_defs
            FROM filtered_columns
            GROUP BY schema_name, table_name, table_oid
        ),
        constraint_definitions AS (
            SELECT
                schema_name,
                table_name,
                table_oid,
                string_agg(constraint_statement, E'\n' ORDER BY constraint_statement) AS constraint_defs
            FROM table_constraints
            GROUP BY schema_name, table_name, table_oid
        ),
        comment_definitions AS (
            SELECT
                schema_name,
                table_name,
                table_oid,
                CASE WHEN MAX(table_description) IS NOT NULL THEN 
                    E'\nCOMMENT ON TABLE ' || quote_ident(schema_name) || '.' || quote_ident(table_name) || 
                    ' IS ' || quote_literal(MAX(table_description)) || ';' 
                    ELSE '' END ||
                COALESCE(
                    E'\n' || string_agg(
                        'COMMENT ON COLUMN ' || quote_ident(schema_name) || '.' || quote_ident(table_name) || '.' || quote_ident(column_name) || 
                        ' IS ' || quote_literal(column_description) || ';',
                        E'\n' ORDER BY attnum
                    ) FILTER (WHERE column_description IS NOT NULL),
                    ''
                ) AS comment_defs
            FROM filtered_columns
            GROUP BY schema_name, table_name, table_oid
        )
        SELECT
            'CREATE TABLE ' || quote_ident(cd.schema_name) || '.' || quote_ident(cd.table_name) || ' (' || E'\n' ||
            cd.column_defs || E'\n);' ||
            COALESCE(E'\n' || con.constraint_defs, '') ||
            COALESCE(com.comment_defs, '') AS create_table_statement
        FROM column_definitions cd
        LEFT JOIN constraint_definitions con ON cd.table_oid = con.table_oid
        LEFT JOIN comment_definitions com ON cd.table_oid = com.table_oid
        WHERE cd.schema_name = %s AND cd.table_name = %s
        """

        # Four parameters: table_info CTE filter plus the final WHERE clause.
        cur.execute(query, (schema, table, schema, table))
        result = cur.fetchone()

        if result is None or len(result) == 0 or result[0] is None:
            return jsonify({'status': 'error', 'message': '未找到表结构'})
        return jsonify({'status': 'success', 'ddl': result[0]})
    except Exception as e:
        logger.exception(f'Error fetching table DDL: {str(e)}')
        return jsonify({'status': 'error', 'message': str(e)})
    finally:
        # Release the connection even when the query raises.
        if conn is not None:
            conn.close()

def get_view_ddl(schema, view_name):
    """Build the CREATE OR REPLACE VIEW DDL (plus COMMENT ON statements) for one view.

    Connection settings come from the logged-in user's session ('db_config').
    Returns a Flask JSON response: {'status': 'success', 'ddl': ...} on success,
    {'status': 'error', 'message': ...} otherwise.
    """
    conn = None
    try:
        db_config = session['db_config']
        conn = psycopg2.connect(
            host=db_config['host'],
            port=db_config['port'],
            user=db_config['username'],
            password=db_config['password'],
            database=db_config['database']
        )
        cur = conn.cursor()

        # NOTE: the E'...' literals deliberately contain raw newlines so the
        # generated DDL is laid out across multiple lines.
        query = """
        WITH view_info AS (
            SELECT
                c.oid AS view_oid,
                c.relname AS view_name,
                n.nspname AS schema_name,
                pg_catalog.obj_description(c.oid) AS view_description,
                pg_catalog.pg_get_viewdef(c.oid, false) AS view_definition
            FROM pg_class c
            JOIN pg_namespace n ON c.relnamespace = n.oid
            WHERE n.nspname = %s AND c.relname = %s
              AND c.relkind = 'v'
        ),
        comment_definitions AS (
            SELECT
                v.view_oid,
                CASE WHEN v.view_description IS NOT NULL THEN 
                    E'
COMMENT ON VIEW ' || quote_ident(v.schema_name) || '.' || quote_ident(v.view_name) || 
                    ' IS ' || quote_literal(v.view_description) || ';' 
                    ELSE '' END ||
                COALESCE(
                    E'
' || string_agg(
                        'COMMENT ON COLUMN ' || quote_ident(v.schema_name) || '.' || quote_ident(v.view_name) || '.' || quote_ident(a.attname) || 
                        ' IS ' || quote_literal(pg_catalog.col_description(a.attrelid, a.attnum)) || ';',
                        E'
' ORDER BY a.attnum
                    ) FILTER (WHERE pg_catalog.col_description(a.attrelid, a.attnum) IS NOT NULL),
                    ''
                ) AS comment_defs
            FROM view_info v
            JOIN pg_attribute a ON a.attrelid = v.view_oid
            WHERE
                a.attnum > 0
                AND NOT a.attisdropped
            GROUP BY v.view_oid, v.schema_name, v.view_name, v.view_description
        )
        SELECT
            'CREATE OR REPLACE VIEW ' || quote_ident(v.schema_name) || '.' || quote_ident(v.view_name) || E' AS
' || 
            CASE WHEN v.view_definition ~ ';$' THEN v.view_definition ELSE v.view_definition || ';' END ||
            COALESCE(com.comment_defs, '') AS create_view_statement
        FROM view_info v
        LEFT JOIN comment_definitions com ON v.view_oid = com.view_oid
        WHERE v.schema_name = %s AND v.view_name = %s
        """

        logger.debug(f"执行视图SQL查询 - 参数: {schema}, {view_name}")
        # schema/view_name are bound twice: once in view_info, once in the final WHERE.
        cur.execute(query, (schema, view_name, schema, view_name))
        result = cur.fetchone()
        logger.debug(f"视图查询结果: {result}")

        if result is None or len(result) == 0 or result[0] is None:
            logger.debug("未找到视图结构")
            return jsonify({'status': 'error', 'message': '未找到视图结构'})
        # Log only the first 100 characters of the (possibly large) DDL.
        logger.debug(f"视图查询成功，返回DDL: {result[0][:100]}...")
        return jsonify({'status': 'success', 'ddl': result[0]})
    except Exception as e:
        logger.error(f"获取视图DDL错误: {str(e)}")
        return jsonify({'status': 'error', 'message': str(e)})
    finally:
        # Always release the connection; the original leaked it whenever an
        # exception fired before conn.close().
        if conn is not None:
            conn.close()

def get_matview_ddl(schema, matview_name):
    """Build the CREATE MATERIALIZED VIEW DDL for one materialized view.

    The generated script includes the view definition, its index definitions,
    and COMMENT ON statements for the view and its columns. Connection settings
    come from the logged-in user's session. Returns a Flask JSON response with
    'status'/'ddl' on success or 'status'/'message' on failure.
    """
    conn = None
    try:
        db_config = session['db_config']
        conn = psycopg2.connect(
            host=db_config['host'],
            port=db_config['port'],
            user=db_config['username'],
            password=db_config['password'],
            database=db_config['database']
        )
        cur = conn.cursor()

        # NOTE: the E'...' literals deliberately contain raw newlines so the
        # generated DDL is laid out across multiple lines.
        query = """
        WITH matview_info AS (
            SELECT
                c.oid AS matview_oid,
                c.relname AS matview_name,
                n.nspname AS schema_name,
                pg_catalog.obj_description(c.oid) AS matview_description,
                m.definition AS matview_definition,
                m.ispopulated
            FROM pg_class c
            JOIN pg_namespace n ON c.relnamespace = n.oid
            JOIN pg_matviews m ON m.matviewname = c.relname AND m.schemaname = n.nspname
            WHERE n.nspname = %s AND c.relname = %s
              AND c.relkind = 'm'
        ),
        matview_columns AS (
            SELECT
                m.matview_oid,
                m.matview_name,
                m.schema_name,
                m.matview_description,
                a.attname AS column_name,
                pg_catalog.format_type(a.atttypid, a.atttypmod) AS data_type,
                a.attnum,
                pg_catalog.col_description(a.attrelid, a.attnum) AS column_description
            FROM pg_attribute a
            JOIN matview_info m ON a.attrelid = m.matview_oid
            WHERE
                a.attnum > 0
                AND NOT a.attisdropped
        ),
        indexes AS (
            SELECT
                i.indrelid AS matview_oid,
                pg_get_indexdef(i.indexrelid) AS index_definition
            FROM pg_index i
            JOIN pg_class c ON c.oid = i.indexrelid
            WHERE c.relkind = 'i'
        ),
        comment_definitions AS (
            SELECT
                m.matview_oid,
                CASE WHEN m.matview_description IS NOT NULL THEN 
                    E'
COMMENT ON MATERIALIZED VIEW ' || quote_ident(m.schema_name) || '.' || quote_ident(m.matview_name) || 
                    ' IS ' || quote_literal(m.matview_description) || ';' 
                    ELSE '' END ||
                COALESCE(
                    E'
' || string_agg(
                        'COMMENT ON COLUMN ' || quote_ident(m.schema_name) || '.' || quote_ident(m.matview_name) || '.' || quote_ident(c.column_name) || 
                        ' IS ' || quote_literal(c.column_description) || ';',
                        E'
' ORDER BY c.attnum
                    ) FILTER (WHERE c.column_description IS NOT NULL),
                    ''
                ) AS comment_defs
            FROM matview_info m
            JOIN matview_columns c ON m.matview_oid = c.matview_oid
            GROUP BY m.matview_oid, m.schema_name, m.matview_name, m.matview_description
        ),
        index_definitions AS (
            SELECT
                matview_oid,
                string_agg(index_definition || ';', E'
') AS index_defs
            FROM indexes
            GROUP BY matview_oid
        )
        SELECT
            'CREATE MATERIALIZED VIEW ' || quote_ident(m.schema_name) || '.' || quote_ident(m.matview_name) || 
            CASE WHEN m.ispopulated THEN '' ELSE ' WITH NO DATA' END || E' AS
' || 
            m.matview_definition || ';' ||
            COALESCE(E'
' || idx.index_defs, '') ||
            COALESCE(com.comment_defs, '') AS create_matview_statement
        FROM matview_info m
        LEFT JOIN index_definitions idx ON m.matview_oid = idx.matview_oid
        LEFT JOIN comment_definitions com ON m.matview_oid = com.matview_oid
        WHERE m.schema_name = %s AND m.matview_name = %s
        """

        logger.debug(f"执行物化视图SQL查询 - 参数: {schema}, {matview_name}")
        # schema/matview_name are bound twice: once in matview_info, once in the final WHERE.
        cur.execute(query, (schema, matview_name, schema, matview_name))
        result = cur.fetchone()
        logger.debug(f"物化视图查询结果: {result}")

        if result is None or len(result) == 0 or result[0] is None:
            logger.debug("未找到物化视图结构")
            return jsonify({'status': 'error', 'message': '未找到物化视图结构'})
        # Log only the first 100 characters of the (possibly large) DDL.
        logger.debug(f"物化视图查询成功，返回DDL: {result[0][:100]}...")
        return jsonify({'status': 'success', 'ddl': result[0]})
    except Exception as e:
        logger.error(f"获取物化视图DDL错误: {str(e)}")
        return jsonify({'status': 'error', 'message': str(e)})
    finally:
        # Always release the connection; the original leaked it whenever an
        # exception fired before conn.close().
        if conn is not None:
            conn.close()

@app.route('/types', methods=['GET'])
@login_required
def get_types():
    """List user-defined types (enum 'e', domain 'd', range 'r') in ?schema=.

    Returns JSON: {'status': 'success', 'types': [typname, ...]} on success,
    {'status': 'error', 'message': ...} otherwise.
    """
    conn = None
    try:
        db_config = session['db_config']
        conn = psycopg2.connect(
            host=db_config['host'],
            port=db_config['port'],
            user=db_config['username'],
            password=db_config['password'],
            database=db_config['database']
        )
        cur = conn.cursor()
        query = sql.SQL("""
            SELECT t.typname, n.nspname
            FROM pg_type t
            JOIN pg_namespace n ON t.typnamespace = n.oid
            WHERE n.nspname = %s 
              AND t.typtype IN ('e', 'd', 'r')
              AND n.nspname !~ '^pg_|information_schema'
            ORDER BY t.typname
        """)
        cur.execute(query, (request.args.get('schema'),))
        # Only typname (column 0) is surfaced; nspname is selected but unused.
        types = [row[0] for row in cur.fetchall()]
        return jsonify({'status': 'success', 'types': types})
    except Exception as e:
        return jsonify({'status': 'error', 'message': str(e)})
    finally:
        # Always release the connection; the original leaked it whenever an
        # exception fired before conn.close().
        if conn is not None:
            conn.close()

def get_type_ddl(schema, type_name):
    """Build the CREATE TYPE / CREATE DOMAIN DDL for one user-defined type.

    Handles enum ('e'), domain ('d') and range ('r') types, appending a
    COMMENT ON statement when a description exists. Connection settings come
    from the logged-in user's session. Returns a Flask JSON response with
    'status'/'ddl' on success or 'status'/'message' on failure.
    """
    conn = None
    try:
        db_config = session['db_config']
        conn = psycopg2.connect(
            host=db_config['host'],
            port=db_config['port'],
            user=db_config['username'],
            password=db_config['password'],
            database=db_config['database']
        )
        cur = conn.cursor()

        query = """
        WITH type_info AS (
            SELECT DISTINCT ON (t.oid)
                t.oid AS type_oid,
                t.typname AS type_name,
                n.nspname AS schema_name,
                t.typtype AS type_category,
                pg_catalog.obj_description(t.oid) AS type_description
            FROM pg_type t
            JOIN pg_namespace n ON n.oid = t.typnamespace
            WHERE 
                t.typtype IN ('e', 'd', 'r')
                AND n.nspname !~ '^pg_|information_schema'
                AND n.nspname = %s
                AND t.typname = %s
        ),
        enum_types AS (
            SELECT
                t.oid AS type_oid,
                string_agg(quote_literal(e.enumlabel), ', ' ORDER BY e.enumsortorder) AS enum_values
            FROM pg_type t
            JOIN pg_enum e ON e.enumtypid = t.oid
            JOIN pg_namespace n ON n.oid = t.typnamespace
            WHERE 
                t.typtype = 'e'
                AND n.nspname = %s
                AND t.typname = %s
            GROUP BY t.oid
        ),
        domain_types AS (
            SELECT
                t.oid AS type_oid,
                format_type(t.typbasetype, NULL) AS base_type,
                t.typnotnull AS not_null,
                t.typdefault AS default_value,
                c.collname AS collation_name,
                pg_catalog.obj_description(t.oid) AS domain_description
            FROM pg_type t
            JOIN pg_namespace n ON n.oid = t.typnamespace
            LEFT JOIN pg_collation c ON c.oid = t.typcollation
            WHERE 
                t.typtype = 'd'
                AND n.nspname = %s
                AND t.typname = %s
        ),
        range_types AS (
            SELECT
                t.oid AS type_oid,
                format_type(r.rngsubtype, NULL) AS subtype,
                c.collname AS collation_name,
                opc.opcname AS opclass_name
            FROM pg_type t
            JOIN pg_range r ON r.rngtypid = t.oid
            JOIN pg_namespace n ON n.oid = t.typnamespace
            LEFT JOIN pg_collation c ON c.oid = r.rngcollation
            LEFT JOIN pg_opclass opc ON opc.oid = r.rngsubopc
            WHERE 
                t.typtype = 'r'
                AND n.nspname = %s
                AND t.typname = %s
        )
        SELECT
            CASE 
                WHEN type_info.type_category = 'e' THEN
                    'CREATE TYPE ' || quote_ident(type_info.schema_name) || '.' || quote_ident(type_info.type_name) ||
                    ' AS ENUM (' || enum_types.enum_values || ');' ||
                    CASE WHEN type_info.type_description IS NOT NULL THEN 
                        E'\nCOMMENT ON TYPE ' || quote_ident(type_info.schema_name) || '.' || quote_ident(type_info.type_name) || 
                        ' IS ' || quote_literal(type_info.type_description) || ';' 
                        ELSE '' END
                WHEN type_info.type_category = 'd' THEN
                    'CREATE DOMAIN ' || quote_ident(type_info.schema_name) || '.' || quote_ident(type_info.type_name) ||
                    ' AS ' || domain_types.base_type ||
                    CASE WHEN domain_types.not_null THEN ' NOT NULL' ELSE '' END ||
                    CASE WHEN domain_types.default_value IS NOT NULL THEN ' DEFAULT ' || domain_types.default_value ELSE '' END ||
                    CASE WHEN domain_types.collation_name IS NOT NULL THEN ' COLLATE ' || quote_ident(domain_types.collation_name) ELSE '' END || ';' ||
                    CASE WHEN domain_types.domain_description IS NOT NULL THEN 
                        E'\nCOMMENT ON DOMAIN ' || quote_ident(type_info.schema_name) || '.' || quote_ident(type_info.type_name) || 
                        ' IS ' || quote_literal(domain_types.domain_description) || ';' 
                        ELSE '' END
                WHEN type_info.type_category = 'r' THEN
                    'CREATE TYPE ' || quote_ident(type_info.schema_name) || '.' || quote_ident(type_info.type_name) ||
                    ' AS RANGE (' ||
                    'SUBTYPE = ' || range_types.subtype ||
                    CASE WHEN range_types.collation_name IS NOT NULL THEN ', COLLATION = ' || quote_ident(range_types.collation_name) ELSE '' END ||
                    CASE WHEN range_types.opclass_name IS NOT NULL THEN ', SUBTYPE_OPCLASS = ' || quote_ident(range_types.opclass_name) ELSE '' END ||
                    ');' ||
                    CASE WHEN type_info.type_description IS NOT NULL THEN 
                        E'\nCOMMENT ON TYPE ' || quote_ident(type_info.schema_name) || '.' || quote_ident(type_info.type_name) || 
                        ' IS ' || quote_literal(type_info.type_description) || ';' 
                        ELSE '' END
                ELSE ''
            END AS create_type_statement
        FROM type_info
        LEFT JOIN enum_types ON enum_types.type_oid = type_info.type_oid
        LEFT JOIN domain_types ON domain_types.type_oid = type_info.type_oid
        LEFT JOIN range_types ON range_types.type_oid = type_info.type_oid
        """

        logger.debug(f"执行SQL查询 - 参数: {schema}, {type_name}")
        # The (schema, type_name) pair is bound once per CTE: type_info,
        # enum_types, domain_types and range_types — four pairs in total.
        cur.execute(query, (schema, type_name, schema, type_name, schema, type_name, schema, type_name))
        result = cur.fetchone()
        logger.debug(f"查询结果: {result}")

        if result is None or len(result) == 0:
            logger.debug("未找到类型结构")
            return jsonify({'status': 'error', 'message': '未找到类型结构'})
        # The CASE falls through to '' for unexpected type categories, so an
        # empty string also counts as "not found".
        if result[0] is None or result[0] == '':
            logger.debug("未找到类型结构 - 结果第一项为空")
            return jsonify({'status': 'error', 'message': '未找到类型结构或类型定义为空'})
        # Log only the first 100 characters of the (possibly large) DDL.
        logger.debug(f"查询成功，返回DDL: {result[0][:100]}...")
        return jsonify({'status': 'success', 'ddl': result[0]})
    except Exception as e:
        return jsonify({'status': 'error', 'message': str(e)})
    finally:
        # Always release the connection; the original leaked it whenever an
        # exception fired before conn.close().
        if conn is not None:
            conn.close()

@app.route('/search_functions', methods=['GET'])
@login_required
def search_functions():
    """Search functions/procedures by name; overloads are distinguished by their full argument lists."""
    try:
        cfg = session['db_config']
        conn = psycopg2.connect(
            host=cfg['host'],
            port=cfg['port'],
            user=cfg['username'],
            password=cfg['password'],
            database=cfg['database']
        )
        cur = conn.cursor()

        term = request.args.get('search_term', '')
        target_schema = request.args.get('schema', '')

        # A term such as "foo(int, text)" targets a specific overload; in
        # either case we match on the part before the opening parenthesis.
        name_part = term.split('(')[0].strip() if '(' in term else term

        query = """
            SELECT 
                p.proname AS name,
                p.proname || '(' || pg_catalog.pg_get_function_arguments(p.oid) || ')' AS display_name,
                CASE WHEN p.prokind = 'p' THEN 'procedure' ELSE 'function' END AS object_type,
                p.oid
            FROM pg_proc p
            JOIN pg_namespace n ON n.oid = p.pronamespace
            WHERE n.nspname = %s
              AND p.proname ILIKE %s
              AND n.nspname !~ '^pg_|information_schema'
            ORDER BY p.proname, pg_catalog.pg_get_function_identity_arguments(p.oid)
        """
        cur.execute(query, (target_schema, f"%{name_part}%"))

        matches = []
        for proname, display_name, object_type, _oid in cur.fetchall():
            matches.append({'name': proname, 'display_name': display_name, 'type': object_type})

        return jsonify({'success': True, 'data': matches})
    except Exception as e:
        return jsonify({'success': False, 'error': str(e)})
    finally:
        if 'cur' in locals(): cur.close()
        if 'conn' in locals(): conn.close()

@app.route('/functions', methods=['GET'])
@login_required
def get_functions():
    """List plain functions (prokind = 'f') in the schema given by ?schema=.

    display_name carries the full argument list so overloads can be told
    apart. Returns JSON with 'status'/'functions' on success or
    'status'/'message' on failure.
    """
    conn = None
    try:
        db_config = session['db_config']
        conn = psycopg2.connect(
            host=db_config['host'],
            port=db_config['port'],
            user=db_config['username'],
            password=db_config['password'],
            database=db_config['database']
        )
        cur = conn.cursor()
        query = sql.SQL("""
            SELECT 
                p.proname AS name,
                p.proname || '(' || pg_catalog.pg_get_function_arguments(p.oid) || ')' AS display_name,
                p.oid AS oid
            FROM pg_proc p
            JOIN pg_namespace n ON n.oid = p.pronamespace
            WHERE n.nspname = %s
              AND p.prokind = 'f'
              AND n.nspname !~ '^pg_|information_schema'
            ORDER BY p.proname, pg_catalog.pg_get_function_identity_arguments(p.oid)
        """)
        cur.execute(query, (request.args.get('schema'),))
        functions = [{'name': row[0], 'display_name': row[1], 'oid': row[2]} for row in cur.fetchall()]
        return jsonify({'status': 'success', 'functions': functions})
    except Exception as e:
        return jsonify({'status': 'error', 'message': str(e)})
    finally:
        # Always release the connection; the original leaked it whenever an
        # exception fired before conn.close().
        if conn is not None:
            conn.close()

@app.route('/procedures', methods=['GET'])
@login_required
def get_procedures():
    """List stored procedures (prokind = 'p') in the schema given by ?schema=.

    display_name carries the full argument list so overloads can be told
    apart. Returns JSON with 'status'/'procedures' on success or
    'status'/'message' on failure.
    """
    conn = None
    try:
        db_config = session['db_config']
        conn = psycopg2.connect(
            host=db_config['host'],
            port=db_config['port'],
            user=db_config['username'],
            password=db_config['password'],
            database=db_config['database']
        )
        cur = conn.cursor()
        query = sql.SQL("""
            SELECT 
                p.proname AS name,
                p.proname || '(' || pg_catalog.pg_get_function_arguments(p.oid) || ')' AS display_name,
                p.oid AS oid
            FROM pg_proc p
            JOIN pg_namespace n ON n.oid = p.pronamespace
            WHERE n.nspname = %s
              AND p.prokind = 'p'
              AND n.nspname !~ '^pg_|information_schema'
            ORDER BY p.proname, pg_catalog.pg_get_function_identity_arguments(p.oid)
        """)
        cur.execute(query, (request.args.get('schema'),))
        procedures = [{'name': row[0], 'display_name': row[1], 'oid': row[2]} for row in cur.fetchall()]
        return jsonify({'status': 'success', 'procedures': procedures})
    except Exception as e:
        return jsonify({'status': 'error', 'message': str(e)})
    finally:
        # Always release the connection; the original leaked it whenever an
        # exception fired before conn.close().
        if conn is not None:
            conn.close()

@app.route('/triggers', methods=['GET'])
@login_required
def get_triggers():
    """List user-defined (non-internal) triggers in the schema given by ?schema=.

    Returns JSON with 'status'/'triggers' on success or 'status'/'message'
    on failure.
    """
    conn = None
    try:
        db_config = session['db_config']
        conn = psycopg2.connect(
            host=db_config['host'],
            port=db_config['port'],
            user=db_config['username'],
            password=db_config['password'],
            database=db_config['database']
        )
        cur = conn.cursor()
        query = sql.SQL("""
            SELECT t.tgname
            FROM pg_trigger t
            JOIN pg_class c ON c.oid = t.tgrelid
            JOIN pg_namespace n ON n.oid = c.relnamespace
            WHERE n.nspname = %s
              AND NOT t.tgisinternal
              AND n.nspname !~ '^pg_|information_schema'
            ORDER BY t.tgname
        """)
        cur.execute(query, (request.args.get('schema'),))
        triggers = [row[0] for row in cur.fetchall()]
        return jsonify({'status': 'success', 'triggers': triggers})
    except Exception as e:
        return jsonify({'status': 'error', 'message': str(e)})
    finally:
        # Always release the connection; the original leaked it whenever an
        # exception fired before conn.close().
        if conn is not None:
            conn.close()

@app.route('/sequences', methods=['GET'])
@login_required
def get_sequences():
    """List sequences (relkind = 'S') in the schema given by ?schema=.

    Returns JSON with 'status'/'sequences' on success or 'status'/'message'
    on failure.
    """
    conn = None
    try:
        db_config = session['db_config']
        conn = psycopg2.connect(
            host=db_config['host'],
            port=db_config['port'],
            user=db_config['username'],
            password=db_config['password'],
            database=db_config['database']
        )
        cur = conn.cursor()
        query = sql.SQL("""
            SELECT c.relname 
            FROM pg_class c
            JOIN pg_namespace n ON c.relnamespace = n.oid
            WHERE n.nspname = %s AND c.relkind = 'S'
            ORDER BY c.relname
        """)
        cur.execute(query, (request.args.get('schema'),))
        sequences = [row[0] for row in cur.fetchall()]
        return jsonify({'status': 'success', 'sequences': sequences})
    except Exception as e:
        return jsonify({'status': 'error', 'message': str(e)})
    finally:
        # Always release the connection; the original leaked it whenever an
        # exception fired before conn.close().
        if conn is not None:
            conn.close()

def get_index_ddl(schema, table_name):
    """Build CREATE INDEX / PRIMARY KEY DDL for every index on one table.

    Each index's statement includes its WHERE predicate, storage options,
    tablespace and COMMENT ON INDEX where present. Connection settings come
    from the logged-in user's session. Returns a Flask JSON response with
    'status'/'ddl' (statements separated by blank lines) or 'status'/'message'.
    """
    conn = None
    try:
        db_config = session['db_config']
        conn = psycopg2.connect(
            host=db_config['host'],
            port=db_config['port'],
            user=db_config['username'],
            password=db_config['password'],
            database=db_config['database']
        )
        cur = conn.cursor()

        # NOTE(review): inside the column subquery, the unnest alias ic(key,
        # ordinality) shadows the outer pg_class alias ic — confirm the ORDER BY
        # ic.ordinality resolves to the intended (inner) alias.
        query = """
        SELECT 
            'CREATE' ||
            CASE WHEN ix.indisunique THEN ' UNIQUE' ELSE '' END ||  -- 修正为 ix.indisunique
            CASE WHEN ix.indisprimary THEN ' PRIMARY KEY' ELSE ' INDEX' END ||  -- 修正为 ix.indisprimary
            ' ' || quote_ident(n.nspname) || '.' || quote_ident(ic.relname) ||
            ' ON ' || quote_ident(n.nspname) || '.' || quote_ident(c.relname) ||
            ' USING ' || am.amname || ' (' || 
                (SELECT string_agg(
                    CASE WHEN a.attname IS NOT NULL THEN quote_ident(a.attname)
                         ELSE pg_catalog.pg_get_expr(ix.indexprs, ix.indrelid) END,
                    ', ' ORDER BY ic.ordinality
                ) FROM unnest(ix.indkey) WITH ORDINALITY AS ic(key, ordinality)
                LEFT JOIN pg_attribute a ON a.attrelid = ix.indrelid AND a.attnum = ic.key) || ')' ||
            CASE WHEN ix.indpred IS NOT NULL THEN ' WHERE ' || pg_catalog.pg_get_expr(ix.indpred, ix.indrelid) ELSE '' END ||
            CASE WHEN ic.reloptions IS NOT NULL THEN ' WITH (' || 
                (SELECT string_agg(option_name || '=' || option_value, ', ') 
                 FROM pg_options_to_table(ic.reloptions)) || ')' ELSE '' END ||
            CASE WHEN ts.spcname IS NOT NULL THEN ' TABLESPACE ' || quote_ident(ts.spcname) ELSE '' END || ';' ||
            CASE WHEN d.description IS NOT NULL THEN 
                E'\nCOMMENT ON INDEX ' || quote_ident(n.nspname) || '.' || quote_ident(ic.relname) || 
                ' IS ' || quote_literal(d.description) || ';' 
                ELSE '' END AS create_index_ddl
        FROM pg_index ix
        JOIN pg_class c ON c.oid = ix.indrelid
        JOIN pg_class ic ON ic.oid = ix.indexrelid
        JOIN pg_namespace n ON n.oid = ic.relnamespace
        JOIN pg_am am ON am.oid = ic.relam
        LEFT JOIN pg_tablespace ts ON ts.oid = ic.reltablespace
        LEFT JOIN pg_description d ON d.objoid = ic.oid AND d.classoid = 'pg_class'::regclass
        WHERE c.relkind IN ('r', 'm', 'p')
          AND ic.relkind IN ('i', 'I')
          AND n.nspname = %s
          AND c.relname = %s
          AND n.nspname !~ '^pg_|information_schema'
        ORDER BY n.nspname, c.relname, ic.relname;
        """

        cur.execute(query, (schema, table_name))
        results = cur.fetchall()

        if not results:
            return jsonify({'status': 'error', 'message': '未找到索引信息'})

        # One statement per index, separated by a blank line.
        ddl = '\n\n'.join([row[0] for row in results])
        return jsonify({'status': 'success', 'ddl': ddl})
    except Exception as e:
        return jsonify({'status': 'error', 'message': str(e)})
    finally:
        # Always release the connection; the original leaked it whenever an
        # exception fired before conn.close().
        if conn is not None:
            conn.close()

def get_function_ddl(schema, function_name, object_oid):
    """Build the CREATE FUNCTION DDL (plus COMMENT ON FUNCTION) for one function.

    The lookup is by OID, which pins down a single overload; function_name is
    kept for interface compatibility with callers. Returns a Flask JSON
    response with 'status'/'ddl' on success or 'status'/'message' on failure.
    """
    conn = None
    try:
        # Validate the OID before touching the database.
        if not object_oid:
            return jsonify({'status': 'error', 'message': '缺少函数OID参数'})
        try:
            oid_num = int(object_oid)
        except ValueError:
            return jsonify({'status': 'error', 'message': f'无效的OID值: {object_oid}'})

        db_config = session['db_config']
        conn = psycopg2.connect(
            host=db_config['host'],
            port=db_config['port'],
            user=db_config['username'],
            password=db_config['password'],
            database=db_config['database']
        )
        cur = conn.cursor()

        query = """
        SELECT 
            pg_catalog.pg_get_functiondef(p.oid) || E'\n' ||
            CASE WHEN d.description IS NOT NULL THEN 
                'COMMENT ON FUNCTION ' || quote_ident(n.nspname) || '.' || quote_ident(p.proname) || 
                '(' || pg_catalog.pg_get_function_arguments(p.oid) || ')' || 
                ' IS ' || quote_literal(d.description) || ';' 
                ELSE '' END AS function_ddl
        FROM pg_catalog.pg_proc p
        JOIN pg_catalog.pg_namespace n ON n.oid = p.pronamespace
        JOIN pg_catalog.pg_language l ON l.oid = p.prolang
        LEFT JOIN pg_catalog.pg_description d ON d.objoid = p.oid AND d.classoid = 'pg_proc'::regclass
        WHERE n.nspname = %s
          AND p.oid = %s  
          AND p.prokind = 'f'
          AND n.nspname !~ '^pg_|information_schema'
        """

        cur.execute(query, (schema, oid_num))
        result = cur.fetchone()

        if result is None or len(result) == 0 or result[0] is None:
            return jsonify({'status': 'error', 'message': '未找到函数定义'})
        # Collapse "$ function $" / "$ procedure $" back into valid dollar-quote
        # tags: downstream SQL formatting (in index.html) can insert spaces that
        # would otherwise make the emitted DDL unexecutable.
        ddl = re.sub(r'\$\s*function\s*\$', '$function$', result[0])
        ddl = re.sub(r'\$\s*procedure\s*\$', '$procedure$', ddl)
        return jsonify({'status': 'success', 'ddl': ddl})
    except Exception as e:
        return jsonify({'status': 'error', 'message': str(e)})
    finally:
        # Always release the connection; the original leaked it whenever an
        # exception fired before conn.close().
        if conn is not None:
            conn.close()

def get_procedure_ddl(schema, procedure_name, object_oid):
    """Build the CREATE PROCEDURE DDL (plus COMMENT ON PROCEDURE) for one procedure.

    The lookup is by OID, which pins down a single overload; procedure_name is
    kept for interface compatibility with callers. Returns a Flask JSON
    response with 'status'/'ddl' on success or 'status'/'message' on failure.
    """
    conn = None
    try:
        # Validate the OID before touching the database (guard-clause style,
        # consistent with get_function_ddl).
        if not object_oid:
            return jsonify({'status': 'error', 'message': '缺少OID参数'})
        try:
            oid_num = int(object_oid)
        except ValueError:
            return jsonify({'status': 'error', 'message': f'无效的OID值: {object_oid}'})

        db_config = session['db_config']
        conn = psycopg2.connect(
            host=db_config['host'],
            port=db_config['port'],
            user=db_config['username'],
            password=db_config['password'],
            database=db_config['database']
        )
        cur = conn.cursor()

        query = """
        SELECT 
            pg_catalog.pg_get_functiondef(p.oid) || E'\n' ||
            CASE WHEN d.description IS NOT NULL THEN 
                'COMMENT ON PROCEDURE ' || quote_ident(n.nspname) || '.' || quote_ident(p.proname) || 
                '(' || pg_catalog.pg_get_function_arguments(p.oid) || ')' || 
                ' IS ' || quote_literal(d.description) || ';' 
                ELSE '' END AS procedure_ddl
        FROM pg_catalog.pg_proc p
        JOIN pg_catalog.pg_namespace n ON n.oid = p.pronamespace
        JOIN pg_catalog.pg_language l ON l.oid = p.prolang
        LEFT JOIN pg_catalog.pg_description d ON d.objoid = p.oid AND d.classoid = 'pg_proc'::regclass
        WHERE n.nspname = %s
          AND p.oid = %s
          AND p.prokind = 'p'
          AND n.nspname !~ '^pg_|information_schema'
        """

        cur.execute(query, (schema, oid_num))
        result = cur.fetchone()

        if result is None or len(result) == 0 or result[0] is None:
            return jsonify({'status': 'error', 'message': '未找到存储过程定义'})
        # Collapse "$ procedure $" / "$ function $" back into valid dollar-quote
        # tags: downstream SQL formatting can insert spaces that would otherwise
        # make the emitted DDL unexecutable in the database.
        ddl = re.sub(r'\$\s*procedure\s*\$', '$procedure$', result[0])
        ddl = re.sub(r'\$\s*function\s*\$', '$function$', ddl)
        return jsonify({'status': 'success', 'ddl': ddl})
    except Exception as e:
        return jsonify({'status': 'error', 'message': str(e)})
    finally:
        # Always release the connection; the original leaked it whenever an
        # exception fired before conn.close().
        if conn is not None:
            conn.close()

def get_trigger_ddl(schema, trigger_name):
    """Build the DDL for one trigger: its trigger function(s) followed by the CREATE TRIGGER statement.

    Connection settings come from the logged-in user's session. Returns a
    Flask JSON response with 'status'/'ddl' (statements joined with blank
    lines) or 'status'/'message' on failure.
    """
    conn = None
    try:
        db_config = session['db_config']
        conn = psycopg2.connect(
            host=db_config['host'],
            port=db_config['port'],
            user=db_config['username'],
            password=db_config['password'],
            database=db_config['database']
        )
        cur = conn.cursor()

        # NOTE(review): trigger_function matches EVERY trigger-returning
        # function in the schema, not just the one attached to this trigger —
        # confirm whether that breadth is intended.
        query = """
        WITH trigger_function AS (
            SELECT
                pg_catalog.pg_get_functiondef(p.oid) || E'\n' ||
                CASE WHEN d.description IS NOT NULL THEN 
                    'COMMENT ON FUNCTION ' || quote_ident(n.nspname) || '.' || quote_ident(p.proname) || E'()' || 
                    ' IS ' || quote_literal(d.description) || ';' 
                    ELSE '' END AS function_ddl
            FROM pg_catalog.pg_proc p
            JOIN pg_catalog.pg_namespace n ON n.oid = p.pronamespace
            JOIN pg_catalog.pg_language l ON l.oid = p.prolang
            LEFT JOIN pg_catalog.pg_description d ON d.objoid = p.oid AND d.classoid = 'pg_proc'::regclass
            WHERE n.nspname = %s
              AND p.prorettype = 'trigger'::regtype
              AND n.nspname !~ '^pg_|information_schema'
        ),
        trigger_definition AS (
            SELECT
                'CREATE TRIGGER ' || quote_ident(t.tgname) || E'\n' ||
                '    ' || 
                CASE 
                    WHEN t.tgtype & 1 > 0 THEN 'BEFORE' 
                    WHEN t.tgtype & 64 > 0 THEN 'INSTEAD OF'
                    ELSE 'AFTER' 
                END || ' ' ||
                CASE 
                    WHEN t.tgtype & 2 > 0 THEN 'INSERT' 
                    WHEN t.tgtype & 4 > 0 THEN 'DELETE'
                    WHEN t.tgtype & 8 > 0 THEN 'UPDATE'
                    WHEN t.tgtype & 16 > 0 THEN 'TRUNCATE'
                END || E'\n' ||
                '    ON ' || quote_ident(n.nspname) || '.' || quote_ident(c.relname) || E'\n' ||
                '    FOR EACH ' || 
                    CASE WHEN t.tgtype & 32 > 0 THEN 'STATEMENT' ELSE 'ROW' END || E'\n' ||
                CASE WHEN t.tgqual IS NOT NULL THEN 
                    '    WHEN (' || pg_catalog.pg_get_triggerdef(t.oid, true) || ')' || E'\n' 
                    ELSE '' END ||
                '    EXECUTE FUNCTION ' || quote_ident(fn.nspname) || '.' || quote_ident(p.proname) || E'();\n' ||
                CASE WHEN d.description IS NOT NULL THEN 
                    'COMMENT ON TRIGGER ' || quote_ident(t.tgname) || ' ON ' || 
                    quote_ident(n.nspname) || '.' || quote_ident(c.relname) || 
                    ' IS ' || quote_literal(d.description) || ';' 
                    ELSE '' END AS trigger_ddl
            FROM pg_catalog.pg_trigger t
            JOIN pg_catalog.pg_class c ON c.oid = t.tgrelid
            JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
            JOIN pg_catalog.pg_proc p ON p.oid = t.tgfoid
            JOIN pg_catalog.pg_namespace fn ON fn.oid = p.pronamespace
            LEFT JOIN pg_catalog.pg_description d ON d.objoid = t.oid AND d.classoid = 'pg_trigger'::regclass
            WHERE NOT t.tgisinternal
              AND n.nspname = %s
              AND t.tgname = %s
              AND n.nspname !~ '^pg_|information_schema'
        )
        SELECT function_ddl AS ddl FROM trigger_function
        UNION ALL
        SELECT trigger_ddl AS ddl FROM trigger_definition
        """

        # Parameters: schema for trigger_function; schema and trigger_name for
        # trigger_definition.
        cur.execute(query, (schema, schema, trigger_name))
        results = cur.fetchall()

        if not results or len(results) == 0:
            return jsonify({'status': 'error', 'message': '未找到触发器定义'})

        # Function definitions first, then the CREATE TRIGGER statement,
        # separated by blank lines.
        ddl = '\n\n'.join([row[0] for row in results])
        return jsonify({'status': 'success', 'ddl': ddl})
    except Exception as e:
        return jsonify({'status': 'error', 'message': str(e)})
    finally:
        # Always release the connection; the original leaked it whenever an
        # exception fired before conn.close().
        if conn is not None:
            conn.close()

def get_sequence_ddl(schema, sequence_name):
    """Build the CREATE SEQUENCE DDL (plus optional COMMENT ON) for one sequence.

    Connects to the database described by the Flask session's 'db_config',
    queries pg_catalog for the sequence's parameters, description and
    OWNED BY column, and returns a Flask JSON response:

    Args:
        schema: schema name the sequence lives in.
        sequence_name: name of the sequence.

    Returns:
        jsonify({'status': 'success', 'ddl': <text>}) on success, or
        jsonify({'status': 'error', 'message': <text>}) when the sequence
        is not found or any exception occurs.
    """
    conn = None
    try:
        db_config = session['db_config']
        conn = psycopg2.connect(
            host=db_config['host'],
            port=db_config['port'],
            user=db_config['username'],
            password=db_config['password'],
            database=db_config['database']
        )
        cur = conn.cursor()

        # sequence_info: raw pg_sequence parameters plus the pg_depend link
        # (deptype 'a') that records which table column OWNS the sequence.
        # owned_by_info: resolves that link into a quoted schema.table.column.
        # Final SELECT assembles the CREATE SEQUENCE statement and, when a
        # description exists, appends a COMMENT ON SEQUENCE statement.
        query = """
        WITH sequence_info AS (
            SELECT
                n.nspname AS schema_name,
                c.relname AS sequence_name,
                s.seqstart AS start_value,
                s.seqincrement AS increment_by,
                s.seqmax AS max_value,
                s.seqmin AS min_value,
                s.seqcache AS cache_size,
                s.seqcycle AS is_cycled,
                pg_catalog.obj_description(c.oid) AS sequence_description,
                d.refobjid AS owned_by_table_oid,
                d.refobjsubid AS owned_by_column_attnum
            FROM pg_class c
            JOIN pg_namespace n ON n.oid = c.relnamespace
            JOIN pg_sequence s ON s.seqrelid = c.oid
            LEFT JOIN pg_depend d ON d.objid = c.oid AND d.deptype = 'a'
            WHERE c.relkind = 'S'
              AND n.nspname = %s
              AND c.relname = %s
        ),
        owned_by_info AS (
            SELECT
                s.schema_name,
                s.sequence_name,
                CASE WHEN s.owned_by_table_oid IS NOT NULL THEN
                    quote_ident(tbl.relnamespace::regnamespace::text) || '.' || quote_ident(tbl.relname) || '.' || quote_ident(att.attname)
                ELSE NULL END AS owned_by_column
            FROM sequence_info s
            LEFT JOIN pg_class tbl ON tbl.oid = s.owned_by_table_oid
            LEFT JOIN pg_attribute att ON att.attrelid = s.owned_by_table_oid AND att.attnum = s.owned_by_column_attnum
        )
        SELECT
            'CREATE SEQUENCE ' || quote_ident(s.schema_name) || '.' || quote_ident(s.sequence_name) || E'\n' ||
            '    START WITH ' || s.start_value || E'\n' ||
            '    INCREMENT BY ' || s.increment_by || E'\n' ||
            '    ' || CASE WHEN s.min_value = 1 THEN 'NO MINVALUE' ELSE 'MINVALUE ' || s.min_value END || E'\n' ||
            '    ' || CASE WHEN s.max_value = 9223372036854775807 THEN 'NO MAXVALUE' ELSE 'MAXVALUE ' || s.max_value END || E'\n' ||
            '    CACHE ' || s.cache_size || E'\n' ||
            '    ' || CASE WHEN s.is_cycled THEN 'CYCLE' ELSE 'NO CYCLE' END || E'\n' ||
            COALESCE('    OWNED BY ' || o.owned_by_column || E'\n', '') ||
            ';' ||
            CASE WHEN s.sequence_description IS NOT NULL THEN 
                E'\nCOMMENT ON SEQUENCE ' || quote_ident(s.schema_name) || '.' || quote_ident(s.sequence_name) || 
                ' IS ' || quote_literal(s.sequence_description) || ';' 
                ELSE '' END AS create_sequence_statement
        FROM sequence_info s
        JOIN owned_by_info o ON s.schema_name = o.schema_name AND s.sequence_name = o.sequence_name
        WHERE s.schema_name = %s AND s.sequence_name = %s
        """

        # Parameters appear twice: once inside the sequence_info CTE and
        # once in the outer WHERE clause.
        cur.execute(query, (schema, sequence_name, schema, sequence_name))
        result = cur.fetchone()

        if result is None or len(result) == 0 or result[0] is None:
            return jsonify({'status': 'error', 'message': '未找到序列结构'})
        return jsonify({'status': 'success', 'ddl': result[0]})
    except Exception as e:
        return jsonify({'status': 'error', 'message': str(e)})
    finally:
        # Close the connection on every path; the original leaked it when
        # the query raised, because close() only ran on the success path.
        if conn is not None:
            conn.close()


if __name__ == '__main__':
    logger.info(f'PosgreSQL对象结构获取工具 starting in {"DEBUG" if args.debug else "NORMAL"} 模式')

    # SSL cert/key paths default to None (HTTP mode) until config provides them.
    ssl_cert = None
    ssl_key = None

    # Config file location: ./config/config.ini relative to the working directory.
    config_dir = os.path.join(os.getcwd(), 'config')
    config_file = os.path.join(config_dir, 'config.ini')

    # Always create the parser so the later PORT lookup is safe even when the
    # config file is missing. Previously `config` was left undefined in that
    # case, so `config.has_section(...)` raised NameError, which the broad
    # except below swallowed while logging a misleading "read port failed"
    # error instead of the intended default-port message.
    config = configparser.ConfigParser()

    if os.path.exists(config_file):
        try:
            config.read(config_file)

            # Pull the SSL certificate / private-key paths, if configured.
            if 'SSL' in config and 'server_crt' in config['SSL'] and 'server_key' in config['SSL']:
                # os.path.normpath converts '/' separators from the config
                # file into the platform's native form (e.g. '\' on Windows).
                ssl_cert = os.path.normpath(config['SSL']['server_crt'])
                ssl_key = os.path.normpath(config['SSL']['server_key'])
                logger.info(f'从 {config_file} 文件中读取 SSL 配置')
                logger.info(f'规范化 cert 文件路径: {ssl_cert}')
                logger.info(f'规范化 key  文件路径: {ssl_key}')
            else:
                logger.warning(f'{config_file} 文件中的 SSL 配置无效, 缺少值')
        except Exception as e:
            logger.error(f'{config_file} 文件读取失败: {str(e)}')
    else:
        logger.info(f'{config_file} 文件中没有找到 SSL 配置')

    # Report whether the configured cert/key files actually exist on disk.
    if ssl_cert and ssl_key:
        logger.info(f'检查服务器证书路径: {ssl_cert}, 存在: {os.path.exists(ssl_cert)}')
        logger.info(f'检查服务器私钥路径: {ssl_key},  存在: {os.path.exists(ssl_key)}')

    # Build the TLS context for HTTPS (server-side / one-way verification).
    ssl_context = None
    protocol = 'HTTP'

    if ssl_cert and ssl_key and os.path.exists(ssl_cert) and os.path.exists(ssl_key):
        try:
            # An explicit SSLContext is more reliable on Windows than handing
            # Flask a bare (cert, key) tuple.
            context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
            context.load_cert_chain(certfile=ssl_cert, keyfile=ssl_key)
            ssl_context = context
            logger.info(f'创建可兼容 Windows 的 SSL 上下文成功')
        except Exception as e:
            logger.error(f'创建可兼容 Windows 的 SSL 上下文失败: {str(e)}')
            # Fall back to Flask's (certfile, keyfile) tuple form.
            ssl_context = (ssl_cert, ssl_key)
            logger.info(f'回退默认的 SSL 方法')

        protocol = 'HTTPS'
        logger.info('服务器将以 HTTPS 模式启动')
    else:
        logger.info('SSL 证书未找到或者无效, 服务器将以 HTTP 模式启动')

    # Resolve the listening port from config, falling back to the default.
    try:
        if config.has_section('PORT') and config.has_option('PORT', 'port'):
            port = config.getint('PORT', 'port')
            logger.info(f'从 {config_file} 配置文件中读取端口 {port}')
            if port < 5001:
                logger.warning('Windows 系统建议使用 5001 以上的端口,端口范围[5001-65535]')
        else:
            port = 7363  # default port
            logger.info(f'使用默认的端口 {port},Windows 系统建议使用 5001 以上的端口,端口范围[5001-65535]')
    except Exception as e:
        port = 7363  # fall back to the default on any parse error
        logger.error(f'从配置文件 {config_file} 读取端口失败: {str(e)}, 使用默认端口 {port}')

    try:
        # Enumerate this host's IPv4 addresses so operators know where to connect.
        hostname = socket.gethostname()
        ip_addresses = socket.getaddrinfo(hostname, None, socket.AF_INET)
        unique_ips = {addrinfo[4][0] for addrinfo in ip_addresses}

        logger.info(f'服务器主机名: {hostname}')
        logger.info(f"可用的 IP 地址: {', '.join(sorted(unique_ips))}")
        logger.info(f'服务器监听所有网络地址 (0.0.0.0:{port})')
        logger.info(f"访问标识: {' or '.join([f'{protocol}://{ip}:{port}' for ip in sorted(unique_ips)])}")
    except Exception as e:
        logger.error(f'服务器地址获取失败: {str(e)}')
        logger.info(f'服务器监听所有网络地址 (0.0.0.0:{port})')

    app.run(
        debug=args.debug,  # debug mode comes from the --debug CLI flag
        host='0.0.0.0',
        port=port,
        ssl_context=ssl_context
    )
    # NOTE: unreachable while app.run() blocks; kept for parity with the original.
    logger.info(f'服务以 {protocol} 启动')
