package com.zcyr.core.sql;

import com.alibaba.druid.sql.ast.*;
import com.alibaba.druid.sql.ast.statement.*;
import com.alibaba.druid.sql.dialect.postgresql.parser.PGSQLStatementParser;
import com.alibaba.druid.sql.parser.ParserException;
import com.alibaba.druid.sql.parser.SQLCreateTableParser;
import com.alibaba.druid.sql.parser.Token;
import com.alibaba.druid.util.FnvHash;
import com.zcyr.core.sql.statement.OGCreateSchemaStatement;
import com.zcyr.core.sql.statement.OGDropSchemaStatement;

import java.util.ArrayList;
import java.util.List;

/**
 * SQL statement parser for openGauss. Most of the grammar is delegated to the
 * PostgreSQL (PG) parser; only the CREATE / DROP entry points are overridden to
 * add openGauss-specific statements (CREATE/DROP SCHEMA, CREATE USER ... PASSWORD).
 */
public class OGSQLStatementParser extends PGSQLStatementParser implements IOGSQLStatementParser {
    public OGSQLStatementParser(String sql) {
        super(new OGExprParser(sql));
    }

    /**
     * Parses a list of statements. No openGauss-specific handling here; the
     * override exists only to make the delegation explicit.
     */
    @Override
    public void parseStatementList(List<SQLStatement> statementList, int max, SQLObject parent) {
        super.parseStatementList(statementList, max, parent);
    }

    /**
     * Dispatches a CREATE statement to the matching sub-parser.
     *
     * <p>Largely mirrors Druid's generic CREATE dispatch, with an extra branch
     * for {@code CREATE SCHEMA} (openGauss compatibility). Several branches
     * rewind the lexer to the saved CREATE position because their sub-parsers
     * expect to re-read the statement from the CREATE keyword.
     *
     * @throws ParserException if the token after CREATE is not a supported object kind
     */
    @Override
    public SQLStatement parseCreate() {
        // Mark the lexer position so branches below can rewind to CREATE and
        // let the dedicated sub-parser re-consume the whole statement.
        char markChar = lexer.current();
        int markBp = lexer.bp();

        List<String> comments = null;
        if (lexer.isKeepComments() && lexer.hasComment()) {
            comments = lexer.readAndResetComments();
        }

        accept(Token.CREATE);

        // Token immediately after CREATE; stays valid through the else-if chain
        // below until the lexer is advanced inside a branch.
        Token token = lexer.token();

        if (token == Token.TABLE || lexer.identifierEquals("GLOBAL")) {
            SQLCreateTableParser createTableParser = getSQLCreateTableParser();
            SQLCreateTableStatement stmt = createTableParser.parseCreateTable(false);

            if (comments != null) {
                stmt.addBeforeComment(comments);
            }

            return stmt;
        } else if (token == Token.INDEX //
                || token == Token.UNIQUE //
                || lexer.identifierEquals("NONCLUSTERED") // sql server
        ) {
            return parseCreateIndex(false);
        } else if (token == Token.SEQUENCE) {
            return parseCreateSequence(false);
        } else if (token == Token.OR) {
            // CREATE OR REPLACE <object>: peek past OR REPLACE [FORCE] to find
            // the object kind, then rewind so the sub-parser sees CREATE again.
            lexer.nextToken();
            accept(Token.REPLACE);

            if (lexer.identifierEquals("FORCE")) {
                lexer.nextToken();
            }
            if (lexer.token() == Token.PROCEDURE) {
                lexer.reset(markBp, markChar, Token.CREATE);
                return parseCreateProcedure();
            }

            if (lexer.token() == Token.VIEW) {
                lexer.reset(markBp, markChar, Token.CREATE);
                return parseCreateView();
            }

            if (lexer.token() == Token.TRIGGER) {
                lexer.reset(markBp, markChar, Token.CREATE);
                return parseCreateTrigger();
            }

            if (lexer.token() == Token.FUNCTION) {
                lexer.reset(markBp, markChar, Token.CREATE);
                return parseCreateFunction();
            }

            if (lexer.identifierEquals(FnvHash.Constants.PACKAGE)) {
                lexer.reset(markBp, markChar, Token.CREATE);
                return parseCreatePackage();
            }

            if (lexer.identifierEquals(FnvHash.Constants.TYPE)) {
                lexer.reset(markBp, markChar, Token.CREATE);
                return parseCreateType();
            }

            if (lexer.identifierEquals(FnvHash.Constants.PUBLIC)) {
                lexer.reset(markBp, markChar, Token.CREATE);
                return parseCreateSynonym();
            }

            if (lexer.identifierEquals(FnvHash.Constants.SYNONYM)) {
                lexer.reset(markBp, markChar, Token.CREATE);
                return parseCreateSynonym();
            }

            throw new ParserException("TODO " + lexer.info());
        } else if (token == Token.DATABASE) {
            // Distinguish CREATE DATABASE LINK (Oracle-style) from CREATE DATABASE.
            lexer.nextToken();
            if (lexer.identifierEquals("LINK")) {
                lexer.reset(markBp, markChar, Token.CREATE);
                return parseCreateDbLink();
            }

            lexer.reset(markBp, markChar, Token.CREATE);
            return parseCreateDatabase();
        } else if (token == Token.USER) {
            lexer.reset(markBp, markChar, Token.CREATE);
            return parseCreateUser();
        } else if (lexer.identifierEquals(FnvHash.Constants.PUBLIC)) {
            lexer.nextToken();
            if (lexer.identifierEquals("SYNONYM")) {
                lexer.reset(markBp, markChar, Token.CREATE);
                return parseCreateSynonym();
            } else {
                lexer.reset(markBp, markChar, Token.CREATE);
                return parseCreateDbLink();
            }
        } else if (lexer.identifierEquals("SHARE")) {
            lexer.reset(markBp, markChar, Token.CREATE);
            return parseCreateDbLink();
        } else if (lexer.identifierEquals("SYNONYM")) {
            lexer.reset(markBp, markChar, Token.CREATE);
            return parseCreateSynonym();
        } else if (token == Token.VIEW) {
            return parseCreateView();
        } else if (token == Token.TRIGGER) {
            lexer.reset(markBp, markChar, Token.CREATE);
            return parseCreateTrigger();
        } else if (token == Token.PROCEDURE) {
            SQLCreateProcedureStatement stmt = parseCreateProcedure();
            stmt.setCreate(true);
            return stmt;
        } else if (token == Token.FUNCTION) {
            lexer.reset(markBp, markChar, Token.CREATE);
            return this.parseCreateFunction();
        } else if (lexer.identifierEquals(FnvHash.Constants.BITMAP)) {
            lexer.reset(markBp, markChar, Token.CREATE);
            return parseCreateIndex(true);
        } else if (lexer.identifierEquals(FnvHash.Constants.MATERIALIZED)) {
            lexer.reset(markBp, markChar, Token.CREATE);
            return parseCreateMaterializedView();
        } else if (lexer.identifierEquals(FnvHash.Constants.TYPE)) {
            lexer.reset(markBp, markChar, Token.CREATE);
            return parseCreateType();
        } else if (token == Token.SCHEMA) {
            // openGauss compatibility: CREATE SCHEMA.
            return parseCreateSchema();
        }

        // Use lexer.info() (position + context) for consistency with the other
        // diagnostics in this class.
        throw new ParserException("TODO " + lexer.info());
    }

    /**
     * Dispatches a DROP statement to the matching sub-parser, with an extra
     * branch for {@code DROP SCHEMA} (openGauss compatibility).
     *
     * @throws ParserException if the token after DROP is not a supported object kind
     */
    @Override
    public SQLStatement parseDrop() {
        List<String> beforeComments = null;
        if (lexer.isKeepComments() && lexer.hasComment()) {
            beforeComments = lexer.readAndResetComments();
        }

        lexer.nextToken();

        final SQLStatement stmt;

        List<SQLCommentHint> hints = null;
        if (lexer.token() == Token.HINT) {
            hints = this.exprParser.parseHints();
        }

        if (lexer.token() == Token.TABLE || lexer.identifierEquals("TEMPORARY")) {
            SQLDropTableStatement dropTable = parseDropTable(false);
            if (hints != null) {
                dropTable.setHints(hints);
            }
            stmt = dropTable;
        } else if (lexer.token() == Token.USER) {
            stmt = parseDropUser();
        } else if (lexer.token() == Token.INDEX) {
            stmt = parseDropIndex();
        } else if (lexer.token() == Token.VIEW) {
            stmt = parseDropView(false);
        } else if (lexer.token() == Token.TRIGGER) {
            stmt = parseDropTrigger(false);
        } else if (lexer.token() == Token.SCHEMA) {
            // openGauss compatibility: DROP SCHEMA.
            stmt = parseDropSchema(false);
        } else if (lexer.token() == Token.DATABASE) {
            stmt = parseDropDatabase(false);
        } else if (lexer.token() == Token.FUNCTION) {
            stmt = parseDropFunction(false);
        } else if (lexer.token() == Token.TABLESPACE) {
            stmt = parseDropTablespace(false);
        } else if (lexer.token() == Token.PROCEDURE) {
            stmt = parseDropProcedure(false);
        } else if (lexer.token() == Token.SEQUENCE) {
            stmt = parseDropSequence(false);
        } else if (lexer.identifierEquals(FnvHash.Constants.EVENT)) {
            stmt = parseDropEvent();
        } else if (lexer.identifierEquals(FnvHash.Constants.LOGFILE)) {
            stmt = parseDropLogFileGroup();
        } else if (lexer.identifierEquals(FnvHash.Constants.SERVER)) {
            stmt = parseDropServer();
        } else {
            throw new ParserException("TODO " + lexer.info());
        }

        if (beforeComments != null) {
            stmt.addBeforeComment(beforeComments);
        }
        return stmt;
    }

    /**
     * Parses {@code DROP SCHEMA [IF EXISTS] name[, name...]} (also accepts the
     * DATABASE keyword in place of SCHEMA).
     *
     * @param acceptDrop whether the DROP keyword is still pending on the lexer
     * @return the parsed drop-schema statement
     */
    protected OGDropSchemaStatement parseDropSchema(boolean acceptDrop) {
        if (acceptDrop) {
            accept(Token.DROP);
        }

        OGDropSchemaStatement stmt = new OGDropSchemaStatement();

        if (lexer.token() == Token.SCHEMA) {
            lexer.nextToken();
        } else {
            accept(Token.DATABASE);
        }

        if (lexer.token() == Token.IF) {
            lexer.nextToken();
            accept(Token.EXISTS);
            stmt.setIfExists(true);
        }

        // DROP SCHEMA supports a comma-separated list of schema names.
        ArrayList<SQLName> names = new ArrayList<>();

        this.exprParser.names(names);

        stmt.setSchemas(names);
        return stmt;
    }

    /**
     * Parses {@code CREATE USER name PASSWORD 'pwd'} (openGauss syntax) or the
     * Oracle-style {@code CREATE USER name IDENTIFIED BY 'pwd'}.
     *
     * @return the parsed create-user statement
     */
    @Override
    public SQLStatement parseCreateUser() {
        accept(Token.CREATE);
        accept(Token.USER);

        SQLCreateUserStatement stmt = new SQLCreateUserStatement();
        stmt.setUser(this.exprParser.name());

        if (lexer.token() == Token.PASSWORD) {
            accept(Token.PASSWORD);
        } else {
            accept(Token.IDENTIFIED);
            accept(Token.BY);
        }

        stmt.setPassword(this.exprParser.primary());

        return stmt;
    }

    /**
     * Parses {@code CREATE SCHEMA name}. The CREATE keyword is optional here
     * because {@link #parseCreate()} may already have consumed it.
     *
     * @return the parsed create-schema statement
     */
    @Override
    public SQLStatement parseCreateSchema() {
        if (lexer.token() == Token.CREATE) {
            lexer.nextToken();
        }

        accept(Token.SCHEMA);

        OGCreateSchemaStatement stmt = new OGCreateSchemaStatement(getDbType());
        stmt.setSchema(this.exprParser.name());
        return stmt;
    }
}
