package io.github.chutian0610.sqlparser.token

import arrow.core.*
import arrow.core.raise.either
import io.github.chutian0610.sqlparser.Location
import io.github.chutian0610.sqlparser.dialect.*
import io.github.chutian0610.sqlparser.fromUnicodeCodePoints
import io.github.chutian0610.sqlparser.token.CodePoint.Companion.equalsChar
import io.github.chutian0610.sqlparser.token.CodePoint.Companion.inCharArray
import io.github.chutian0610.sqlparser.token.Token.EOF

/**
 * SQL Tokenizer
 */
class Tokenizer(
    /**
     *  When true (default) the tokenizer unescapes literal values
     *  (for example, `""` in SQL is unescaped to the literal `"`).
     *
     *  When false, the tokenizer provides the raw strings as provided
     *  in the query.  This can be helpful for programs that wish to
     *  recover the exact original query text without normalizing
     *  the escaping.
     */
    var unescape: Boolean,
    val dialect: Dialect
) {
    companion object {
        /**
         * Creates a [Tokenizer] for the given [dialect] with unescaping enabled
         * (the default behaviour).
         */
        fun new(dialect: Dialect): Tokenizer = Tokenizer(unescape = true, dialect = dialect)
    }

    fun withUnescape(unescape: Boolean) = apply { this.unescape = unescape }

    /**
     * Tokenize the statement and produce a list of tokens
     * (location information is discarded).
     */
    fun tokenize(query: String): Either<TokenizerError, List<Token>> =
        tokenizeWithLocation(query).map { located -> located.map { it.token } }

    /**
     *  Tokenize the statement and produce a list of tokens with location information.
     */
    fun tokenizeWithLocation(query: String): Either<TokenizerError, List<TokenWithLocation>> =
        TokenizerContext.fromString(query).let { ctx ->
            // On success, the tokens accumulated in the context are the result.
            tokenizeWithContext(ctx).map { ctx.tokens() }
        }

    /**
     * Tokenize the statement and append tokens with location information into the provided buffer in Context.
     * If an error occurs, the buffer will contain all tokens that were successfully parsed before the error,
     * and the error is returned as a [Either.Left].
     */
    fun tokenizeWithContext(context: TokenizerContext): Either<TokenizerError, Unit> {
        // Start of the span covered by the next token.
        var location = context.location()
        while (true) {
            // Propagate the first tokenizer error immediately; tokens collected
            // so far remain in the context buffer.
            val token = nextToken(context, context.lastToken())
                .getOrElse { return Either.Left(it) }
            if (token == EOF) {
                // End of input reached — nothing more to append.
                return Either.Right(Unit)
            }
            // Record the token together with the source span it covers.
            val section = location.toSection(context.location())
            context.tokens().add(TokenWithLocation(token, section))
            location = context.location()
        }
    }

    /**
     * Get the next token or return null
     */
    fun nextToken(context: TokenizerContext, prevToken: Option<Token>): Either<TokenizerError, Token> {
        val char = context.peek()
        return when {
            /*  ======== EOF ======== */
            char == null -> Either.Right(EOF)
            /*  ======== WhiteSpace ======== */
            inCharArray(char, ' ') -> consumeAndReturn(context, Token.WhiteSpace(WhiteSpaceEnum.Space))
            inCharArray(char, '\t') -> consumeAndReturn(context, Token.WhiteSpace(WhiteSpaceEnum.Tab))
            inCharArray(char, '\n') -> consumeAndReturn(context, Token.WhiteSpace(WhiteSpaceEnum.NewLine))
            inCharArray(char, '\r') -> {
                // Emit a single Whitespace::Newline token for \r and \r\n
                context.next()
                if (CodePoint.equalsChar(context.peek(), '\n')) {
                    context.next()
                }
                Either.Right(Token.WhiteSpace(WhiteSpaceEnum.NewLine))
            }
            /*  ======== String Literal ======== */
            // mysql use `b` or `B` for byte string literal
            dialectIn(this.dialect, MySQL::class, GenericDialect::class)
                    && inCharArray(char, 'b', 'B') -> {
                context.next()
                val ch = context.peek()// consume 'b' or 'B'
                when {
                    inCharArray(ch, '\'') -> {
                        return tokenizeSingleCharQuotedString(context, '\'', false)
                            .map { Token.Literal(LiteralEnum.SingleQuotedByteStringLiteral(it)) }
                    }

                    inCharArray(ch, '"') -> {
                        return tokenizeSingleCharQuotedString(context, '\"', false)
                            .map { Token.Literal(LiteralEnum.DoubleQuotedByteStringLiteral(it)) }
                    }

                    else -> {
                        // regular identifier starting with an "b" or "B"
                        val s = tokenizeWord(context, char.toUnicodeChar())
                        return Either.Right(Token.makeWord(s, null))
                    }
                }
            }
            // PostgreSQL accepts "escape" string constants, which are an extension to the SQL standard.
            this.dialect.supportsStringEscapeConstant() && inCharArray(char, 'e', 'E') -> {
                val startLocation = context.location()
                context.next() // consume, to check the next char
                val ch = context.peek()
                when {
                    inCharArray(ch, '\'') -> {
                        return tokenizeEscapedSingleQuotedString(context, startLocation)
                            .map { Token.Literal(LiteralEnum.EscapedStringLiteral(it)) }
                    }

                    else -> {
                        // regular identifier starting with an "E" or "e"
                        val s = tokenizeWord(context, char.toUnicodeChar())
                        return Either.Right(Token.makeWord(s, null))
                    }
                }

            }
            // Unicode string literals like U&'first \000A second' are supported in some dialects, including PostgreSQL
            this.dialect.supportsUnicodeStringLiteral() && inCharArray(char, 'u', 'U') -> {
                context.next() // consume, to check the next char
                if (inCharArray(context.peek(), '&')) {
                    // we cannot advance the iterator here, as we need to consume the '&' later if the 'u' was an identifier
                    val contextSlice = context.slice()
                    // consume the '&' in the clone
                    contextSlice.next()
                    if (inCharArray(contextSlice.peek(), '\'')) {
                        context.next() // consume the '&' in the original iterator
                        return rawUnicodeSingleQuotedString(context).map {
                            Token.Literal(
                                LiteralEnum.UnicodeRawStringLiteral(
                                    it
                                )
                            )
                        }
                    }
                }
                // regular identifier starting with an "u" or "U"
                val s = tokenizeWord(context, char.toUnicodeChar())
                return Either.Right(Token.makeWord(s, null))
            }
            // The spec only allows an uppercase 'X' to introduce a hex
            // string, but PostgreSQL, at least, allows a lowercase 'x' too.
            inCharArray(char, 'x', 'X') -> {
                context.next() // consume, to check the next char
                val ch = context.peek()
                when {
                    inCharArray(ch, '\'') -> {
                        // X'...' - a <binary string literal>
                        tokenizeSingleCharQuotedString(context, '\'', true)
                            .map { Token.Literal(LiteralEnum.HexStringLiteral(it)) }
                    }

                    else -> {
                        // regular identifier starting with an "X"
                        val s = tokenizeWord(context, char.toUnicodeChar())
                        return Either.Right(Token.makeWord(s, null))
                    }
                }
            }

            inCharArray(char, '\'') -> {
                // single-quoted string
                tokenizeSingleCharQuotedString(context, '\'', this.dialect.supportsStringLiteralBackslashEscape())
                    .map { Token.Literal(LiteralEnum.SingleQuotedStringLiteral(it)) }
            }

            inCharArray(char, '"') && !this.dialect.isDelimitedIdentifierStart(char)
                    && !this.dialect.isIdentifierStart(char) -> {
                // double-quoted string
                tokenizeSingleCharQuotedString(context, '"', this.dialect.supportsStringLiteralBackslashEscape())
                    .map { Token.Literal(LiteralEnum.DoubleQuotedStringLiteral(it)) }
            }
            /*  ======== Quoted Identifier ======== */
            this.dialect.isDelimitedIdentifierStart(char) -> {
                // delimited (quoted) identifier
                val s = tokenizeQuotedIdentifier(context, char)
                return s.map { Token.makeWord(it, char) }
            }

            /*  ======== Number && Period ======== */
            CodePoint.inCharRange(char, '0'..'9') || inCharArray(char, '.') -> {
                // special case where if ._ is encountered after a word then that word
                // is a table and the _ is the start of the col name.
                // if the prev token is not a word, then this is not a valid sql
                // word or number.
                if (inCharArray(char, '.') && inCharArray(context.peek(2), '_')) {
                    if (prevToken.isSome { Token.isWord(it) }) {
                        context.next()
                        return Either.Right(Token.KeyWord(KeyWordEnum.PERIOD))
                    }
                    return tokenizerError(context.location(), "Unexpected character '_'")
                }
                // Some dialects support underscore as a number separator (for example `10_000`).
                // There can only be one at a time, and it must be followed by another digit
                var s = peekingNextTakeWhile(context) { ch: CodePoint, nextCh: CodePoint? ->
                    ch.isAsciiDigit() || isNumberSeparator(ch, nextCh)
                }

                if (s == "0" && inCharArray(context.peek(), 'x')) {
                    // match binary literal that starts with 0x
                    context.next()
                    val s2 = peekingNextTakeWhile(context) { ch: CodePoint, nextCh: CodePoint? ->
                        ch.isAsciiHexDigit() || isNumberSeparator(ch, nextCh)
                    }
                    return Either.Right(Token.Literal(LiteralEnum.HexStringLiteral(s2)))
                }
                // match one period
                if (inCharArray(context.peek(), '.')) {
                    s += '.'
                    context.next()
                }
                // If the dialect supports identifiers that start with a numeric prefix (such as tables named `59901_user_login`)
                // and we have now consumed a dot, check if the previous token was a Word.
                // If so, what follows is definitely not part of a decimal number and
                // we should yield the dot as a dedicated token so compound identifiers
                // starting with digits can be parsed correctly.
                if (s == "." && this.dialect.supportsNumericPrefix()) {
                    if (prevToken.isSome { Token.isWord(it) }) {
                        return Either.Right(Token.Symbol(SymbolEnum.Period))
                    }
                }
                // Consume fractional digits.
                s += peekingNextTakeWhile(context) { ch: CodePoint, nextCh: CodePoint? ->
                    ch.isAsciiDigit() || isNumberSeparator(ch, nextCh)
                }

                // No fraction -> Token::Period
                if (s == ".") {
                    return Either.Right(Token.Symbol(SymbolEnum.Period))
                }

                // Parse exponent as number
                var exponentPart = ""
                if (context.peek().toOption().isSome { inCharArray(it, 'e', 'E') }) {
                    val contextSlice = context.slice()
                    exponentPart += (requireNotNull(contextSlice.next()) { "impossible null case" }.toUnicodeChar())

                    // Optional sign
                    if (contextSlice.peek() != null) {
                        val c = requireNotNull(contextSlice.peek()) { "impossible null case" }
                        if (inCharArray(c, '+', '-')) {
                            exponentPart += c.toUnicodeChar()
                            contextSlice.next()
                        }
                    }

                    if (contextSlice.peek() != null) {
                        // Definitely an exponent, get original iterator up to speed and use it
                        if (requireNotNull(contextSlice.peek()).isAsciiDigit()) {
                            exponentPart.indices.forEach { i ->
                                context.next()
                            }

                            exponentPart += peekingTakeWhile(context) { ch: CodePoint -> ch.isAsciiDigit() }
                            s += exponentPart
                        }
                    }
                }

                // If the dialect supports identifiers that start with a numeric prefix,
                // we need to check if the value is in fact an identifier and must thus
                // be tokenized as a word.
                if (this.dialect.supportsNumericPrefix()) {
                    if (exponentPart.isEmpty()) {
                        // If it is not a number with an exponent, it may be an identifier starting with digits.
                        val identifier =
                            peekingTakeWhile(context) { ch: CodePoint -> this.dialect.isIdentifierPart(ch) }
                        if (identifier.isNotEmpty()) {
                            s += identifier
                            return Either.Right(Token.Identifier(s, null))
                        }
                    } else if (prevToken.isSome { it is Token.Symbol && it.value == SymbolEnum.Period }) {
                        // If the previous token was a period, thus not belonging to a number,
                        // the value we have is part of an identifier.
                        return Either.Right(Token.Identifier(s, null))
                    }
                }
                if (dialectIs(this.dialect, Hive::class)) {
                    // See https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=27838462#LanguageManualTypes-IntegralTypes(TINYINT,SMALLINT,INT/INTEGER,BIGINT)
                    // Hive Support Number Postfixes
                    // TINYINT  - Y
                    // SMALLINT - S
                    // BIGINT   - L
                    //  e.g. `100Y` -- Represents a TINYINT literal with value 100 (IN HIVE)
                    val ch = context.peek()
                    when {
                        inCharArray(ch, 'L', 'l') -> return Either.Right(
                            Token.Literal(
                                LiteralEnum.Number(
                                    s,
                                    NumberType.BIGINT
                                )
                            )
                        )

                        inCharArray(ch, 'S', 's') -> return Either.Right(
                            Token.Literal(
                                LiteralEnum.Number(
                                    s,
                                    NumberType.SMALLINT
                                )
                            )
                        )

                        inCharArray(ch, 'Y', 'y') -> return Either.Right(
                            Token.Literal(
                                LiteralEnum.Number(
                                    s,
                                    NumberType.TINYINT
                                )
                            )
                        )

                        else -> {}
                    }
                }
                // Number without declared Type
                return Either.Right(Token.Literal(LiteralEnum.Number(s, null)))
            }
            /*  ======== punctuation ======== */
            inCharArray(char, '(') -> consumeAndReturn(context, Token.Symbol(SymbolEnum.LParen))
            inCharArray(char, ')') -> consumeAndReturn(context, Token.Symbol(SymbolEnum.RParen))
            inCharArray(char, ',') -> consumeAndReturn(context, Token.Symbol(SymbolEnum.Comma))
            /*  ======== Operator ======== */
            inCharArray(char, '-') -> {
                context.next() // consume the '-'
                val ch = context.peek()
                when {
                    inCharArray(ch, '-') -> {
                        var isComment = true
                        if (this.dialect.requiresSingleLineCommentWhitespace()) {
                            isComment = context.peek(2).toOption().isSome { inCharArray(it, ' ') }
                        }
                        if (isComment) {
                            context.next() // consume second '-'
                            val comment = tokenizeSingleLineComment(context)
                            return comment.map { Token.WhiteSpace(WhiteSpaceEnum.SingleLineComment("--", it)) }
                        }
                        this.tokenizeBinaryOperator(context, "-", Token.Symbol(SymbolEnum.Minus))
                    }

                    inCharArray(ch, '>') -> {
                        context.next()
                        val ch = context.peek()
                        if (ch.toOption().isSome { inCharArray(it, '>') }) {
                            this.consumerForCustomBinaryOperator(context, "->>", Token.Symbol(SymbolEnum.LongArrow))
                        } else {
                            this.tokenizeBinaryOperator(context, "->", Token.Symbol(SymbolEnum.Arrow))
                        }
                    }

                    else -> {
                        // a regular '-' operator
                        this.tokenizeBinaryOperator(context, "-", Token.Symbol(SymbolEnum.Minus))
                    }
                }
            }

            inCharArray(char, '/') -> {
                context.next() // consume the '/'
                val ch = context.peek()
                when {
                    inCharArray(ch, '*') -> {
                        context.next() // consume the '*', starting a multi-line comment
                        tokenizeMultilineComment(context)
                    }
                    // a regular '/' operator
                    else -> Either.Right(Token.Symbol(SymbolEnum.Div))
                }
            }

            inCharArray(char, '+') -> consumeAndReturn(context, Token.Symbol(SymbolEnum.Plus))
            inCharArray(char, '*') -> consumeAndReturn(context, Token.Symbol(SymbolEnum.Mul))
            inCharArray(char, '%') -> {
                context.next() // advance past '%'
                val ch = context.peek()
                when {
                    ch != null && ch.isWhitespace() -> {
                        Either.Right(Token.Symbol(SymbolEnum.Mod))
                    }

                    ch != null && this.dialect.isIdentifierStart(CodePoint.fromChar('%')) -> {
                        tokenizeIdentifierOrKeyword(context, arrayOf(char, ch))
                    }

                    else -> {
                        tokenizeBinaryOperator(context, "%", Token.Symbol(SymbolEnum.Mod))
                    }
                }
            }

            inCharArray(char, '|') -> {
                context.next() // consume the '|'
                val ch = context.peek()
                when {
                    inCharArray(ch, '/') -> {
                        consumerForCustomBinaryOperator(context, "|/", Token.Symbol(SymbolEnum.PGSquareRoot))
                    }

                    inCharArray(ch, '|') -> {
                        context.next() // consume second '|'
                        val c = context.peek()
                        when {
                            inCharArray(c, '/') -> {
                                consumerForCustomBinaryOperator(context, "||/", Token.Symbol(SymbolEnum.PGCubeRoot))
                            }

                            else -> {
                                tokenizeBinaryOperator(context, "||", Token.Symbol(SymbolEnum.StringConcat))
                            }

                        }
                    }

                    inCharArray(ch, '&') && this.dialect.supportsGeometricTypes() -> {
                        context.next() // consume
                        val c = context.peek()
                        when {
                            inCharArray(c, '>') -> consumerForCustomBinaryOperator(
                                context,
                                "|&>",
                                Token.Symbol(SymbolEnum.VerticalBarAmpersandRightAngleBracket)
                            )

                            else -> tokenizeBinaryOperatorWithOpt(context, "|&", None)
                        }
                    }

                    inCharArray(ch, '>') && this.dialect.supportsGeometricTypes() -> {
                        context.next() // consume
                        val c = context.peek()
                        when {
                            inCharArray(c, '>') -> consumerForCustomBinaryOperator(
                                context, "|>>", Token.Symbol(SymbolEnum.VerticalBarShiftRight)
                            )

                            else -> tokenizeBinaryOperatorWithOpt(context, "|>", None)
                        }
                    }

                    inCharArray(ch, '>') && this.dialect.supportsPipeOperator() -> {
                        consumerForCustomBinaryOperator(
                            context,
                            "|>",
                            Token.Symbol(SymbolEnum.VerticalBarRightAngleBracket)
                        )
                    }

                    else -> tokenizeBinaryOperator(context, "|", Token.Symbol(SymbolEnum.Pipe))
                }
            }

            inCharArray(char, '=') -> {
                context.next() // consume
                val ch = context.peek()
                when {
                    inCharArray(ch, '>') -> consumeAndReturn(context, Token.Symbol(SymbolEnum.RArrow))
                    inCharArray(ch, '=') -> consumeAndReturn(context, Token.Symbol(SymbolEnum.DoubleEq))
                    else -> Either.Right(Token.Symbol(SymbolEnum.Eq))
                }
            }

            inCharArray(char, '!') -> {
                context.next() // consume
                val ch = context.peek()
                when {
                    inCharArray(ch, '=') -> consumeAndReturn(context, Token.Symbol(SymbolEnum.NEqAlias))
                    inCharArray(ch, '!') -> consumeAndReturn(
                        context,
                        Token.Symbol(SymbolEnum.DoubleExclamationMark)
                    )

                    inCharArray(ch, '~') -> {
                        context.next()
                        val c = context.peek()
                        when {
                            inCharArray(c, '*') -> consumeAndReturn(
                                context,
                                Token.Symbol(SymbolEnum.ExclamationMarkTildeAsterisk)
                            )

                            inCharArray(c, '~') -> {
                                context.next()
                                val cc = context.peek()
                                when {
                                    inCharArray(cc, '*') -> consumeAndReturn(
                                        context,
                                        Token.Symbol(SymbolEnum.ExclamationMarkDoubleTildeAsterisk)
                                    )

                                    else -> Either.Right(Token.Symbol(SymbolEnum.ExclamationMarkDoubleTilde))
                                }
                            }

                            else -> Either.Right(Token.Symbol(SymbolEnum.ExclamationMarkTilde))
                        }
                    }

                    else -> {
                        Either.Right(Token.Symbol(SymbolEnum.ExclamationMark))
                    }
                }
            }

            inCharArray(char, '<') -> {
                context.next() // consume
                val ch = context.peek()
                when {
                    inCharArray(ch, '=') -> {
                        context.next()
                        val c = context.peek()
                        when {
                            inCharArray(c, '>') -> consumerForCustomBinaryOperator(
                                context,
                                "<=>",
                                Token.Symbol(SymbolEnum.Spaceship)
                            )

                            else -> tokenizeBinaryOperator(context, "<=", Token.Symbol(SymbolEnum.LtEq))
                        }
                    }

                    inCharArray(ch, '>') -> consumerForCustomBinaryOperator(
                        context,
                        "<>",
                        Token.Symbol(SymbolEnum.NEq)
                    )

                    inCharArray(ch, '<') && this.dialect.supportsGeometricTypes() -> {
                        context.next()
                        val c = context.peek()
                        when {
                            inCharArray(c, '|') -> consumerForCustomBinaryOperator(
                                context,
                                "<<|",
                                Token.Symbol(SymbolEnum.ShiftLeftVerticalBar)
                            )

                            else -> tokenizeBinaryOperator(context, "<<", Token.Symbol(SymbolEnum.ShiftLeft))
                        }
                    }

                    inCharArray(ch, '<') -> tokenizeBinaryOperator(
                        context,
                        "<<",
                        Token.Symbol(SymbolEnum.ShiftLeft)
                    )

                    inCharArray(ch, '-') && (this.dialect.supportsGeometricTypes()) -> {
                        context.next()
                        val c = context.peek()
                        when {
                            inCharArray(c, '>') -> consumerForCustomBinaryOperator(
                                context,
                                "<->",
                                Token.Symbol(SymbolEnum.TwoWayArrow)
                            )

                            else -> tokenizeBinaryOperatorWithOpt(context, "<-", None)
                        }
                    }

                    inCharArray(ch, '^') && this.dialect.supportsGeometricTypes() -> {
                        consumerForCustomBinaryOperator(context, "<^", Token.Symbol(SymbolEnum.LeftAngleBracketCaret))
                    }

                    inCharArray(ch, '@') -> consumerForCustomBinaryOperator(
                        context,
                        "<@",
                        Token.Symbol(SymbolEnum.ArrowAt)
                    )

                    else -> tokenizeBinaryOperator(context, "<", Token.Symbol(SymbolEnum.Lt))
                }
            }

            inCharArray(char, '>') -> {
                context.next()
                val ch = context.peek()
                when {
                    inCharArray(ch, '=') -> consumerForCustomBinaryOperator(
                        context,
                        ">=",
                        Token.Symbol(SymbolEnum.GtEq)
                    )

                    inCharArray(ch, '>') -> consumerForCustomBinaryOperator(
                        context,
                        ">>",
                        Token.Symbol(SymbolEnum.ShiftRight)
                    )

                    inCharArray(
                        ch,
                        '^'
                    ) && this.dialect.supportsGeometricTypes() -> consumerForCustomBinaryOperator(
                        context,
                        ">^",
                        Token.Symbol(SymbolEnum.ShiftRight)
                    )

                    else -> tokenizeBinaryOperator(context, ">", Token.Symbol(SymbolEnum.Gt))

                }
            }

            inCharArray(char, ':') -> {
                context.next()
                val ch = context.peek()
                when {
                    inCharArray(ch, ':') -> consumeAndReturn(
                        context,
                        Token.Symbol(SymbolEnum.DoubleColon)
                    )

                    inCharArray(ch, '=') -> consumeAndReturn(context, Token.Symbol(SymbolEnum.Assignment))
                    else -> Either.Right(Token.Symbol(SymbolEnum.Colon))
                }
            }

            inCharArray(char, ';') -> consumeAndReturn(context, Token.Symbol(SymbolEnum.Semicolon))
            inCharArray(char, '\\') -> consumeAndReturn(context, Token.Symbol(SymbolEnum.Backslash))
            inCharArray(char, '[') -> consumeAndReturn(context, Token.Symbol(SymbolEnum.LBracket))
            inCharArray(char, ']') -> consumeAndReturn(context, Token.Symbol(SymbolEnum.RBracket))
            inCharArray(char, '&') -> {
                context.next()
                val ch = context.peek()
                when {
                    inCharArray(ch, '>') && this.dialect.supportsGeometricTypes() -> {
                        context.next()
                        consumeAndReturn(context, Token.Symbol(SymbolEnum.AmpersandRightAngleBracket))
                    }

                    inCharArray(ch, '<') && this.dialect.supportsGeometricTypes() -> {
                        context.next()
                        val c = context.peek()
                        when {
                            inCharArray(c, '|') -> consumeAndReturn(
                                context,
                                Token.Symbol(SymbolEnum.AmpersandLeftAngleBracketVerticalBar)
                            )

                            else -> tokenizeBinaryOperator(
                                context,
                                "&<", Token.Symbol(SymbolEnum.AmpersandLeftAngleBracket)
                            )
                        }
                    }

                    inCharArray(ch, '&') -> {
                        context.next() // consume the second '&'
                        tokenizeBinaryOperator(context, "&&", Token.Symbol(SymbolEnum.Overlap))
                    }
                    // Bitshift '&' operator
                    else -> tokenizeBinaryOperator(context, "&", Token.Symbol(SymbolEnum.Ampersand))
                }
            }

            inCharArray(char, '^') -> {
                context.next()
                val ch = context.peek()
                when {
                    inCharArray(ch, '@') -> consumeAndReturn(context, Token.Symbol(SymbolEnum.CaretAt))
                    else -> Either.Right(Token.Symbol(SymbolEnum.Caret))
                }
            }

            inCharArray(char, '{') -> consumeAndReturn(context, Token.Symbol(SymbolEnum.LBrace))
            inCharArray(char, '}') -> consumeAndReturn(context, Token.Symbol(SymbolEnum.RBrace))
            inCharArray(char, '#') && dialectIn(this.dialect, MySQL::class, Hive::class) -> {
                context.next()
                val comment = tokenizeSingleLineComment(context)
                return comment.map { Token.WhiteSpace(WhiteSpaceEnum.SingleLineComment("#", it)) }
            }

            inCharArray(char, '~') -> {
                context.next()
                val ch = context.peek()
                when {
                    inCharArray(ch, '*') -> consumerForCustomBinaryOperator(
                        context,
                        "~*",
                        Token.Symbol(SymbolEnum.TildeAsterisk)
                    )

                    inCharArray(
                        ch,
                        '='
                    ) && this.dialect.supportsGeometricTypes() -> consumerForCustomBinaryOperator(
                        context,
                        "~=",
                        Token.Symbol(SymbolEnum.TildeEq)
                    )

                    inCharArray(ch, '~') -> {
                        context.next()
                        val ch = context.peek()
                        when {
                            inCharArray(ch, '*') -> consumerForCustomBinaryOperator(
                                context,
                                "~~*",
                                Token.Symbol(SymbolEnum.DoubleTildeAsterisk)
                            )

                            else -> tokenizeBinaryOperator(context, "~~", Token.Symbol(SymbolEnum.DoubleTilde))
                        }
                    }

                    else -> tokenizeBinaryOperator(context, "~", Token.Symbol(SymbolEnum.Tilde))
                }
            }

            inCharArray(char, '#') -> {
                context.next()
                val ch = context.peek()
                when {
                    inCharArray(ch, '-') -> consumerForCustomBinaryOperator(
                        context,
                        "#-",
                        Token.Symbol(SymbolEnum.HashMinus)
                    )

                    inCharArray(ch, '>') -> {
                        context.next()
                        val c = context.peek()
                        when {
                            inCharArray(c, '>') -> consumerForCustomBinaryOperator(
                                context,
                                "#>>",
                                Token.Symbol(SymbolEnum.HashLongArrow)
                            )

                            else -> tokenizeBinaryOperator(context, "#>", Token.Symbol(SymbolEnum.HashArrow))
                        }
                    }

                    inCharArray(ch, ' ') -> Either.Right(Token.Symbol(SymbolEnum.Sharp))
                    inCharArray(
                        ch,
                        '#'
                    ) && this.dialect.supportsGeometricTypes() -> consumerForCustomBinaryOperator(
                        context,
                        "##",
                        Token.Symbol(SymbolEnum.DoubleSharp)
                    )

                    ch != null && this.dialect.isIdentifierStart(CodePoint.fromChar('#')) ->
                        tokenizeIdentifierOrKeyword(context, arrayOf(char, ch))

                    else -> tokenizeBinaryOperator(context, "#", Token.Symbol(SymbolEnum.Sharp))
                }
            }

            inCharArray(char, '@') -> {
                context.next()
                val ch = context.peek()
                when {
                    inCharArray(ch, '@') && this.dialect.supportsGeometricTypes() -> consumeAndReturn(
                        context,
                        Token.Symbol(SymbolEnum.AtAt)
                    )

                    inCharArray(ch, '-') && this.dialect.supportsGeometricTypes() -> {
                        context.next()
                        val c = context.peek()
                        when {
                            inCharArray(c, '@') -> consumeAndReturn(
                                context,
                                Token.Symbol(SymbolEnum.AtDashAt)
                            )

                            else -> tokenizeBinaryOperatorWithOpt(context, "@-", None)
                        }
                    }

                    inCharArray(ch, '>') -> consumeAndReturn(context, Token.Symbol(SymbolEnum.AtArrow))
                    inCharArray(ch, '?') -> consumeAndReturn(context, Token.Symbol(SymbolEnum.AtQuestion))
                    inCharArray(ch, '@') -> {
                        context.next()
                        val c = context.peek()
                        when {
                            inCharArray(c, ' ') -> consumeAndReturn(
                                context,
                                Token.Symbol(SymbolEnum.AtAt)
                            )

                            c != null && this.dialect.isIdentifierStart(CodePoint.fromChar('@')) ->
                                tokenizeIdentifierOrKeyword(
                                    context, arrayOf(
                                        CodePoint.fromChar('@'), CodePoint.fromChar('@'), c
                                    )
                                )

                            else -> Either.Right(Token.Symbol(SymbolEnum.AtAt))
                        }
                    }

                    inCharArray(ch, ' ') -> Either.Right(Token.Symbol(SymbolEnum.AtSign))
                    // We break on quotes here, because no dialect allows identifiers starting
                    // with @ and containing quotation marks (e.g. `@'foo'`) unless they are
                    // quoted, which is tokenized as a quoted string, not here (e.g.
                    // `"@'foo'"`). Further, at least two dialects parse `@` followed by a
                    // quoted string as two separate tokens, which this allows. For example,
                    // Postgres parses `@'1'` as the absolute value of '1' which is implicitly
                    // cast to a numeric type. And when parsing MySQL-style grantees (e.g.
                    // `GRANT ALL ON *.* to 'root'@'localhost'`), we also want separate tokens
                    // for the user, the `@`, and the host.
                    inCharArray(ch, '\'') -> Either.Right(Token.Symbol(SymbolEnum.AtSign))
                    inCharArray(ch, '"') -> Either.Right(Token.Symbol(SymbolEnum.AtSign))
                    inCharArray(ch, '`') -> Either.Right(Token.Symbol(SymbolEnum.AtSign))
                    ch != null && this.dialect.isIdentifierStart(CodePoint.fromChar('@')) ->
                        tokenizeIdentifierOrKeyword(context, arrayOf(char, ch))

                    else -> Either.Right(Token.Symbol(SymbolEnum.AtSign))
                }
            }
            // Postgres uses ? for jsonb operators, not prepared statements
            inCharArray(char, '?') && this.dialect.supportsGeometricTypes() -> {
                context.next()
                val ch = context.peek()
                when {
                    inCharArray(ch, '|') -> {
                        context.next()
                        val c = context.peek()
                        when {
                            inCharArray(c, '|') -> consumeAndReturn(
                                context,
                                Token.Symbol(SymbolEnum.QuestionMarkDoubleVerticalBar)
                            )

                            else -> Either.Right(Token.Symbol(SymbolEnum.QuestionPipe))
                        }
                    }

                    inCharArray(ch, '&') -> consumeAndReturn(
                        context,
                        Token.Symbol(SymbolEnum.QuestionAnd)
                    )

                    inCharArray(ch, '-') -> {
                        context.next()
                        val c = context.peek()
                        when {
                            inCharArray(c, '|') -> consumeAndReturn(
                                context,
                                Token.Symbol(SymbolEnum.QuestionMarkDashVerticalBar)
                            )

                            else -> Either.Right(Token.Symbol(SymbolEnum.QuestionMarkDash))
                        }
                    }

                    inCharArray(ch, '#') -> consumeAndReturn(
                        context,
                        Token.Symbol(SymbolEnum.QuestionMarkSharp)
                    )

                    else -> consumeAndReturn(context, Token.Symbol(SymbolEnum.Question))
                }
            }

            inCharArray(char, '?') -> {
                context.next()
                val s = peekingTakeWhile(context) { ch: CodePoint -> ch.isNumeric() }
                return Either.Right(Token.Placeholder(s))
            }
            /*  ======== Identifier or KeyWord ======== */
            this.dialect.isIdentifierStart(char) -> tokenizeIdentifierOrKeyword(context, arrayOf(char))
            equalsChar(char, '$') -> tokenizeDollarPrecededValue(context)
            // whitespace check (including unicode chars) should be last as it covers some of the chars above
            char.isWhitespace() -> consumeAndReturn(context, Token.WhiteSpace(WhiteSpaceEnum.Space))
            /*  ======== IllegalChar ======== */
            else -> consumeAndReturn(context, Token.IllegalChar(char.toUnicodeChar()))
        }
    }

    /**
     * Tokenize a quoted identifier (e.g. `"foo"`), with the opening quote still pending.
     *
     * Resolves the matching end-quote for [quoteStart], reads the identifier body
     * (doubled end-quotes are handled by [parseQuotedIdent]), and fails with a
     * [TokenizerError] when no matching end-quote exists or EOF is reached before
     * the close delimiter.
     */
    fun tokenizeQuotedIdentifier(context: TokenizerContext, quoteStart: CodePoint): Either<TokenizerError, String> {
        val startLocation = context.location()
        context.next() // consume the opening quote
        return Token.matchingIdentifierEndQuote(quoteStart).fold(
            { err -> Either.Left(TokenizerError(startLocation, err.message)) },
            { quoteEnd ->
                val (ident, lastChar) = parseQuotedIdent(context, quoteEnd)
                if (lastChar.isSome { it == quoteEnd }) {
                    Either.Right(ident)
                } else {
                    tokenizerError(startLocation, "Expected close delimiter '$quoteEnd' before EOF.")
                }
            }
        )
    }

    /**
     * Read characters up to the closing [quoteEnd], unescaping doubled end-quotes
     * (or keeping them verbatim when [unescape] is disabled).
     *
     * Returns the identifier text paired with the terminating quote wrapped in
     * [Some], or [None] when EOF was hit before a closing quote.
     */
    fun parseQuotedIdent(context: TokenizerContext, quoteEnd: CodePoint): Pair<String, Option<CodePoint>> {
        val ident = StringBuilder()
        var terminator: Option<CodePoint> = None
        while (context.peek() != null) {
            val ch = requireNotNull(context.next()) { "impossible null case" }
            if (ch != quoteEnd) {
                ident.append(ch.toUnicodeChar())
                continue
            }
            if (context.peek() != quoteEnd) {
                // a single end-quote terminates the identifier
                terminator = Some(quoteEnd)
                break
            }
            // doubled end-quote: an escaped quote character inside the identifier
            context.next()
            ident.append(ch.toUnicodeChar())
            if (!this.unescape) {
                // In no-escape mode, the given query has to be saved completely
                ident.append(ch.toUnicodeChar())
            }
        }
        return Pair(ident.toString(), terminator)
    }

    /**
     * Tokenize an identifier or keyword. The last element of [chars] is the current
     * (peeked, not yet consumed) character; any earlier elements were consumed by the
     * caller and form the prefix of the word.
     */
    fun tokenizeIdentifierOrKeyword(context: TokenizerContext, chars: Array<CodePoint>): Either<TokenizerError, Token> {
        context.next() // consume the peeked char
        val word = tokenizeWord(context, String.fromUnicodeCodePoints(chars))
        return Either.Right(Token.makeWord(word, null))
    }

    /**
     * Tokenize an identifier or keyword, after the first char is already consumed.
     * Appends all following identifier-part characters to [prefixChars].
     */
    fun tokenizeWord(context: TokenizerContext, prefixChars: String): String =
        prefixChars + peekingTakeWhile(context) { ch: CodePoint -> this.dialect.isIdentifierPart(ch) }

    /**
     * Tokenize a string literal delimited by a single [quoteStyle] character
     * (e.g. `'...'`), optionally honoring backslash escapes.
     */
    fun tokenizeSingleCharQuotedString(
        context: TokenizerContext,
        quoteStyle: Char,
        backSlashEscape: Boolean
    ): Either<TokenizerError, String> {
        val settings = TokenizeQuotedStringSettings(
            quoteStyle,
            numQuoteChars = NumStringQuoteChars.One,
            numOpeningQuotesToConsume = 1,
            backslashEscape = backSlashEscape
        )
        return tokenizeQuotedString(context, settings)
    }

    /**
     * Tokenize a quoted string literal according to [settings].
     *
     * Consumes [TokenizeQuotedStringSettings.numOpeningQuotesToConsume] opening quote
     * characters, then reads until the closing quote sequence. Handles doubled-quote
     * escapes (e.g. `''` inside `'...'`) and, when
     * [TokenizeQuotedStringSettings.backslashEscape] is set, backslash escape
     * sequences. When [unescape] is false the raw escape text is preserved verbatim.
     *
     * Returns the string body, or a [TokenizerError] for an invalid opening quote
     * or an unterminated literal.
     */
    fun tokenizeQuotedString(
        context: TokenizerContext,
        settings: TokenizeQuotedStringSettings
    ): Either<TokenizerError, String> {
        val s = StringBuilder()
        val errorLocation = context.location()
        // consume and validate the opening quote sequence
        (0 until settings.numOpeningQuotesToConsume).forEach { i ->
            if (!inCharArray(context.next(), settings.quoteStyle)) {
                return tokenizerError(
                    errorLocation,
                    "Invalid string literal opening, Expected ${settings.quoteStyle} quote character"
                )
            }
        }
        // number of quote characters seen immediately before the current position
        var consecutiveQuotesNum = 0
        while (context.peek() != null) {
            val char = requireNotNull(context.peek()) { "impossible null case" }
            // whether the current character maybe the last quote character
            val pendingFinalQuote = when (settings.numQuoteChars) {
                NumStringQuoteChars.One -> {
                    Some(NumStringQuoteChars.One)
                }

                is NumStringQuoteChars.Many -> {
                    if (settings.numQuoteChars.count == consecutiveQuotesNum + 1) {
                        Some(NumStringQuoteChars.Many(consecutiveQuotesNum + 1))
                    } else {
                        None
                    }
                }
            }
            when {
                inCharArray(char, settings.quoteStyle) && pendingFinalQuote.isSome() -> {
                    // consume the last quote character
                    context.next()
                    if (pendingFinalQuote.isSome { it is NumStringQuoteChars.Many }) {
                        // For an initial string like `"""abc"""`, at this point we have
                        // `abc""` in the buffer and have now matched the final `"`.
                        // However, the string to return is simply `abc`, so we strip off
                        // the trailing quotes before returning.
                        val pendingFinalQuoteSome =
                            requireNotNull(pendingFinalQuote.getOrNull()) { "impossible null case" }
                        val quoteCount = (pendingFinalQuoteSome as NumStringQuoteChars.Many).count
                        // BUGFIX: return the truncated buffer directly. The previous code
                        // appended the stripped prefix back onto the buffer, producing
                        // e.g. `abc""abc` for input `"""abc"""` instead of `abc`.
                        return Either.Right(s.substring(0, (s.length - quoteCount) + 1))
                    } else if (inCharArray(context.peek(), settings.quoteStyle)) {
                        // escape case. for example `"""a"`
                        s.append(char.toUnicodeChar())
                        if (!this.unescape) {
                            // In no-escape mode, the given query has to be saved completely
                            s.append(char.toUnicodeChar())
                        }
                        context.next()
                    } else {
                        return Either.Right(s.toString())
                    }
                }

                inCharArray(char, '\\') && settings.backslashEscape -> {
                    // consume backslash
                    context.next()
                    consecutiveQuotesNum = 0

                    if (context.peek() != null) {
                        if (!this.unescape) {
                            // In no-escape mode, the given query has to be saved completely
                            // including backslashes. Similarly, with ignore_like_wildcard_escapes,
                            // the backslash is not stripped.
                            s.append(char.toUnicodeChar())
                            // consume and record next
                            s.append(requireNotNull(context.next()) { "impossible null case" }.toUnicodeChar())
                        } else {
                            // translate the supported escape sequences; any other escaped
                            // character is kept as-is (the backslash is dropped)
                            val next = requireNotNull(context.peek()) { "impossible null case" }
                            val n: String = when (next) {
                                CodePoint.fromChar('0') -> "\u0000"
                                CodePoint.fromChar('a') -> "\u0007"
                                CodePoint.fromChar('b') -> "\u0008"
                                CodePoint.fromChar('f') -> "\u000C"
                                CodePoint.fromChar('n') -> "\n"
                                CodePoint.fromChar('r') -> "\r"
                                CodePoint.fromChar('t') -> "\t"
                                CodePoint.fromChar('Z') -> "\u001a"
                                else -> next.toUnicodeChar()
                            }
                            s.append(n)
                            // consume next
                            context.next()
                        }
                    }
                }

                else -> {
                    // consume common char
                    context.next()
                    if (inCharArray(char, settings.quoteStyle)) {
                        consecutiveQuotesNum += 1
                    } else {
                        consecutiveQuotesNum = 0
                    }
                    s.append(char.toUnicodeChar())
                }
            }
        }
        return tokenizerError(errorLocation, "Unterminated string literal")
    }

    /**
     * Consume characters until newline.
     *
     * Stops at `\n` always, and additionally at `\r` for Postgres. The terminating
     * newline, when present, is consumed and included in the returned comment text;
     * at EOF the comment is returned without a trailing newline.
     */
    fun tokenizeSingleLineComment(context: TokenizerContext): Either<TokenizerError, String> {
        var comment = peekingTakeWhile(context) { ch: CodePoint ->
            when {
                inCharArray(ch, '\n') -> false // Always stop at \n
                inCharArray(ch, '\r') && dialectIs(
                    this.dialect,
                    PostgreSQL::class
                ) -> false // Stop at \r for Postgres
                else -> true // Keep consuming for other characters
            }
        }
        val errorLocation = context.location()
        val ch = context.next().toOption()
        if (ch.isSome()) {
            val c: CodePoint = requireNotNull(ch.getOrNull()) { "impossible null case" }
            if (inCharArray(c, '\n', '\r')) {
                // BUGFIX: append the newline character itself. The previous code
                // concatenated the Option wrapper (`comment + ch`), which embedded
                // the Option's toString() (e.g. "Some(...)") into the comment text.
                comment += c.toUnicodeChar()
            } else {
                return Either.Left(
                    TokenizerError(
                        location = errorLocation,
                        message = "Single Line Comment must end with \\n or \\r"
                    )
                )
            }
        }
        return Either.Right(comment)
    }

    /**
     * Consume the next character, then parse a custom binary operator.
     * The consumed character must already be included in [prefix].
     */
    fun consumerForCustomBinaryOperator(
        context: TokenizerContext,
        prefix: String,
        default: Token
    ): Either<TokenizerError, Token> {
        context.next() // consume the char that completes the prefix
        return tokenizeBinaryOperator(context, prefix, default)
    }

    /**
     * Parse a custom binary operator starting from [prefix], falling back to
     * [default] when no further custom-operator characters follow.
     */
    fun tokenizeBinaryOperator(
        context: TokenizerContext,
        prefix: String,
        default: Token
    ): Either<TokenizerError, Token> =
        tokenizeBinaryOperatorWithOpt(context, prefix, Some(default))

    /**
     * Parse a custom binary operator.
     *
     * Consumes every following character accepted by the dialect as a custom-operator
     * part. If at least one such character was consumed, a
     * [SymbolEnum.CustomBinaryOperator] token of `prefix + suffix` is produced;
     * otherwise [default] is returned, or a [TokenizerError] when no default exists.
     */
    fun tokenizeBinaryOperatorWithOpt(
        context: TokenizerContext,
        prefix: String,
        default: Option<Token>
    ): Either<TokenizerError, Token> {
        // greedily consume any trailing custom-operator characters
        val suffix = peekingTakeWhile(context) { ch: CodePoint -> this.dialect.isCustomOperatorPart(ch) }
        return when {
            suffix.isNotEmpty() ->
                Either.Right(Token.Symbol(SymbolEnum.CustomBinaryOperator(prefix + suffix)))

            default.isSome() ->
                Either.Right(requireNotNull(default.getOrNull()) { "impossible null case" })

            // BUGFIX: interpolate the prefix; the old message contained the literal
            // text "{prefix}" because the '$' template marker was missing.
            else ->
                this.tokenizerError(context.location(), "Expected a valid binary operator after '$prefix'")
        }
    }

    /**
     * Tokenize an escaped single-quoted string; an unterminated literal is reported
     * as a [TokenizerError] at [loc].
     */
    fun tokenizeEscapedSingleQuotedString(context: TokenizerContext, loc: Location): Either<TokenizerError, String> {
        val unescaped = unescapeSingleQuotedString(context)
        return unescaped.toEither { TokenizerError(loc, "Unterminated string literal") }
    }

    /**
     * Tokenize the body of a `/* ... */` comment; the opening `/*` must already be consumed.
     *
     * When the dialect supports nested comments, inner `/* ... */` pairs are tracked
     * and kept verbatim in the comment text. Returns a [Token.WhiteSpace] multi-line
     * comment token, or a [TokenizerError] when EOF is hit before the closing `*/`.
     */
    fun tokenizeMultilineComment(context: TokenizerContext): Either<TokenizerError, Token> {
        val sb = StringBuilder()
        // nesting depth; the already-consumed opening `/*` counts as level 1
        var nested = 1
        val supportsNestedComments = this.dialect.supportsNestedComments()
        while (true) {
            val ch: CodePoint? = context.next()
            when {
                // nested opener `/*` (only when the dialect allows nesting)
                inCharArray(ch, '/') && inCharArray(
                    context.peek(),
                    '*'
                ) && supportsNestedComments -> {
                    context.next() // consume '*'
                    sb.append('/').append('*')
                    nested += 1
                }

                // closer `*/`: pop one nesting level
                inCharArray(ch, '*') && inCharArray(context.peek(), '/') -> {
                    context.next() //consume '/'
                    nested -= 1
                    if (nested == 0) {
                        // outermost comment closed: emit the whitespace token
                        return Either.Right(
                            Token.WhiteSpace(
                                WhiteSpaceEnum.MultiLineComment(
                                    "/*",
                                    sb.toString(),
                                    "*/"
                                )
                            )
                        )
                    }
                    // inner closer: keep it as part of the outer comment text
                    sb.append('*').append('/')
                }

                // ordinary comment character
                ch != null -> {
                    sb.append(ch.toUnicodeChar())
                }

                // EOF before the outermost `*/`
                else -> {
                    return tokenizerError(context.location(), "Unexpected EOF while in a multi-line comment")
                }
            }
        }
    }

    /**
     * Tokenize a value introduced by `$`, with the leading `$` still pending.
     *
     * Depending on dialect support this produces either a dollar-quoted string
     * literal (`$$body$$` or `$tag$body$tag$`) or a placeholder token (`$name`).
     */
    fun tokenizeDollarPrecededValue(context: TokenizerContext): Either<TokenizerError, Token> {
        val sb = StringBuilder()      // literal body
        val value = StringBuilder()   // tag (or placeholder name)

        context.next()
        // If the dialect does not support dollar-quoted strings, then `$$` is rather a placeholder.
        if (inCharArray(context.peek(), '$') && !this.dialect.supportsDollarPlaceholder()) {
            context.next()
            var isTerminated = false
            var prevChar: Option<CodePoint> = None
            // scan for the closing `$$`; a lone `$` is literal content
            while (context.peek() != null) {
                val ch = requireNotNull(context.peek()) { "impossible null case" }
                if (prevChar.isSome { inCharArray(it, '$') }) {
                    if (inCharArray(ch, '$')) {
                        context.next()
                        isTerminated = true
                        break
                    } else {
                        // the previous `$` was not a terminator: emit it plus the current char
                        sb.append('$')
                        sb.append(ch.toUnicodeChar())
                    }
                } else if (!inCharArray(ch, '$')) {
                    sb.append(ch.toUnicodeChar())
                }
                prevChar = Some(ch)
                context.next()
            }
            return if (context.peek() != null && !isTerminated) {
                tokenizerError(context.location(), "Unterminated dollar-quoted string literal")
            } else {
                Either.Right(Token.Literal(LiteralEnum.DollarQuotedStringLiteral(sb.toString(), None)))
            }
        } else {
            // read the tag (e.g. `tag` in `$tag$...$tag$`) or the placeholder name
            value.append(peekingTakeWhile(context, {
                it.isAlphanumeric()
                        || inCharArray(it, '_')
                        // Allow $ as a placeholder character if the dialect supports it
                        || (inCharArray(it, '$') && this.dialect.supportsDollarPlaceholder())
            }))
            // If the dialect does not support dollar-quoted strings, don't look for the end delimiter.
            if (inCharArray(context.peek(), '$') && !this.dialect.supportsDollarPlaceholder()) {
                context.next()
                val temp = StringBuilder()
                val endDelimiter = "$${value}$"
                // consume the body until the matching `$tag$` end delimiter appears
                while (true) {
                    val ch = context.next()
                    when {
                        ch != null -> {
                            temp.append(ch.toUnicodeChar())
                            if (temp.endsWith(endDelimiter)) {
                                val result = temp.removeSuffix(endDelimiter)
                                sb.append(result)
                                break
                            }
                        }

                        else -> {
                            // EOF: only acceptable if the buffer already ends with the delimiter
                            if (temp.endsWith(endDelimiter)) {
                                val result = temp.removeSuffix(endDelimiter)
                                sb.append(result)
                                break
                            }
                            return tokenizerError(context.location(), "Unterminated dollar-quoted, expected $")
                        }
                    }
                }
            } else {
                // no closing delimiter expected: this is a `$name` placeholder token
                return Either.Right(Token.Placeholder("$$value"))
            }
        }
        return Either.Right(
            Token.Literal(
                LiteralEnum.DollarQuotedStringLiteral(
                    text = sb.toString(),
                    tag = if (value.isEmpty()) {
                        None
                    } else {
                        Some(value.toString())
                    }
                )
            )
        )
    }

    fun unescapeSingleQuotedString(context: TokenizerContext): Option<String> = Unescape(context).unescape()

    /**
     * Read a unicode single-quoted string body without unescaping unicode sequences —
     * the UESCAPE character is not known at this point. Doubled quotes (`''`) are
     * reduced to a single `'`.
     */
    fun rawUnicodeSingleQuotedString(context: TokenizerContext): Either<TokenizerError, String> {
        val body = StringBuilder()
        context.next() // consume the opening quote
        while (context.peek() != null) {
            val ch = requireNotNull(context.next()) { "impossible null case" }
            if (!inCharArray(ch, '\'')) {
                body.append(ch.toUnicodeChar())
                continue
            }
            if (inCharArray(context.peek(), '\'')) {
                // doubled quote: an escaped '
                context.next()
                body.append('\'')
            } else {
                // single quote terminates the literal
                return Either.Right(body.toString())
            }
        }
        return tokenizerError(context.location(), "Unterminated unicode raw string literal")
    }

    /** Build a Left-wrapped [TokenizerError] carrying [loc] and [message]. */
    fun <R> tokenizerError(loc: Location, message: String): Either<TokenizerError, R> =
        Either.Left(TokenizerError(loc, message))

    /** Consume the current (already peeked) character and yield [token]. */
    fun consumeAndReturn(context: TokenizerContext, token: Token): Either<TokenizerError, Token> =
        token.right().also { context.next() }

    /**
     * Read from `context` until `predicate` returns `false` or EOF is hit.
     * Returns the characters read as a String, leaving the first non-matching
     * char available via `context.next()`.
     */
    fun peekingTakeWhile(context: TokenizerContext, predicate: (CodePoint) -> Boolean): String =
        buildString {
            var current = context.peek()
            while (current != null && predicate(current)) {
                context.next()
                append(current.toUnicodeChar())
                current = context.peek()
            }
        }

    /**
     * Same as [peekingTakeWhile], but the predicate also receives the character
     * after the current one (or null at end of input).
     */
    fun peekingNextTakeWhile(context: TokenizerContext, predicate: (CodePoint, CodePoint?) -> Boolean): String =
        buildString {
            while (true) {
                val current = context.peek() ?: break
                if (!predicate(current, context.peek(2))) break
                context.next()
                append(current.toUnicodeChar())
            }
        }

    /**
     * Whether [char] acts as an underscore digit separator in a numeric literal
     * (dialect must support it, and the following character must qualify as a digit).
     */
    fun isNumberSeparator(char: CodePoint, nextChar: CodePoint?): Boolean {
        if (!this.dialect.supportsNumericLiteralUnderscores()) return false
        if (!CodePoint.equalsChar(char, '_')) return false
        // NOTE(review): this accepts any ASCII *hex* digit after '_' — confirm
        // decimal digits were not intended here.
        return nextChar.toOption().isSome { it.isAsciiHexDigit() }
    }
}