package io.github.chutian0610.sqlparser.token

import arrow.core.Either
import arrow.core.Some
import io.github.chutian0610.sqlparser.Location
import io.github.chutian0610.sqlparser.TestCase
import io.github.chutian0610.sqlparser.allDialectsWhen
import io.github.chutian0610.sqlparser.dialect.ClickHouse
import io.github.chutian0610.sqlparser.dialect.GenericDialect
import io.kotest.core.spec.style.FunSpec
import io.kotest.datatest.withData
import io.kotest.matchers.shouldBe

/**
 * Unit tests for [Tokenizer]: each test feeds a SQL string through a
 * dialect-specific tokenizer and asserts the exact token stream produced,
 * including whitespace tokens. Error cases assert the returned
 * `Either.Left(TokenizerError)` with a 1-based [Location] (line, column).
 */
class TokenizerTest : FunSpec({

    test("tokenize select 1") {
        val sql = "SELECT 1"
        val tokenizer = Tokenizer.new(GenericDialect())
        val result = tokenizer.tokenize(sql)
        // tokenize returns Either; getOrNull() extracts the Right (success) token list
        requireNotNull(result.getOrNull()) shouldBe listOf(
            Token.makeKeyWord("SELECT"),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.Literal(LiteralEnum.Number("1", null))
        )
    }
    test("tokenize select .1") {
        // a leading decimal point is accepted as part of a numeric literal
        val sql = "SELECT .1"
        val tokenizer = Tokenizer.new(GenericDialect())
        val result = tokenizer.tokenize(sql)
        requireNotNull(result.getOrNull()) shouldBe listOf(
            Token.makeKeyWord("SELECT"),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.Literal(LiteralEnum.Number(".1", null))
        )
    }
    test("tokenize double equal") {
        // `==` is a single DoubleEq symbol under the ClickHouse dialect
        val sql = "SELECT foo=='1'"
        val tokenizer = Tokenizer.new(ClickHouse())
        val result = tokenizer.tokenize(sql)
        requireNotNull(result.getOrNull()) shouldBe listOf(
            Token.makeKeyWord("SELECT"),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.Identifier("foo", null),
            Token.Symbol(SymbolEnum.DoubleEq),
            Token.Literal(LiteralEnum.SingleQuotedStringLiteral("1"))
        )
    }
    test("tokenize numeric literal not support underscore") {
        // GenericDialect: the underscore ends the number; "_000" becomes a word token
        val sql = "SELECT 10_000"
        val tokenizer = Tokenizer.new(GenericDialect())
        val result = tokenizer.tokenize(sql)
        requireNotNull(result.getOrNull()) shouldBe listOf(
            Token.makeKeyWord("SELECT"),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.Literal(LiteralEnum.Number("10", null)),
            Token.makeWord("_000", null),
        )
    }

    test("tokenize numeric literal support underscore") {
        // Runs against every dialect reporting supportsNumericLiteralUnderscores():
        // underscores are only kept inside a number when surrounded by digits.
        allDialectsWhen { it.supportsNumericLiteralUnderscores() }.tokenizesExpect(
            "SELECT 10_000, _10_000, 10_00_, 10___0", listOf(
                Token.makeKeyWord("SELECT"),
                Token.WhiteSpace(WhiteSpaceEnum.Space),
                Token.Literal(LiteralEnum.Number("10_000", null)),
                Token.Symbol(SymbolEnum.Comma),
                Token.WhiteSpace(WhiteSpaceEnum.Space),
                // a leading underscore tokenizes as a word (parsed as a column identifier)
                Token.makeWord("_10_000", null),
                Token.Symbol(SymbolEnum.Comma),
                Token.WhiteSpace(WhiteSpaceEnum.Space),
                Token.Literal(LiteralEnum.Number("10_00", null)),
                // a trailing underscore tokenizes as a separate word (syntax error in some dialects)
                Token.makeWord("_", null),
                Token.Symbol(SymbolEnum.Comma),
                Token.WhiteSpace(WhiteSpaceEnum.Space),
                Token.Literal(LiteralEnum.Number("10", null)),
                // multiple consecutive underscores tokenize as a word (syntax error in some dialects)
                Token.makeWord("___0", null),
            )
        )
    }

    test("tokenize exponent") {
        // Scientific notation: "e" with optional sign is folded into the number
        // only when followed by digits; otherwise the number ends before the "e".
        val sql = "SELECT 1e10, 1e-10, 1e+10, 1ea, 1e-10a, 1e-10-10"
        val tokenizer = Tokenizer.new(GenericDialect())
        val result = tokenizer.tokenize(sql)
        requireNotNull(result.getOrNull()) shouldBe listOf(
            Token.makeKeyWord("SELECT"),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.Literal(LiteralEnum.Number("1e10", null)),
            Token.Symbol(SymbolEnum.Comma),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.Literal(LiteralEnum.Number("1e-10", null)),
            Token.Symbol(SymbolEnum.Comma),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.Literal(LiteralEnum.Number("1e+10", null)),
            Token.Symbol(SymbolEnum.Comma),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            // "1ea": no digit after "e", so number is "1" and "ea" is a word
            Token.Literal(LiteralEnum.Number("1", null)),
            Token.makeWord("ea", null),
            Token.Symbol(SymbolEnum.Comma),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            // "1e-10a": exponent consumed greedily, trailing "a" is a word
            Token.Literal(LiteralEnum.Number("1e-10", null)),
            Token.makeWord("a", null),
            Token.Symbol(SymbolEnum.Comma),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            // "1e-10-10": second "-" is a Minus operator, not part of the exponent
            Token.Literal(LiteralEnum.Number("1e-10", null)),
            Token.Symbol(SymbolEnum.Minus),
            Token.Literal(LiteralEnum.Number("10", null)),
        )
    }

    test("tokenize scalar function") {
        // Function names are plain words at the token level; no special function token
        val sql = "SELECT sqrt(1)"
        val tokenizer = Tokenizer.new(GenericDialect())
        val result = tokenizer.tokenize(sql)
        requireNotNull(result.getOrNull()) shouldBe listOf(
            Token.makeKeyWord("SELECT"),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.makeWord("sqrt", null),
            Token.Symbol(SymbolEnum.LParen),
            Token.Literal(LiteralEnum.Number("1", null)),
            Token.Symbol(SymbolEnum.RParen),
        )
    }
    test("tokenize string string concat") {
        // `||` tokenizes as the StringConcat symbol
        val sql = "SELECT 'a' || 'b'"
        val tokenizer = Tokenizer.new(GenericDialect())
        val result = tokenizer.tokenize(sql)
        requireNotNull(result.getOrNull()) shouldBe listOf(
            Token.makeKeyWord("SELECT"),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.Literal(LiteralEnum.SingleQuotedStringLiteral("a")),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.Symbol(SymbolEnum.StringConcat),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.Literal(LiteralEnum.SingleQuotedStringLiteral("b")),
        )
    }
    test("tokenize bitwise op") {
        // single `|` and `^` are the bitwise Pipe and Caret symbols
        val sql = "SELECT one | two ^ three"
        val tokenizer = Tokenizer.new(GenericDialect())
        val result = tokenizer.tokenize(sql)
        requireNotNull(result.getOrNull()) shouldBe listOf(
            Token.makeKeyWord("SELECT"),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.makeWord("one", null),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.Symbol(SymbolEnum.Pipe),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.makeWord("two", null),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.Symbol(SymbolEnum.Caret),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.makeWord("three", null)
        )
    }
    test("tokenize logical xor") {
        // XOR, true, and false are all keywords in the generic dialect
        val sql = "SELECT true XOR true, false XOR false, true XOR false, false XOR true"
        val tokenizer = Tokenizer.new(GenericDialect())
        val result = tokenizer.tokenize(sql)
        requireNotNull(result.getOrNull()) shouldBe listOf(
            Token.makeKeyWord("SELECT"),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.makeKeyWord("true"),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.makeKeyWord("XOR"),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.makeKeyWord("true"),
            Token.Symbol(SymbolEnum.Comma),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.makeKeyWord("false"),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.makeKeyWord("XOR"),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.makeKeyWord("false"),
            Token.Symbol(SymbolEnum.Comma),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.makeKeyWord("true"),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.makeKeyWord("XOR"),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.makeKeyWord("false"),
            Token.Symbol(SymbolEnum.Comma),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.makeKeyWord("false"),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.makeKeyWord("XOR"),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.makeKeyWord("true")
        )
    }

    test("tokenize simple select") {
        val sql = "SELECT * FROM customer WHERE id = 1 LIMIT 5"
        val tokenizer = Tokenizer.new(GenericDialect())
        val result = tokenizer.tokenize(sql)
        requireNotNull(result.getOrNull()) shouldBe listOf(
            Token.makeKeyWord("SELECT"),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.Symbol(SymbolEnum.Mul),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.makeKeyWord("FROM"),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.makeWord("customer", null),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.makeKeyWord("WHERE"),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.makeWord("id", null),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.Symbol(SymbolEnum.Eq),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.Literal(LiteralEnum.Number("1", null)),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.makeKeyWord("LIMIT"),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.Literal(LiteralEnum.Number("5", null)),
        )
    }
    test("tokenize explain select") {
        val sql = "EXPLAIN SELECT * FROM customer WHERE id = 1"
        val tokenizer = Tokenizer.new(GenericDialect())
        val result = tokenizer.tokenize(sql)
        requireNotNull(result.getOrNull()) shouldBe listOf(
            Token.makeKeyWord("EXPLAIN"),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.makeKeyWord("SELECT"),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.Symbol(SymbolEnum.Mul),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.makeKeyWord("FROM"),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.makeWord("customer", null),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.makeKeyWord("WHERE"),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.makeWord("id", null),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.Symbol(SymbolEnum.Eq),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.Literal(LiteralEnum.Number("1", null))
        )
    }

    test("tokenize explain analyze select") {
        val sql = "EXPLAIN ANALYZE SELECT * FROM customer WHERE id = 1"
        val tokenizer = Tokenizer.new(GenericDialect())
        val result = tokenizer.tokenize(sql)
        requireNotNull(result.getOrNull()) shouldBe listOf(
            Token.makeKeyWord("EXPLAIN"),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.makeKeyWord("ANALYZE"),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.makeKeyWord("SELECT"),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.Symbol(SymbolEnum.Mul),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.makeKeyWord("FROM"),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.makeWord("customer", null),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.makeKeyWord("WHERE"),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.makeWord("id", null),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.Symbol(SymbolEnum.Eq),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.Literal(LiteralEnum.Number("1", null))
        )
    }

    test("tokenize string predicate") {
        // `!=` tokenizes as NEqAlias (the alias form of the not-equal operator)
        val sql = "SELECT * FROM customer WHERE salary != 'Not Provided'"
        val tokenizer = Tokenizer.new(GenericDialect())
        val result = tokenizer.tokenize(sql)
        requireNotNull(result.getOrNull()) shouldBe listOf(
            Token.makeKeyWord("SELECT"),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.Symbol(SymbolEnum.Mul),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.makeKeyWord("FROM"),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.makeWord("customer", null),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.makeKeyWord("WHERE"),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.makeWord("salary", null),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.Symbol(SymbolEnum.NEqAlias),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.Literal(LiteralEnum.SingleQuotedStringLiteral("Not Provided"))
        )
    }

    test("tokenize invalid string") {
        // A char that can start neither a word nor a symbol becomes IllegalChar;
        // the Arabic letters plus 'h' still form a single word token.
        // warning: emoji order in file is not as shown in idea.
        // use another editor to check emoji order
        val sql = "\n💝مصطفىh"
        val tokenizer = Tokenizer.new(GenericDialect())
        val result = tokenizer.tokenize(sql)
        requireNotNull(result.getOrNull()) shouldBe listOf(
            Token.WhiteSpace(WhiteSpaceEnum.NewLine),
            Token.IllegalChar("💝"),
            Token.makeWord("مصطفىh", null),
        )
    }

    test("tokenize newline in string literal") {
        // newlines (both \r\n and \n) are preserved inside a quoted literal
        val sql = "'foo\r\nbar\nbaz'"
        val tokenizer = Tokenizer.new(GenericDialect())
        val result = tokenizer.tokenize(sql)
        requireNotNull(result.getOrNull()) shouldBe listOf(
            Token.Literal(LiteralEnum.SingleQuotedStringLiteral("foo\r\nbar\nbaz"))
        )
    }

    test("tokenize unterminated string literal") {
        val sql = "select 'foo"
        val tokenizer = Tokenizer.new(GenericDialect())
        val result = tokenizer.tokenize(sql)
        // Location(line=1, col=8): 1-based column of the opening single quote
        result shouldBe Either.Left(TokenizerError(Location(1, 8), "Unterminated string literal"))
    }
    test("tokenize unterminated string literal utf8") {
        val sql = "SELECT \"なにか\" FROM Y WHERE \"なにか\" = 'test;"
        val tokenizer = Tokenizer.new(GenericDialect())
        val result = tokenizer.tokenize(sql)
        // col 35 is the opening quote of 'test; — each code point counts as one column
        result shouldBe Either.Left(TokenizerError(Location(1, 35), "Unterminated string literal"))
    }

    test("tokenize invalid string cols") {
        val sql = "\n\nSELECT * FROM table\t💝مصطفىh"
        val tokenizer = Tokenizer.new(GenericDialect())
        val result = tokenizer.tokenize(sql)
        requireNotNull(result.getOrNull()) shouldBe listOf(
            Token.WhiteSpace(WhiteSpaceEnum.NewLine),
            Token.WhiteSpace(WhiteSpaceEnum.NewLine),
            Token.makeKeyWord("SELECT"),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.Symbol(SymbolEnum.Mul),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            Token.makeKeyWord("FROM"),
            Token.WhiteSpace(WhiteSpaceEnum.Space),
            // "table" is itself a keyword in this dialect's keyword list
            Token.makeKeyWord("table"),
            Token.WhiteSpace(WhiteSpaceEnum.Tab),
            Token.IllegalChar("💝"),
            Token.makeWord("مصطفىh", null),
        )
    }
    // PostgreSQL-style dollar-quoted strings: $tag$ ... $tag$.
    // The SQL inputs use Kotlin multi-dollar interpolation prefixes ($$"…", $$$"…",
    // $$$$"…") so that single/double/triple `$` characters inside the literal are
    // taken verbatim instead of starting a template expression.
    context("tokenize dollar quoted string tagged") {
        withData(
            TestCase(
                // body may contain bare `$`, other $word$ runs, and `$$` — only the
                // exact closing tag $tag$ terminates the literal
                $$$$"SELECT $tag$dollar '$' quoted strings have $tags like this$ or like this $$$tag$", listOf(
                    Token.makeKeyWord("SELECT"),
                    Token.WhiteSpace(WhiteSpaceEnum.Space),
                    Token.Literal(
                        LiteralEnum.DollarQuotedStringLiteral(
                            $$"dollar '$' quoted strings have $tags like this$ or like this $$",
                            Some("tag")
                        )
                    )
                )
            ),
            TestCase(
                // $ab$ inside does not close the $abc$ tag; body is "x$ab"
                $$"SELECT $abc$x$ab$abc$", listOf(
                    Token.makeKeyWord("SELECT"),
                    Token.WhiteSpace(WhiteSpaceEnum.Space),
                    Token.Literal(
                        LiteralEnum.DollarQuotedStringLiteral(
                            $$"x$ab",
                            Some("abc")
                        )
                    )
                )
            ),
            TestCase(
                // empty body between immediately adjacent open/close tags
                $$$"SELECT $abc$$abc$", listOf(
                    Token.makeKeyWord("SELECT"),
                    Token.WhiteSpace(WhiteSpaceEnum.Space),
                    Token.Literal(
                        LiteralEnum.DollarQuotedStringLiteral(
                            "",
                            Some("abc")
                        )
                    )
                )
            ),
            TestCase(
                // dollar-quoted literal sandwiched between numeric literals
                $$$"0$abc$$abc$1", listOf(
                    Token.Literal(LiteralEnum.Number("0", null)),
                    Token.Literal(
                        LiteralEnum.DollarQuotedStringLiteral(
                            "",
                            Some("abc")
                        )
                    ),
                    Token.Literal(LiteralEnum.Number("1", null))
                )
            ),
            TestCase(
                // nested $q$…$q$ stays inside the body of the $function$ literal
                $$$"$function$abc$q$data$q$$function$", listOf(
                    Token.Literal(
                        LiteralEnum.DollarQuotedStringLiteral(
                            $$"abc$q$data$q$",
                            Some("function")
                        )
                    ),
                )
            )
        ) { (sql, expectedTokens) ->
            val tokenizer = Tokenizer.new(GenericDialect())
            val result = tokenizer.tokenize(sql)
            requireNotNull(result.getOrNull()) shouldBe expectedTokens
        }
    }


})