//! Streaming tokenizer: the file itself is the tokenizer struct (`@This()`).
//! The keyword set and tag names match the Nand2Tetris Jack language.
const Tokenizer = @This();
const std = @import("std");
const String = []const u8;

// Full source text being tokenized; tokens slice into it, nothing is copied.
buffer: String,
// Byte offset of the next unread character in `buffer`.
index: usize = 0,

/// Create a tokenizer positioned at the start of `source`.
/// The tokenizer borrows `source`; it must outlive all returned tokens.
pub fn init(source: String) Tokenizer {
    return Tokenizer{ .buffer = source, .index = 0 };
}

/// Return the next token, or `null` once the input is exhausted.
/// Whitespace, `//` line comments, and `/* */` block comments are skipped.
/// Returned lexemes are slices into `self.buffer` (no allocation).
pub fn next(self: *Tokenizer) ?Token {
    // skip whitespace and comments
    while (self.index < self.buffer.len) {
        switch (self.buffer[self.index]) {
            '\r', '\n', ' ', '\t' => self.index += 1,
            '/' => {
                // A '/' as the very last byte is a symbol, not a comment;
                // guard the lookahead so we never index past the buffer end.
                if (self.index + 1 >= self.buffer.len) break;
                switch (self.buffer[self.index + 1]) {
                    '/' => {
                        // Line comment: skip to just past the next newline,
                        // or to EOF when the file ends without one.
                        if (std.mem.indexOfScalarPos(u8, self.buffer, self.index + 2, '\n')) |nl| {
                            self.index = nl + 1;
                        } else {
                            self.index = self.buffer.len;
                        }
                    },
                    '*' => {
                        // Block comment: search for "*/" strictly after the
                        // opening "/*" so "/*/" is not taken as a complete
                        // comment; an unterminated comment runs to EOF.
                        if (std.mem.indexOfPos(u8, self.buffer, self.index + 2, "*/")) |end| {
                            self.index = end + 2;
                        } else {
                            self.index = self.buffer.len;
                        }
                    },
                    else => break,
                }
            },
            else => break,
        }
    }

    // EOF
    if (self.index >= self.buffer.len) {
        return null;
    }

    return switch (self.buffer[self.index]) {
        '"' => blk: {
            // NOTE(review): an unterminated string still panics here via `.?`;
            // acceptable for a course tokenizer, but callers get no error.
            const end = std.mem.indexOf(u8, self.buffer[self.index + 1 ..], "\"").?;
            // lexeme excludes the surrounding quotes
            const lexeme = self.buffer[self.index + 1 .. self.index + 1 + end];
            self.index = self.index + 2 + end;
            break :blk .{
                .tag = .stringConstant,
                .lexeme = lexeme,
            };
        },
        '0'...'9' => blk: {
            var end = self.index;
            while (end < self.buffer.len and std.ascii.isDigit(self.buffer[end])) {
                end += 1;
            }
            const lexeme = self.buffer[self.index..end];
            self.index = end;
            // INT_CONSTs are values in the range 0 to 32767 (u15).
            // NOTE(review): out-of-range literals hit `unreachable` — the
            // tokenizer trusts its input to be valid.
            break :blk .{
                .tag = .integerConstant,
                .lexeme = lexeme,
                .number = std.fmt.parseInt(u15, lexeme, 10) catch unreachable,
            };
        },
        'a'...'z', 'A'...'Z', '_' => blk: {
            // identifier or keyword: [A-Za-z_][A-Za-z0-9_]*
            var end = self.index;
            while (end < self.buffer.len and (std.ascii.isAlphanumeric(self.buffer[end]) or self.buffer[end] == '_')) {
                end += 1;
            }
            const lexeme = self.buffer[self.index..end];
            self.index = end;
            // Exact spelling of a keyword wins over identifier.
            if (std.meta.stringToEnum(Token.KeyWord, lexeme)) |keyword| {
                break :blk .{
                    .tag = .keyword,
                    .lexeme = lexeme,
                    .keyword = keyword,
                };
            } else {
                break :blk .{
                    .tag = .identifier,
                    .lexeme = lexeme,
                };
            }
        },
        else => blk: {
            // Any other single character is emitted as a symbol token.
            const lexeme = self.buffer[self.index .. self.index + 1];
            self.index += 1;
            break :blk .{
                .tag = .symbol,
                .lexeme = lexeme,
            };
        },
    };
}

/// Tokenizes `source` and asserts that the produced tag sequence equals
/// `expected_token_tags` exactly, with no tokens left over afterwards.
/// On a mismatch the offending token is logged before the error propagates.
fn testTokenize(source: [:0]const u8, expected_token_tags: []const Token.Tag) !void {
    var tokenizer = Tokenizer.init(source);
    for (expected_token_tags) |want| {
        const got = tokenizer.next().?;
        std.testing.expectEqual(want, got.tag) catch |err| {
            std.log.err("token: {}", .{got});
            return err;
        };
    }
    // the expected list must account for the entire input
    try std.testing.expect(tokenizer.next() == null);
}

test "tokenizer" {
    // single keyword
    try testTokenize("static", &.{.keyword});
    // leading whitespace, then keywords, identifiers, and symbols mixed
    try testTokenize(
        "\t let a = Array.new(length);",
        &.{ .keyword, .identifier, .symbol, .identifier, .symbol, .identifier, .symbol, .identifier, .symbol, .symbol },
    );
}

pub const Token = struct {
    tag: Tag,
    // raw slice of the source buffer (excluding quotes for string constants)
    lexeme: String,
    // set only when tag == .keyword
    keyword: ?KeyWord = null,
    // set only when tag == .integerConstant
    number: ?usize = null,

    pub const Tag = enum(u3) {
        keyword,
        symbol,
        identifier,
        integerConstant,
        stringConstant,
    };

    pub const KeyWord = enum {
        class,
        method,
        function,
        constructor,
        int,
        boolean,
        char,
        void,
        @"var",
        static,
        field,
        let,
        do,
        @"if",
        @"else",
        @"while",
        @"return",
        true,
        false,
        null,
        this,
    };

    const primitive_types = &[_]KeyWord{ .int, .char, .boolean };
    // a type is either a primitive keyword or a class-name identifier
    pub const types = .{ primitive_types, Tag.identifier };

    /// Expected a symbol (char), Tag, KeyWord, symbol list (String), KeyWord list
    /// or a tuple containing any combination of these types.
    /// A tuple matches if ANY of its elements matches (logical OR).
    pub fn match(token: Token, expect: anytype) bool {
        const Child = std.meta.Child;
        const Type = @TypeOf(expect);

        return switch (Type) {
            // char literal, e.g. '(' — only symbols have a 1-byte lexeme
            comptime_int => token.tag == .symbol and token.lexeme[0] == expect,
            Tag => token.tag == expect,
            KeyWord => token.keyword == expect,
            else => switch (@typeInfo(Type)) {
                // tag or keyword literal
                .enum_literal => el: {
                    const inferred_type = if (@hasField(Tag, @tagName(expect))) Tag else KeyWord;
                    break :el token.match(@as(inferred_type, expect));
                },

                .pointer => switch (Child(Child(Type))) {
                    // symbol list: string literal
                    u8 => token.tag == .symbol and
                        std.mem.indexOfScalar(u8, expect, token.lexeme[0]) != null,
                    // keyword list
                    KeyWord => token.tag == .keyword and
                        std.mem.indexOfScalar(KeyWord, expect, token.keyword.?) != null,
                    else => unreachable,
                },

                // tuple
                .@"struct" => inline for (expect) |i| {
                    if (token.match(i)) break true;
                } else false,

                else => unreachable,
            },
        };
    }

    /// Writes the token as an XML element, e.g. `<symbol> &lt; </symbol>`.
    /// Every byte of the lexeme is escaped individually, so reserved XML
    /// characters anywhere inside a multi-byte lexeme (notably in string
    /// constants) are escaped — previously only lexeme[0] was inspected and
    /// a match replaced the ENTIRE lexeme, dropping the rest of its content.
    pub fn format(self: Token, comptime _: String, _: std.fmt.FormatOptions, w: anytype) !void {
        try w.print("<{s}> ", .{@tagName(self.tag)});
        for (self.lexeme) |c| {
            if (escape(c)) |entity| {
                try w.writeAll(entity);
            } else {
                try w.writeByte(c);
            }
        }
        try w.print(" </{s}>", .{@tagName(self.tag)});
    }

    /// XML entity replacement for a reserved character, or null when the
    /// character may be emitted verbatim.
    fn escape(c: u8) ?String {
        return switch (c) {
            '<' => "&lt;",
            '>' => "&gt;",
            '"' => "&quot;",
            '&' => "&amp;",
            else => null,
        };
    }
};
