import {createToken, Lexer} from "chevrotain";

// General identifier: a letter followed by any number of word characters.
// Also serves as the `longer_alt` fallback for the "div" keyword token,
// so inputs like "divX" lex as a single Identifier rather than "div"+"X".
const Identifier = createToken({name: "Identifier", pattern: /[a-zA-Z]\w*/});


// A little mini DSL for easier lexer definition.
const fragments = {};
const f = fragments;

/**
 * Registers a named regex fragment for later composition via makePattern.
 * @param {string} name - Key under which the fragment is stored on `fragments`.
 * @param {string|RegExp} def - Fragment text; a RegExp contributes its `source`.
 */
function FRAGMENT(name, def) {
    if (typeof def === "string") {
        fragments[name] = def;
    } else {
        fragments[name] = def.source;
    }
}

/**
 * Tagged template that composes a RegExp from literal text plus interpolated
 * fragments (strings or RegExps) and chevrotain TokenTypes (their PATTERN).
 *
 * Uses the template's RAW strings (`strings.raw`) rather than the cooked
 * form: the cooked string silently turns a regex escape like `\d` into `d`,
 * corrupting the composed pattern. Raw strings preserve the backslash.
 *
 * @param {TemplateStringsArray} strings - Literal parts of the template.
 * @param {...(string|RegExp|{PATTERN: string|RegExp})} args - Interpolations.
 * @returns {RegExp} The combined pattern.
 */
function makePattern(strings, ...args) {
    let combined = "";
    for (let i = 0; i < strings.length; i++) {
        // Raw form keeps backslash escapes exactly as written in the template.
        combined += strings.raw[i];
        if (i < args.length) {
            let pattern = args[i];
            // If a chevrotain TokenType was passed, use its PATTERN.
            if (args[i].PATTERN) {
                pattern = args[i].PATTERN;
            }
            const patternSource =
                typeof pattern === "string" ? pattern : pattern.source;
            // By wrapping in a RegExp non-capturing group
            // we enable the safe usage of quantifiers and assertions.
            combined += `(?:${patternSource})`;
        }
    }
    return new RegExp(combined);
}

// Reusable regex fragments, composed bottom-up.
// (Removed leftover debug console.log statements.)
FRAGMENT(
    "NameStartChar",
    "([a-zA-Z])",
);

// A name character: a name-start char, "-", "_", "\d", or "]".
// NOTE(review): "\d" is cooked to plain "d" unless makePattern reads the
// template's raw strings — verify digits are actually matched here.
FRAGMENT(
    "NameChar",
    makePattern`${f.NameStartChar}|-|_|\d|]`,
);

// A full name: a start char followed by any number of name chars.
FRAGMENT("Name", makePattern`${f.NameStartChar}(${f.NameChar})*`);

// "Sea" of whitespace between meaningful tokens: spaces, tabs, and
// Unix/Windows newlines, greedily grouped into a single token.
const SEA_WS = createToken({
    name: "SEA_WS",
    pattern: /( |\t|\n|\r\n)+/,
});

// Opening brace. NOTE(review): same pattern as LCurly below, but only OPEN
// is registered in allTokens — LCurly looks redundant; confirm and remove.
const OPEN = createToken({name: "OPEN", pattern: /{/});

// A name as defined by the composed "Name" fragment above.
const Name = createToken({name: "Name", pattern: makePattern`${f.Name}`});

// Closing brace. NOTE(review): pop_mode is only meaningful in a multi-mode
// lexer, but the Lexer below is built from a flat token array — confirm
// whether this option is intentional or leftover from a multi-mode example.
const CLOSE = createToken({name: "CLOSE", pattern: /}/, pop_mode: true});

// The "div" keyword. longer_alt lets a longer match (e.g. "divX") lex as
// Identifier instead of stopping the keyword match at "div".
const div = createToken({
    name: "div",
    pattern: /div/,
    longer_alt: Identifier,
});

// NOTE(review): not included in allTokens below — appears unused
// (OPEN already covers "{"); confirm and remove.
const LCurly = createToken({
    name: "LCurly",
    pattern: /{/
});

// NOTE(review): not included in allTokens below — appears unused
// (CLOSE already covers "}"); confirm and remove.
const RCurly = createToken({
    name: "RCurly",
    pattern: /}/
});

// SEA_WS is placed first: whitespace is the most common token, so matching
// it early speeds up the lexer. (The original comment claimed this ordering
// but the array did not actually do it.) Its pattern overlaps no other
// token here, so the reordering cannot change tokenization results.
const allTokens = [
    SEA_WS,
    div,
    OPEN,
    Name,
    CLOSE,
    Identifier,
];
const SelectLexer = new Lexer(allTokens);

const inputText = "div { div { 123 }}";
const lexingResult = SelectLexer.tokenize(inputText);

// Surface lexing errors instead of silently dropping them —
// "123" matches no token above, so errors are expected for this input.
if (lexingResult.errors.length > 0) {
    console.error(lexingResult.errors);
}
console.log(lexingResult.tokens);