/**
 * A lexer tokenizes lines of code into lines of tokens.
 * huizi 2024
 */

import {Terminals} from './terminal.js';
import {token} from './token.js';

/**
 * Creates a lexer that turns source text into tokens using the terminal
 * definitions from `Terminals`.
 *
 * @returns {{tokenize: (lines: string|string[]) => Array}} frozen lexer API;
 *          `tokenize` returns a flat token array for a single string, or an
 *          array of per-line token arrays for an array of lines.
 */
function lexer(){
    /**
     * Tokenize a single line of source text.
     *
     * @param {string} line - one line of source
     * @param {number} [lineNum=1] - 1-based line number recorded on each token
     * @returns {Array} tokens produced from the line
     */
    function tokenizeLine(line, lineNum = 1){
        const tokens = [];
        // Fresh regex per call: /g regexes are stateful via lastIndex,
        // so a shared instance would carry position between lines.
        const regex = new RegExp(Terminals.regex(), "gm");
        let match = null;
        while ((match = regex.exec(line)) !== null) {
            const index = match.index;
            // At most one named group holds a value per match; emit a token
            // for it and skip the undefined alternatives.
            for (const g in match.groups) {
                const lexeme = match.groups[g];
                if (lexeme === undefined) continue;

                const terminal = Terminals.valueOf(g);
                const start = index + match[0].indexOf(lexeme);
                const end = start + lexeme.length;
                tokens.push(token(terminal, lexeme, lineNum, start, end));
                break;
            }
            // Zero-length match: lastIndex did not advance past the match
            // start, so bail out to avoid an infinite loop.
            if (index === regex.lastIndex) break;
        }
        return tokens;
    }

    /**
     * Tokenize multiple lines.
     *
     * Bug fix: `tokenize` passes an *array* here, but the original body
     * called `lines.split("\n")`, which exists only on strings and threw a
     * TypeError for every array input. Accept both shapes.
     *
     * @param {string|string[]} lines - source as one newline-joined string
     *        or as an array of line strings
     * @param {number} [startLineNum=1] - line number assigned to the first line
     * @returns {Array<Array>} one token array per input line
     */
    function tokenizeLines(lines, startLineNum = 1){
        const lineList = Array.isArray(lines) ? lines : lines.split("\n");
        return lineList.map((line, i) => tokenizeLine(line, startLineNum + i));
    }

    return Object.freeze({
        // A string is treated as a single line (flat token array);
        // an array of strings yields an array of per-line token arrays.
        tokenize: lines => {
            if (Array.isArray(lines)) {
                return tokenizeLines(lines);
            } else {
                return tokenizeLine(lines);
            }
        }
    });
}

export {lexer};