const Spec = [
  // Whitespace:
  [/^\s+/, null],

  // Comments:
  // Skip single-line comments:
  [/^\/\/.*/, null],
  // Skip multi-line comments:
  [/^\/\/*[\s\S]*?\*\//, null],

  // Symbols, delimiters
  // ; 分号
  [/^;/, ";"],

  // {} 大括号
  [/^\{/, "{"],
  [/^\}/, "}"],

  // () 小括号
  [/^\(/, "("],
  [/^\)/, ")"],

  // , 逗号
  [/^,/, ","],

  // keywords:
  [/^\blet\b/, "let"],
  [/^\bif\b/, "if"],
  [/^\belse\b/, "else"],
  [/^\btrue\b/, "true"],
  [/^\bfalse\b/, "false"],
  [/^\bnull\b/, "null"],

  // Numbers:
  [/^\d+/, "NUMBER"],

  // Identifiers: 标识符
  [/^\w+/, "IDENTIFIER"],

  // Equality operators: ==, !=
  [/^[=!]=/, "EQUALITY_OPERATOR"],

  // Assignment operatores: =, *=, /=, +=, -=
  [/^=/, "SIMPLE_ASSIGN"],
  [/^[\*\/\+\-]=/, "COMPLEX_ASSIGN"],

  // Relational operators: >, >=, <, <=
  [/^[><]=?/, "RELATIONAL_OPERATOR"],

  // Logical operators: &&, ||
  [/^&&/, "LOGICAL_AND"],
  [/^\|\|/, "LOGICAL_OR"],

  // Math operators: +, -, *, /
  [/^[+\-]/, "ADDITIVE_OPERATOR"],
  [/^[*\/]/, "MULTIPLICATIVE_OPERATOR"],

  // Strings:
  // 双引号
  [/^"[^"]*"/, "STRING"],
  // 单引号
  [/^'[^']*'/, "STRING"],
];

/**
 * Lazily pulls tokens from a string, one at a time, according to the
 * module-level `Spec` rule table.
 */
class Tokenizer {
  /**
   * Initializes the tokenizer with an input string and resets the cursor.
   * @param {string} string - the source text to tokenize.
   */
  init(string) {
    this._string = string;
    this._cursor = 0;
  }

  /**
   * Whether the cursor has consumed the entire input.
   * @returns {boolean}
   */
  isEOF() {
    return this._cursor === this._string.length;
  }

  /**
   * Whether unconsumed input remains.
   * @returns {boolean}
   */
  hasMoreTokens() {
    return this._cursor < this._string.length;
  }

  /**
   * Obtains the next token, or null at end of input.
   *
   * Skippable matches (whitespace, comments — tokenType null) are
   * consumed in a loop rather than by self-recursion, so long runs of
   * skippable text cannot grow the call stack.
   *
   * @returns {{type: string, value: string} | null}
   * @throws {SyntaxError} when no rule in `Spec` matches the input.
   */
  getNextToken() {
    while (this.hasMoreTokens()) {
      const string = this._string.slice(this._cursor);
      let matchedSkippable = false;

      for (const [regexp, tokenType] of Spec) {
        const tokenValue = this._match(regexp, string);
        if (tokenValue == null) {
          continue;
        }

        // Null token type means "skip" (e.g. whitespace): the cursor
        // has already advanced, so rescan from the new position.
        if (tokenType == null) {
          matchedSkippable = true;
          break;
        }

        return {
          type: tokenType,
          value: tokenValue,
        };
      }

      if (!matchedSkippable) {
        throw new SyntaxError(`Unexpected token: "${string[0]}"`);
      }
    }
    return null;
  }

  /**
   * Tries a single spec rule against the remaining input, advancing the
   * cursor past the match on success.
   * @param {RegExp} regexp - an anchored (^) rule from `Spec`.
   * @param {string} string - the unconsumed tail of the input.
   * @returns {string | null} the matched text, or null on no match.
   */
  _match(regexp, string) {
    const matched = regexp.exec(string);
    if (matched === null) {
      return null;
    }
    this._cursor += matched[0].length;
    return matched[0];
  }
}

// Public API: CommonJS export of the Tokenizer class.
module.exports = {
  Tokenizer,
};
