import { PL00Lexer } from "../antlr/PL00Lexer";
import { PL00Parser } from "../antlr/PL00Parser";
import {
  CommonTokenStream,
  error,
  InputStream,
  Parser,
} from "../../node_modules/antlr4/index.js";
import ILineTokens = monaco.languages.ILineTokens;
import IToken = monaco.languages.IToken;
/**
 * Tokenizer state handed back to Monaco between lines. The PL/0 lexer is
 * stateless per line, so every PL00State is interchangeable with every other.
 */
export class PL00State implements monaco.languages.IState {
  /** Nothing to copy — a fresh instance is equivalent to this one. */
  clone(): monaco.languages.IState {
    const copy = new PL00State();
    return copy;
  }

  /** All PL00 states are equivalent, so any two compare equal. */
  equals(other: monaco.languages.IState): boolean {
    return true;
  }
}
/**
 * Monaco TokensProvider for PL/0. Delegates the per-line work to
 * tokensForLine; the incoming state is ignored because lexing is stateless.
 */
export class PL00TokensProvider implements monaco.languages.TokensProvider {
  /** Initial (and only meaningful) tokenizer state. */
  getInitialState(): monaco.languages.IState {
    return new PL00State();
  }

  /**
   * Tokenizes one line of source.
   * @param line  the raw text of the line
   * @param state previous line's end state (unused — lexing is per-line)
   */
  tokenize(
    line: string,
    state: monaco.languages.IState
  ): monaco.languages.ILineTokens {
    const result = tokensForLine(line);
    return result;
  }
}
// ANTLR's end-of-input marker: lexer.nextToken() yields a token with type -1
// once the line is exhausted (Token.EOF in the antlr4 runtime).
const EOF = -1;
/**
 * A Monaco token built from an ANTLR lexer token. The ANTLR rule name is
 * mapped to a Monaco theme scope of the form "<rulename>.pl00".
 */
class PL00Token implements IToken {
  scopes: string;
  startIndex: number;

  /**
   * @param ruleName   symbolic name of the lexer rule; may be null or
   *                   undefined when the lexer has no symbolic name for the
   *                   token type (e.g. literal-only tokens in symbolicNames)
   * @param startIndex zero-based column where the token starts on the line
   */
  constructor(ruleName: string | null | undefined, startIndex: number) {
    // `== null` deliberately matches undefined as well: the original strict
    // null check let undefined entries through and crashed on toLowerCase().
    if (ruleName == null) {
      this.scopes = "unrecognized.pl00";
    } else {
      this.scopes = ruleName.toLowerCase() + ".pl00";
    }
    this.startIndex = startIndex;
  }
}
/**
 * One line's tokenization result for Monaco: the tokens themselves plus the
 * end-of-line state (always a fresh PL00State, since lexing is stateless).
 */
class PL00LineTokens implements ILineTokens {
  endState: monaco.languages.IState;
  tokens: monaco.languages.IToken[];

  /** @param tokens the tokens found on the line */
  constructor(tokens: monaco.languages.IToken[]) {
    this.tokens = tokens;
    this.endState = new PL00State();
  }
}
/**
 * Lexes a single line of PL/0 source and converts the resulting ANTLR tokens
 * into Monaco tokens. Lexer errors are collected and emitted as additional
 * tokens with scope "error.pl00" at the offending column; the final list is
 * sorted by start index as Monaco requires.
 *
 * @param input the raw text of one editor line
 * @returns the Monaco tokens for the line plus a (stateless) end state
 */
export function tokensForLine(input: string): monaco.languages.ILineTokens {
  const errorStartingPoints: number[] = [];

  // Records the starting column of every lexer error on this line.
  class ErrorCollectorListener extends error.ErrorListener {
    syntaxError(recognizer, offendingSymbol, line, column, msg, e) {
      errorStartingPoints.push(column);
    }
  }

  const chars = new InputStream(input);
  // Lexer setup: swap the default console listener for our collector so
  // errors become editor tokens instead of console noise.
  const lexer = new PL00Lexer(chars);
  lexer.removeErrorListeners();
  lexer.addErrorListener(new ErrorCollectorListener());

  const myTokens: monaco.languages.IToken[] = [];
  // Drain the lexer until it reports end of input (null or EOF token).
  for (
    let token = lexer.nextToken();
    token != null && token.type !== EOF;
    token = lexer.nextToken()
  ) {
    // symbolicNames[type] may be null/undefined; PL00Token maps that case
    // to the "unrecognized.pl00" scope.
    myTokens.push(new PL00Token(lexer.symbolicNames[token.type], token.column));
  }

  // Record every collected error. Pass the bare name "error": PL00Token
  // appends ".pl00" itself, so passing "error.pl00" here would produce the
  // doubled scope "error.pl00.pl00" that theme rules would not match.
  for (const column of errorStartingPoints) {
    myTokens.push(new PL00Token("error", column));
  }

  // Proper numeric comparator: returns 0 for equal start indices, which the
  // previous 1/-1-only comparator never did.
  myTokens.sort((a, b) => a.startIndex - b.startIndex);
  return new PL00LineTokens(myTokens);
}
