import {
  PNode,
  isConvergenced,
  isFullNode,
  isNoChildrenNode,
  isNotFullNode,
} from "./utils";
import {
  LexerToken,
  NegateToken,
  RootToken,
  LexerTokenType,
  ParStartToken,
  FunctionToken,
} from "./lexerToken";

/**
 * A node of the parse tree built by {@link Parser}.
 */
export interface ParserNode {
  // The lexer token this node was created from; mutated in place when the
  // node converges (type flipped to `oppostie`, level raised).
  token: LexerToken;
  // Child nodes; string entries are raw leaf values.
  children: Array<ParserNode | string>;
  // Set to children.length when the node converges — presumably the
  // capacity checked by isFullNode in ./utils; TODO confirm there.
  maxChildren: number;
}

/**
 * Syntax analyzer: consumes lexer tokens one at a time (via `push`) and
 * builds a ParserNode tree on an explicit stack. The stack always starts
 * with a single ROOT node; `end()` returns the finished tree and resets.
 */
class Parser {
  stack: ParserNode[] = [];

  constructor() {
    this.stack.push(PNode.genPNodeByToken(new RootToken()));
  }

  /** Node currently on top of the stack (undefined when the stack is empty). */
  get top() {
    return this.stack[this.stack.length - 1];
  }

  /**
   * Handle a NUMBER token.
   * @throws Error when the previous item is already a full node.
   */
  matchNumber(token: LexerToken) {
    return this.pushValueNode(token, "数字前一项不能是满项");
  }

  /**
   * Handle an OPERATOR token.
   * Infix position: a higher-precedence operator "robs" the top node's last
   * child; otherwise full nodes are linked into their parents and the
   * operator adopts the old top as its child ("retire").
   * Prefix position: only unary "-" (negate) and a no-op "+" are allowed.
   * @throws Error for any other operator used in prefix position.
   */
  matchOperator(token: LexerToken) {
    if (isFullNode(this.top) /** infix / postfix position */) {
      if (token.level > this.top.token.level) {
        return this.rob(token, this.top.children);
      }
      this.link(token);
      return this.retire(token);
    }
    // Prefix position.
    if (token.value === "-")
      return this.stackPush(PNode.genPNodeByToken(new NegateToken()));
    if (token.value === "+") return; // unary plus is a no-op
    throw new Error(token.value + "符号不能前置");
  }

  /**
   * Handle "(" and ")".
   * "(" opens a new group node; ")" collapses the stack back down to the
   * matching "(" (or the function call that owns it).
   * @throws Error on a misplaced parenthesis.
   */
  matchPar(token: LexerToken) {
    if (token.type === LexerTokenType.PAR_START) {
      if (isFullNode(this.top)) throw new Error("not a function");
      return this.stackPush(PNode.genPNodeByToken(token));
    }
    if (isNotFullNode(this.top)) throw new Error("Unexpected token )");
    if (
      isNoChildrenNode(this.top) &&
      this.top.token.type !== LexerTokenType.FUNCTION
    ) {
      throw new Error("Unexpected token )");
    }

    // Converge "(" or "function(".
    this.link(new ParStartToken());
    return this.convergenceParEnd();
  }

  /**
   * Handle EOF: verify every stacked node has been converged, then collapse
   * everything into the ROOT node.
   * @throws Error (after resetting the stack) when an unconverged node remains.
   */
  matchEfo() {
    // Check that the whole stack is convergeable.
    for (const node of this.stack) {
      if (!isConvergenced(node)) {
        this.stack = [PNode.genPNodeByToken(new RootToken())];
        throw new Error("没有被收敛");
      }
    }
    // Converge ROOT.
    this.link(new RootToken());
    return this.convergence(() => this.top.token.type === LexerTokenType.ROOT);
  }

  /**
   * Handle a FUNCTION token.
   * @throws Error when it follows a full node or an already-converged node
   *         (after convergence token.type equals token.oppostie).
   */
  matchFunction(token: LexerToken) {
    if (
      isFullNode(this.top) ||
      this.top.token.type === this.top.token.oppostie
    ) {
      throw new Error("Unexpected token" + token.value);
    }
    return this.stackPush(PNode.genPNodeByToken(token));
  }

  /**
   * Handle a "," argument separator inside a function call.
   * @throws Error when there is no complete argument before the comma.
   */
  matchComma(token: LexerToken) {
    if (isNoChildrenNode(this.top)) throw new Error("Unexpected token ,");
    if (isNotFullNode(this.top)) throw new Error("Unexpected token ,");
    this.link(new FunctionToken(""));
    return this.stackPush(PNode.genPNodeByToken(token));
  }

  /**
   * Handle a DATA (data-reference) token.
   * @throws Error when the previous item is already a full node.
   */
  matchData(token: LexerToken) {
    return this.pushValueNode(token, "引用数据前一项不能是满项");
  }

  /**
   * Shared handler for value-like tokens (NUMBER / DATA): fill a slot of the
   * current not-full node, or start a new node on the stack.
   */
  private pushValueNode(token: LexerToken, fullNodeError: string) {
    if (isFullNode(this.top)) throw new Error(fullNodeError);
    const node = PNode.genPNodeByToken(token);
    if (isNotFullNode(this.top)) {
      return this.topChildPush(node);
    }
    return this.stack.push(node);
  }

  /** Append a child to the node on top of the stack. */
  topChildPush(node: ParserNode) {
    this.top.children.push(node);
  }

  /**
   * A higher-precedence operator steals the last child of `next` (the top
   * node's children) and becomes the new top, with that child as its first.
   */
  rob(token: LexerToken, next: Array<string | ParserNode>) {
    const child = next.pop()!;
    const node = PNode.genPNodeByToken(token)!;
    node.children.push(child);
    this.stackPush(node);
  }

  /** Replace the top node with a new operator node that adopts it as a child. */
  retire(token: LexerToken) {
    const oldTop = this.stack.pop()!;
    const node = PNode.genPNodeByToken(token);
    node.children = [oldTop];
    this.stack.push(node);
  }

  /**
   * While the top node is full, the node below can still accept children, and
   * the incoming token does not outrank either of them, fold the top node
   * into its parent (the node below it).
   */
  link(token: LexerToken) {
    while (
      isFullNode(this.top) &&
      isNotFullNode(this.stack[this.stack.length - 2]) &&
      token.level <= this.top.token.level &&
      token.level <= this.stack[this.stack.length - 2].token.level
    ) {
      this.stack[this.stack.length - 2].children.push(this.stack.pop()!);
    }
  }

  /**
   * Converge: pop nodes until `converBy` matches a childless node, then mark
   * that node as converged and hand it the popped nodes (in original order)
   * as its children.
   */
  convergence(converBy: () => boolean) {
    const tempStack: ParserNode[] = [];
    while (
      this.stack.length > 0 &&
      !(converBy() && !this.top.children.length)
    ) {
      tempStack.push(this.stack.pop()!);
    }

    if (converBy()) {
      this.top.token.isConvergenced = true;
      this.top.token.type = this.top.token.oppostie!;
      this.top.token.level = 6; // converged nodes take the highest precedence
      this.top.children = [];
      while (tempStack.length > 0) {
        this.top.children.push(tempStack.pop()!);
      }
      this.top.maxChildren = this.top.children.length;
    }
  }

  /** Converge ")" back to the matching "(" or the function call owning it. */
  convergenceParEnd() {
    return this.convergence(() =>
      [LexerTokenType.PAR_START, LexerTokenType.FUNCTION].includes(
        this.top.token.type
      )
    );
  }

  stackPush(node: ParserNode) {
    this.stack.push(node);
  }

  /** Dispatch a lexer token to its handler; unknown token types are ignored. */
  push(token: LexerToken) {
    switch (token.type) {
      case LexerTokenType.NUMBER:
        this.matchNumber(token);
        break;
      case LexerTokenType.OPERATOR:
        this.matchOperator(token);
        break;
      case LexerTokenType.PAR_START:
      case LexerTokenType.PAR_END:
        this.matchPar(token);
        break;
      case LexerTokenType.EOF:
        this.matchEfo();
        break;
      case LexerTokenType.FUNCTION:
        this.matchFunction(token);
        break;
      case LexerTokenType.COMMA:
        this.matchComma(token);
        break;
      case LexerTokenType.DATA:
        this.matchData(token);
        break;
    }
  }

  /** Finish a parse: return the result node and reset for the next parse. */
  end() {
    const result = this.stack.pop();
    this.stack = [PNode.genPNodeByToken(new RootToken())];
    return result;
  }
}

// Default export kept for existing importers.
export default Parser;
