import 'dart:collection';
import 'dart:convert';

import '../error/error_msg.dart';
import 'core.dart';

/**
 *   PACKAGE:
 *   USER: Administrator
 *   TIME: 2021/1/3 12:40
 *   Description: Lexical analyzer (tokenizer)
 */

/// States of the lexer's finite-state machine (driven by [getToken]).
enum LexerStatus {
  INITIAL_STATUS, // initial state, no token in progress
  COMMENT_STATUS, // inside a '#' line comment (ends at newline)
  NUM_STATUS, // inside a numeric literal
  String_STATUS, // inside a "..." string literal
  String_STATUS1, // just consumed a '\' inside a string (escape sequence)
  IDENTIFIER_STATUS, // inside an identifier or keyword
  REG_STATUS, // inside a '...' regular-expression literal
}

/// Every kind of token the lexer can emit.
///
/// The `_T` suffix avoids clashes with Dart reserved words
/// (e.g. [RETURN_T], [NULL_T], [TRUE_T], [FALSE_T], [ASSIGN_T]).
enum TokenType {
  Invalid, // lexical error; details in TokenExpression.error
  INT_LITERAL,
  DOUBLE_LITERAL,
  STRING_LITERAL, // "..."
  COMPLEX_LITERAL, // number followed by an 'i'/'j' suffix
  IDENTIFIER,
  GLOBAL, // $name
  FUNCTION,
  IF,
  ELSE,
  ELSIF,
  WHILE,
  FOR,
  RETURN_T,
  BREAK,
  CONTINUE,
  NULL_T,
  LP, // (
  RP, // )
  LC, // {
  RC, // }
  SEMICOLON, // ;
  COMMA, // ,
  ASSIGN_T, // =
  LOGICAL_AND, // &&
  LOGICAL_OR, // ||
  EQ, // ==
  NE, // !=
  GT, // >
  GE, // >=
  LT, // <
  LE, // <=
  LB, // [
  RB, // ]
  ADD, // +
  SUB, // -
  MUL, // *
  DIV, // /
  MOD, // %
  COLON, // :
  TRUE_T,
  FALSE_T,
  SWITCH,
  CASE,
  DEFAULT,
  TRY,
  CATCH,
  FINALLY,
  IMPORT,
  DOT, // .
  AS,
  EXCLAMATION, // !
  MOD_ASSIGN_T, // %=
  BIT_AND_ASSIGN_T, // &=
  BIT_AND, // &
  DECREMENT, // --
  INCREMENT, // ++
  MUL_ASSIGN_T, // *=
  ADD_ASSIGN_T, // +=
  SUB_ASSIGN_T, // -=
  DIV_ASSIGN_T, // /=
  BIT_XOR_ASSIGN_T, // ^=
  DOT_MUL, // .*
  DOT_DIV, // ./
  BIT_XOR, // ^
  BIT_OR_ASSIGN_T, // |=
  BIT_OR, // |
  BIT_NOT, // ~
  CONST,
  REGEXP_LITERAL, // '...'
  END_OF_FILE
}

/// Maps reserved words to their token types.
///
/// Consulted by [getToken] only after a complete identifier has been
/// scanned, so keywords never match prefixes of longer identifiers.
Map<String, TokenType> keyWord = {
  "function": TokenType.FUNCTION,
  "if": TokenType.IF,
  "else": TokenType.ELSE,
  "elsif": TokenType.ELSIF,
  "switch": TokenType.SWITCH,
  "case": TokenType.CASE,
  "default": TokenType.DEFAULT,
  "while": TokenType.WHILE,
  "for": TokenType.FOR,
  "return": TokenType.RETURN_T,
  "continue": TokenType.CONTINUE,
  "break": TokenType.BREAK,
  "null": TokenType.NULL_T,
  "true": TokenType.TRUE_T,
  "false": TokenType.FALSE_T,
  "try": TokenType.TRY,
  "catch": TokenType.CATCH,
  "finally": TokenType.FINALLY,
  "import": TokenType.IMPORT,
  "const": TokenType.CONST,
  "as": TokenType.AS
};

/// Holds all information produced for a single token.
class TokenExpression {
  TokenType tokenType; // kind of token
  String identifier; // identifier text (set when not a keyword)
  int line_number; // line number — NOTE(review): never assigned in this file
  int l_number; // column number within the line
  Expression expression; // literal value for number/string/regexp tokens
  String error; // error message when tokenType is Invalid
  // The fields below are not written by the lexer itself; presumably they
  // are workspace slots used by the parser — verify against the caller.
  LinkedList<Parameter> parameter_list;
  LinkedList<ArgumentList> argument_list;
  Statement statement;
  LinkedList<Statement> statement_list;
  Block block;
  LinkedList<Expression> expressionList;
  LinkedList<CaseList> case_list;
  CaseList case_;
  LinkedList<Elsif> elsif_list;
  Elsif elsif;
  LinkedList<IdentifierList> identifier_list;
  // static Array array;

  /// Creates a token with [TokenType.Invalid] and freshly allocated
  /// containers; [case_] and [elsif] are left unset.
  TokenExpression() {
    this.tokenType = TokenType.Invalid;
    expression = Expression();
    parameter_list = LinkedList();
    argument_list = LinkedList();
    statement = Statement();
    statement_list = LinkedList();
    block = Block();
    elsif_list = LinkedList();
    identifier_list = LinkedList();
    expressionList = LinkedList();
    case_list = LinkedList();
    // array = Array();
  }

  /// Creates a token carrying only [tokenType]; all other fields stay unset.
  TokenExpression.tokenType(TokenType tokenType) {
    this.tokenType = tokenType;
  }

  @override
  String toString() {
    return tokenType.toString();
  }
}

/// Scans and returns the next token from the global source buffer.
///
/// Reads characters from [code] starting at [current_pos] (globals declared
/// in core.dart), drives the [LexerStatus] state machine, and returns a
/// [TokenExpression] describing the token. Returns [TokenType.END_OF_FILE]
/// when the input is exhausted, and [TokenType.Invalid] with
/// [TokenExpression.error] set on a lexical error.
TokenExpression getToken() {
  List<int> current_token = []; // UTF-8 code units of the token being built
  LexerStatus status = LexerStatus.INITIAL_STATUS; // lexer state machine
  TokenExpression tokenExpression = TokenExpression();
  String temp1;

  while (true) {
    if (current_pos >= code_len) {
      tokenExpression.tokenType = TokenType.END_OF_FILE;
      return tokenExpression;
    }
    current_char = code[current_pos];
    current_pos++;
    current_posL++;

    // Newline: reset the column, advance the line, and close a line comment.
    if (current_char == 0x0A) {
      current_posL = 0;
      current_posH++;
      if (status == LexerStatus.COMMENT_STATUS) {
        status = LexerStatus.INITIAL_STATUS;
      }
      continue;
    }

    switch (status) {
      case LexerStatus.INITIAL_STATUS:
        break;
      case LexerStatus.IDENTIFIER_STATUS:
        // Identifier continues while the character is [A-Za-z0-9_].
        if (current_char == 0x5F ||
            (current_char >= 0x41 && current_char <= 0x5A) ||
            (current_char >= 0x61 && current_char <= 0x7A) ||
            (current_char >= 0x30 && current_char <= 0x39)) {
          current_token.add(current_char);
          continue;
        } else {
          status = LexerStatus.INITIAL_STATUS;
          temp1 = utf8.decode(current_token);
          // Push back the character that terminated the identifier.
          current_pos--;
          current_posL--;
          // Keywords are recognized after the whole identifier is scanned.
          if (keyWord.containsKey(temp1)) {
            // BUG FIX: the GLOBAL check previously ran AFTER tokenType was
            // overwritten from keyWord (whose values never include GLOBAL),
            // so it was dead code. Check first so that a keyword used as a
            // $global name (e.g. `$if`) is reported as an error.
            if (tokenExpression.tokenType == TokenType.GLOBAL) {
              tokenExpression.tokenType = TokenType.Invalid;
              tokenExpression.error =
                  "[${temp1}]${errorMsg[ErrorMsg.ERROR_KEYWORD]}";
            } else {
              tokenExpression.tokenType = keyWord[temp1];
            }
          } else {
            tokenExpression.identifier = temp1;
          }

          return tokenExpression;
        }
      case LexerStatus.NUM_STATUS:
        // Number continues while the character is a digit or '.'.
        if ((current_char >= 0x30 && current_char <= 0x39) ||
            current_char == 0x2E) {
          current_token.add(current_char);
          continue;
        } else {
          status = LexerStatus.INITIAL_STATUS;
          temp1 = utf8.decode(current_token);
          Expression expression = Expression();
          if (temp1.contains('.')) {
            // A trailing '.' or more than one '.' is malformed.
            if (temp1.endsWith('.') || (temp1.split('.').length > 2)) {
              tokenExpression.tokenType = TokenType.Invalid;
              tokenExpression.error =
                  "[${temp1}]${errorMsg[ErrorMsg.ERROR_NUM]}";
              return tokenExpression;
            }

            expression.type = ExpressionType.DOUBLE_EXPRESSION;
            expression.double_value = double.parse(temp1);
            tokenExpression.tokenType = TokenType.DOUBLE_LITERAL;
          } else {
            expression.type = ExpressionType.INT_EXPRESSION;
            expression.int_value = int.parse(temp1);
          }
          // BUG FIX: this condition had an unbalanced parenthesis
          // (`if (current_char == 0x69)) {`), which did not compile, and it
          // only tested 'i' although the original comment promises "i,j".
          if (current_char == 0x69 || current_char == 0x6A) {
            // An 'i' or 'j' suffix marks a complex (imaginary) literal.
            expression.type = ExpressionType.COMPLEX_EXPRESSION;
            tokenExpression.tokenType = TokenType.COMPLEX_LITERAL;
          } else {
            // Push back the character that terminated the number.
            current_pos--;
            current_posL--;
          }
          tokenExpression.expression = expression;
          return tokenExpression;
        }
      case LexerStatus.String_STATUS:
        if (current_char == 0x5C) {
          // Backslash starts an escape sequence.
          status = LexerStatus.String_STATUS1;
          continue;
        }
        if (current_char == 0x22) {
          // Closing double quote ends the string literal.
          status = LexerStatus.INITIAL_STATUS;
          Expression expression = Expression();
          expression.type = ExpressionType.STRING_EXPRESSION;
          expression.string_value = utf8.decode(current_token);
          tokenExpression.expression = expression;
          return tokenExpression;
        }
        current_token.add(current_char);
        continue;
      case LexerStatus.String_STATUS1: // escape sequence inside a string
        switch (current_char) {
          case 0x61: // \a -> BEL
            current_token.add(0x07);
            break;
          case 0x62: // \b -> BS
            current_token.add(0x08);
            break;
          case 0x66: // \f -> FF
            current_token.add(0x0C);
            break;
          case 0x6E: // \n -> LF
            current_token.add(0x0A);
            break;
          case 0x72: // \r -> CR
            current_token.add(0x0D);
            break;
          case 0x74: // \t -> TAB
            current_token.add(0x09);
            break;
          case 0x76: // \v -> VT
            current_token.add(0x0B);
            break;
          default: // e.g. \" and \\ keep the escaped character itself
            current_token.add(current_char);
        }
        status = LexerStatus.String_STATUS;
        continue;
      case LexerStatus.COMMENT_STATUS:
        // Skip everything; the newline handler above leaves this state.
        continue;
      case LexerStatus.REG_STATUS:
        if (current_char == 0x5C) {
          // Escape: take the next character verbatim.
          current_token.add(code[current_pos]);
          current_pos++;
          current_posL++; // BUG FIX: keep the column counter in sync
          continue;
        }
        if (current_char == 0x27) {
          // Closing single quote ends the regexp literal.
          status = LexerStatus.INITIAL_STATUS;
          Expression expression = Expression();
          expression.type = ExpressionType.REGEXP_EXPRESSION;
          expression.string_value = utf8.decode(current_token);
          tokenExpression.expression = expression;
          return tokenExpression;
        }
        current_token.add(current_char);
        continue;
    }

    // Skip spaces and tabs.
    if (current_char == 0x20 || current_char == 0x09) {
      continue;
    }

    tokenExpression.l_number = current_posL; // column where the token starts

    // Number handling: [0-9]+\.[0-9]+
    if (current_char >= 0x30 && current_char <= 0x39) {
      status = LexerStatus.NUM_STATUS;
      tokenExpression.tokenType = TokenType.INT_LITERAL;
      current_token.add(current_char);
      continue;
    }
    // Identifier handling [A-Za-z_][A-Za-z_0-9]*;
    // keywords are recognized once the identifier is complete.
    if (current_char == 0x5F ||
        (current_char >= 0x41 && current_char <= 0x5A) ||
        (current_char >= 0x61 && current_char <= 0x7A)) {
      status = LexerStatus.IDENTIFIER_STATUS;
      tokenExpression.tokenType = TokenType.IDENTIFIER;
      current_token.add(current_char);
      continue;
    }

    switch (current_char) {
      case 0x0D: // carriage return: ignore
        break;
      case 0x21: //!
        if (code[current_pos] == 0x3D) {
          //!=
          current_pos++;
          current_posL++;
          tokenExpression.tokenType = TokenType.NE;
          return tokenExpression;
        }
        tokenExpression.tokenType = TokenType.EXCLAMATION;
        return tokenExpression;
      case 0x22: // string literal; newlines allowed [extension]
        status = LexerStatus.String_STATUS;
        tokenExpression.tokenType = TokenType.STRING_LITERAL;
        break;
      case 0x23: // '#' starts a line comment
        status = LexerStatus.COMMENT_STATUS;
        break;
      case 0x24: // '$' starts a global variable
        if (code[current_pos] == 0x5F ||
            (code[current_pos] >= 0x41 && code[current_pos] <= 0x5A) ||
            (code[current_pos] >= 0x61 && code[current_pos] <= 0x7A)) {
          status = LexerStatus.IDENTIFIER_STATUS;
          tokenExpression.tokenType = TokenType.GLOBAL;
        } else {
          tokenExpression.tokenType = TokenType.Invalid;
          // BUG FIX: `temp1` was never assigned on this path (it reported
          // "[null]"); report the offending character after '$' instead.
          tokenExpression.error =
              "[${String.fromCharCode(code[current_pos])}]${errorMsg[ErrorMsg.ERROR_NUM]}";
          return tokenExpression;
        }
        break;
      case 0x25: //%
        if (code[current_pos] == 0x3D) {
          //%=
          current_pos++;
          current_posL++;
          tokenExpression.tokenType = TokenType.MOD_ASSIGN_T;
          return tokenExpression;
        }
        tokenExpression.tokenType = TokenType.MOD;
        return tokenExpression;
      case 0x26: //&
        if (code[current_pos] == 0x26) {
          //&&
          current_pos++;
          current_posL++;
          tokenExpression.tokenType = TokenType.LOGICAL_AND;
          return tokenExpression;
        } else if (code[current_pos] == 0x3D) {
          //&=
          current_pos++;
          current_posL++;
          tokenExpression.tokenType = TokenType.BIT_AND_ASSIGN_T;
          return tokenExpression;
        }
        tokenExpression.tokenType = TokenType.BIT_AND;
        return tokenExpression;
      case 0x27: // ' starts a regexp literal
        status = LexerStatus.REG_STATUS;
        tokenExpression.tokenType = TokenType.REGEXP_LITERAL;
        break;
      case 0x28: //(
        tokenExpression.tokenType = TokenType.LP;
        return tokenExpression;
      case 0x29: //)
        tokenExpression.tokenType = TokenType.RP;
        return tokenExpression;
      case 0x2A: //*
        if (code[current_pos] == 0x3D) {
          //*=
          current_pos++;
          current_posL++;
          tokenExpression.tokenType = TokenType.MUL_ASSIGN_T;
          return tokenExpression;
        }
        tokenExpression.tokenType = TokenType.MUL;
        return tokenExpression;
      case 0x2B: //+
        if (code[current_pos] == 0x3D) {
          //+=
          current_pos++;
          current_posL++;
          tokenExpression.tokenType = TokenType.ADD_ASSIGN_T;
          return tokenExpression;
        } else if (code[current_pos] == 0x2B) {
          //++
          current_pos++;
          current_posL++;
          tokenExpression.tokenType = TokenType.INCREMENT;
          return tokenExpression;
        }
        tokenExpression.tokenType = TokenType.ADD;
        return tokenExpression;
      case 0x2C: //,
        tokenExpression.tokenType = TokenType.COMMA;
        return tokenExpression;
      case 0x2D: //-
        if (code[current_pos] == 0x3D) {
          //-=
          current_posL++;
          current_pos++;
          tokenExpression.tokenType = TokenType.SUB_ASSIGN_T;
          return tokenExpression;
        } else if (code[current_pos] == 0x2D) {
          //--
          current_pos++;
          current_posL++;
          tokenExpression.tokenType = TokenType.DECREMENT;
          return tokenExpression;
        }
        tokenExpression.tokenType = TokenType.SUB;
        return tokenExpression;
      case 0x2E: //.
        if (code[current_pos] == 0x2A) {
          //.*
          current_posL++;
          current_pos++;
          tokenExpression.tokenType = TokenType.DOT_MUL;
          return tokenExpression;
        } else if (code[current_pos] == 0x2F) {
          //./
          current_posL++;
          current_pos++;
          tokenExpression.tokenType = TokenType.DOT_DIV;
          return tokenExpression;
        }
        tokenExpression.tokenType = TokenType.DOT;
        return tokenExpression;
      case 0x2F: // /
        if (code[current_pos] == 0x3D) {
          // /=
          current_posL++;
          current_pos++;
          tokenExpression.tokenType = TokenType.DIV_ASSIGN_T;
          return tokenExpression;
        }
        tokenExpression.tokenType = TokenType.DIV;
        return tokenExpression;
      case 0x3A: //:
        tokenExpression.tokenType = TokenType.COLON;
        return tokenExpression;
      case 0x3B: //;
        tokenExpression.tokenType = TokenType.SEMICOLON;
        return tokenExpression;
      case 0x3C: //<
        if (code[current_pos] == 0x3D) {
          //<=
          current_posL++;
          current_pos++;
          tokenExpression.tokenType = TokenType.LE;
          return tokenExpression;
        }
        tokenExpression.tokenType = TokenType.LT;
        return tokenExpression;
      case 0x3D: //=
        if (code[current_pos] == 0x3D) {
          //==
          current_posL++;
          current_pos++;
          tokenExpression.tokenType = TokenType.EQ;
          return tokenExpression;
        }
        tokenExpression.tokenType = TokenType.ASSIGN_T;
        return tokenExpression;
      case 0x3E: //>
        if (code[current_pos] == 0x3D) {
          //>=
          current_posL++;
          current_pos++;
          tokenExpression.tokenType = TokenType.GE;
          return tokenExpression;
        }
        tokenExpression.tokenType = TokenType.GT;
        return tokenExpression;
      // case 0x3F://?
      //   return TokenExpression.tokenType(TokenType.LT);
      // case 0x40://@
      //   return TokenExpression.tokenType(TokenType.LT);
      case 0x5B: //[
        tokenExpression.tokenType = TokenType.LB;
        return tokenExpression;
      // case 0x5C://\
      //   return TokenExpression.tokenType(TokenType.RB);
      case 0x5D: //]
        tokenExpression.tokenType = TokenType.RB;
        return tokenExpression;
      case 0x5E: //^
        if (code[current_pos] == 0x3D) {
          //^=
          current_posL++;
          current_pos++;
          tokenExpression.tokenType = TokenType.BIT_XOR_ASSIGN_T;
          return tokenExpression;
        }
        tokenExpression.tokenType = TokenType.BIT_XOR;
        return tokenExpression;
      // case 0x60://`
      //   return TokenExpression.tokenType(TokenType.LT);
      case 0x7B: //{
        tokenExpression.tokenType = TokenType.LC;
        return tokenExpression;
      case 0x7C: //|
        if (code[current_pos] == 0x7C) {
          //||
          current_posL++;
          current_pos++;
          tokenExpression.tokenType = TokenType.LOGICAL_OR;
          return tokenExpression;
        } else if (code[current_pos] == 0x3D) {
          //|=
          current_posL++;
          current_pos++;
          tokenExpression.tokenType = TokenType.BIT_OR_ASSIGN_T;
          return tokenExpression;
        }
        tokenExpression.tokenType = TokenType.BIT_OR;
        return tokenExpression;
      case 0x7D: //}
        tokenExpression.tokenType = TokenType.RC;
        return tokenExpression;
      case 0x7E: //~
        tokenExpression.tokenType = TokenType.BIT_NOT;
        return tokenExpression;
      default: // unrecognized character
        current_token.add(current_char);
        tokenExpression.error =
            "[${current_char.toString()}] ${errorMsg[ErrorMsg.ERROR_UNKNOWN]}";
        return tokenExpression;
    }
  }
}
