from ply import lex
from ply.lex import Lexer, LexToken
import ply.yacc as yacc
from ply.yacc import LRParser
import json, traceback

# Token names for PLY.  lex/yacc introspect this module-level tuple, so the
# names here must match the t_* rules below and the grammar docstrings.
tokens = (
    "STARTUML",    # "@startuml" opening tag
    "START",       # "start" keyword
    "ENDUML",      # "@enduml" closing tag
    "STOP",        # "stop" keyword
    "NEW_LINE",    # line terminator (\n or \r\n)
    "BLANK_CHAR",  # run of horizontal whitespace
    "STEP",        # activity step, e.g. ":do something;"
    "IF_START",    # "if (...) then (...)" line
    "IF_ELSE",     # "else (...)" line
    "IF_END",      # "endif" / "end if" line
)

# Lexer rules.  PLY tries function rules first (in definition order), then
# string rules sorted by decreasing pattern length — so "@startuml" wins
# over "start", and the long IF_START pattern is tried before "start".
t_STARTUML = r"@startuml"
t_START = r"start"
t_ENDUML = r"@enduml"
t_STOP = r"stop"
t_NEW_LINE = r"\r?\n"
def t_BLANK_CHAR(t):
    # Matches horizontal whitespace only ([^\S\r\n] = whitespace minus CR/LF).
    r"[^\S\r\n]+"
    # NOTE(review): returning None (implicitly) discards the token, so the
    # parser never sees BLANK_CHAR — the grammar alternatives that mention
    # BLANK_CHAR appear unreachable.  Confirm whether this rule should
    # `return t` instead.
# NOTE(review): the STEP pattern is greedy; two steps on one physical line
# would lex as a single STEP token (the grammar requires NEW_LINE after each
# step, so such input fails later anyway) — confirm if ":...;" should be
# non-greedy.
t_STEP = r":[^\r\n]+;"
t_IF_START = r"if[^\S\r\n]+\([^\r\n]+\)[^\S\r\n]+then([^\S\r\n]+)*(\([^\r\n]+\))?;?"
t_IF_ELSE = r"else([^\S\r\n]+)*(\([^\r\n]+\))?;?"
t_IF_END = r"end\ if;?|endif;?"


def p_document(p):
    """
    document : STARTUML NEW_LINE START NEW_LINE IF_OR_STEPS STOP NEW_LINE ENDUML
    """
    # Top-level rule: the @startuml/start ... stop/@enduml envelope around
    # the diagram body.  PLY reads the grammar from the docstring above; no
    # semantic value is built — this parser only validates.
    pass


def p_STEP_LINE(p):
    """
    STEP_LINE : STEP NEW_LINE
        | BLANK_CHAR STEP NEW_LINE
    """
    # One activity step line, optionally indented.  The docstring is the PLY
    # grammar rule and must not be edited.  No semantic value is produced;
    # the print is a debug trace.  (Removed a dead `pass` that preceded
    # live code and made the function look finished.)
    print(f"STEP_LINE: {[str(i) for i in p]}")


def p_STEP_LINES(p):
    """
    STEP_LINES : NEW_LINE
        | STEP_LINE
        | STEP_LINES STEP_LINE
    """
    # A non-empty run of step lines; a bare NEW_LINE (blank line) also
    # counts.  The docstring is the PLY grammar rule and must not be edited.
    # Debug trace only.  (Removed a dead `pass` before live code.)
    print(f"STEP_LINES: {[str(i) for i in p]}")


def p_IF_START_WRAP(p):
    """
    IF_START_WRAP : IF_START NEW_LINE
        | BLANK_CHAR IF_START NEW_LINE
    """
    # An "if (...) then (...)" line, optionally indented.  The docstring is
    # the PLY grammar rule and must not be edited.  Debug trace only.
    # (Removed a dead `pass` before live code.)
    print(f"IF_START_WRAP: {[str(i) for i in p]}")


def p_IF_ELSE_WRAP(p):
    """
    IF_ELSE_WRAP : IF_ELSE NEW_LINE
        | BLANK_CHAR IF_ELSE NEW_LINE
    """
    # An "else (...)" line, optionally indented.  The docstring is the PLY
    # grammar rule and must not be edited.  Debug trace only.
    # (Removed a dead `pass` before live code.)
    print(f"IF_ELSE_WRAP: {[str(i) for i in p]}")


def p_IF_END_WRAP(p):
    """
    IF_END_WRAP : IF_END NEW_LINE
        | BLANK_CHAR IF_END NEW_LINE
    """
    # An "endif" / "end if" line, optionally indented.  The docstring is the
    # PLY grammar rule and must not be edited.  Debug trace only.
    # (Removed a dead `pass` before live code.)
    print(f"IF_END_WRAP: {[str(i) for i in p]}")

def p_IF_OR_STEPS(p):
    """
    IF_OR_STEPS : STEP_LINES
        | STEP_LINES IF_OR_STEPS
        | IF_OR_STEPS STEP_LINES
        | IF_START_WRAP IF_OR_STEPS IF_END_WRAP
        | IF_START_WRAP IF_OR_STEPS IF_ELSE_WRAP IF_OR_STEPS IF_END_WRAP
    """
    # Diagram body: step lines freely interleaved with (possibly nested)
    # if/else blocks.  The docstring is the PLY grammar rule and must not be
    # edited.  NOTE(review): alternatives 2 and 3 are both left- and
    # right-recursive, which makes the grammar ambiguous (shift/reduce
    # conflicts resolved by PLY's defaults) — confirm a single-direction
    # list rule wouldn't be preferable.  Debug trace only.
    # (Removed a dead `pass` before live code.)
    print(f"IF_OR_STEPS: {[str(i) for i in p]}")


def t_error(t):
    """Lexer error hook: report the offending token and abort tokenizing.

    PLY calls this when no token rule matches at the current position.
    Raising (instead of ``t.lexer.skip(1)``) makes any lex failure fatal,
    which ``is_ok`` relies on to return False.
    """
    print(f"非法字符 {t}")
    # Carry the offending token in the exception so callers catching
    # Exception see *what* failed instead of an empty error.
    raise Exception(f"illegal character: {t}")


def p_error(p):
    """Parser error hook: report the offending token and abort parsing.

    PLY calls this on a syntax error; ``p`` is the offending token, or
    ``None`` when the error is at end-of-input.  Raising makes any parse
    failure fatal, which ``is_ok`` relies on to return False.
    """
    print("非法词 '%s'" % p)
    # Removed a stale commented-out line referencing a nonexistent `t`.
    # Carry the token in the exception so callers catching Exception see
    # what failed instead of an empty error.
    raise Exception(f"syntax error at token: {p}")

def is_ok(data: str) -> bool:
    """Return True if *data* parses as a supported PlantUML activity diagram.

    Builds a fresh lexer — ``parser.parse`` uses the most recently created
    lexer by default — and a fresh parser, then attempts a full parse.
    Both error hooks raise, so any lex or parse failure yields False.
    """
    # Register the lexer (the returned object is not needed directly).
    lex.lex()

    try:
        # write_tables=False: don't emit parser.out/parser tables from a
        # pure predicate; debug=False replaces the original debug=True,
        # which wrote a parser.out debug log on every call.
        parser: LRParser = yacc.yacc(write_tables=False, debug=False)
        parse_result = parser.parse(data, debug=None)
        print(f"parse_result: {parse_result}")
    except Exception:
        return False
    return True


if __name__ == "__main__":
    sample = """@startuml
start
:接收设备型号生命周期管理请求;
if (验证用户权限) then (有权限)
  :解析请求参数;
  :返回管理结果给客户端;
else (无权限)
  :返回无权限信息;
endif
stop
@enduml"""

    # Tokenize the sample diagram and dump every token for inspection.
    lexer: Lexer = lex.lex()
    lexer.input(sample)
    for token in lexer:
        print(token)

    # Build the parser (no table cache, no debug log) and parse the sample.
    parser: LRParser = yacc.yacc(write_tables=False, debug=None)
    result = parser.parse(sample)
    print(f"result: {result}")
