const thexasm_compiler = (()=>{
    // Aliases to the host `thex` VM module (register table / machine opcodes).
    // NOTE(review): REG and MCODE are not referenced anywhere in this file —
    // presumably reserved for the code-generation pass; confirm before removing.
    const REG = thex.REG
    const MCODE = thex.MCODE

    // ================================================================

    // Token Struct
    //      x       Column of Char-Begin
    //      y       Row of Char-Begin
    //      v       Token value (text; "" for pure symbol tokens)
    //      t       Token type (the constructor that built the token)
    // Build one token-type constructor. The constructor doubles as the type
    // tag: every token it creates carries the constructor itself in `t`, so
    // token types compare by identity (token.t === Token.X).
    const Token_Make = ()=>{
        const t = (x,y,v)=>{return {x,y,v,t}}
        return t
    }
    const Token = {
        // Name of Function/Address
        ID:Token_Make(),
        // Name of Label
        LABEL:Token_Make(),
        // Number
        INTEGER:Token_Make(),
        FLOAT:Token_Make(),
        // C-Style String
        STRING:Token_Make(),
        // Symbols
        PAREN_L0:Token_Make(),      // (
        PAREN_L1:Token_Make(),      // [
        PAREN_L2:Token_Make(),      // {
        PAREN_R2:Token_Make(),      // }
        PAREN_R1:Token_Make(),      // ]
        PAREN_R0:Token_Make(),      // )
        DOT:Token_Make(),           // .
        SET:Token_Make(),           // :=
        // FIX: ARROWR was missing. MapSymbols returns Token.ARROWR for "->",
        // so without this entry any "->" in the source made Commit() throw
        // ("this.tt is not a function").
        ARROWR:Token_Make(),        // ->
        SELF_ADD:Token_Make(),      // +=
        SELF_SUB:Token_Make(),      // -=
        SELF_MUL:Token_Make(),      // *=
        SELF_DIV:Token_Make(),      // /=
        SELF_MOD:Token_Make(),      // %=
        ADD:Token_Make(),           // +
        SUB:Token_Make(),           // -
        MUL:Token_Make(),           // *
        DIV:Token_Make(),           // /
        MOD:Token_Make(),           // %
        // Special Registers
        REGS_IP:Token_Make(),       // ip
        REGS_SP:Token_Make(),       // sp
        REGS_MSI:Token_Make(),      // msi
        REGS_MSN:Token_Make(),      // msn
        REGS_MDI:Token_Make(),      // mdi
        REGS_MDN:Token_Make(),      // mdn
        REGS_R:Token_Make(),        // r
        REGS_N:Token_Make(),        // n
        // Universal (general-purpose) Registers
        REGS_UNIVERSALLYS:Token_Make(),
        // Float Registers
        REGS_FLOAT:Token_Make(),
        // Named Instructions
        PUSH:Token_Make(),          // push
        PUSHR:Token_Make(),         // pushr
        POP:Token_Make(),           // pop
        JUMP:Token_Make(),          // jump
        // Other Keywords
        SECTION:Token_Make(),       // section
        END:Token_Make(),           // end
        ENDLINE:Token_Make(),       // like C ';'
        // for Error
        UNKNOWN:Token_Make(),
    }

    // Lexer for Source Code to Tokens
    // Input:
    //      code:string     Source code
    // Output: []
    //      Token array; each element is {x,y,v,t} built by a Token.*
    //      constructor. An ENDLINE token is emitted after every newline
    //      and once more at end of input.
    function Lexer(code){
        // Classify a finished word: registers, named instructions, other
        // keywords; everything unrecognized becomes a generic ID.
        // NOTE(review): the msi/msn/mdi/mdn register tokens exist in the
        // Token table but are not matched here, so those names lex as ID —
        // confirm whether that is intended.
        function MapKeywords(v){
            switch (v){
                // Match Registers
                case "ip": return Token.REGS_IP
                case "sp": return Token.REGS_SP
                case "r": return Token.REGS_R
                case "n": return Token.REGS_N
                case "a": case "b": case "c": case "d": case "e": case "f": case "g": case "h": case "i": case "j":
                    return Token.REGS_UNIVERSALLYS
                case "f0": case "f1": case "f2": case "f3": case "f4": case "f5": case "f6": case "f7": case "f8":
                    return Token.REGS_FLOAT
                // Match Operation Keywords
                case "push": return Token.PUSH
                case "pushr": return Token.PUSHR
                case "pop": return Token.POP
                case "jump": return Token.JUMP
                // Match Other Keywords
                case "end": return Token.END
                case "section": return Token.SECTION
                // A lone "." reaches here because '.' is scanned by StateId,
                // not StateSymbol.
                case ".": return Token.DOT
                // Else to Id
                default: return Token.ID
            }
        }
        // Classify a finished operator string (scanned by StateSymbol).
        // Multi-char operators must be delimited by whitespace or parens.
        function MapSymbols(v){
            switch (v){
                case ":=": return Token.SET
                case "+=": return Token.SELF_ADD
                case "+": return Token.ADD
                case "-=": return Token.SELF_SUB
                // NOTE(review): Token.ARROWR must be declared in the Token
                // table; if it is missing, a "->" in the source makes
                // Commit() throw ("this.tt is not a function").
                case "->": return Token.ARROWR
                case "-": return Token.SUB
                case "*=": return Token.SELF_MUL
                case "*": return Token.MUL
                case "/=": return Token.SELF_DIV
                case "/": return Token.DIV
                case "%=": return Token.SELF_MOD
                case "%": return Token.MOD
                default: return Token.UNKNOWN
            }
        }
        // Scan one symbol token. Parens are single-char tokens; any other
        // operator is a maximal run of printable non-paren characters. The
        // run is accumulated in a local string, never in ctx.tv, so the
        // committed token's v stays "" — the type alone identifies it.
        function StateSymbol(ctx){
            // Tail-recursive accumulation until EOF, a paren, or any char
            // <= ' ' (whitespace/control).
            function Loop(v=""){
                switch (ctx.c){
                    case '\0': case '(': case '[': case '{': case '}': case ']': case ')':
                        break
                    default: if (' ' < ctx.c){
                        const vnext = v+ctx.c
                        ctx.Next()
                        return Loop(vnext)
                    }
                }
                return v
            }
            // Emit parens directly (consuming exactly one char); defer all
            // other symbols to Loop + MapSymbols.
            function First(){
                function _(t){
                    ctx.Next()
                    return t
                }
                switch (ctx.c){
                    case '(': return _(Token.PAREN_L0)
                    case '[': return _(Token.PAREN_L1)
                    case '{': return _(Token.PAREN_L2)
                    case '}': return _(Token.PAREN_R2)
                    case ']': return _(Token.PAREN_R1)
                    case ')': return _(Token.PAREN_R0)
                    default: return MapSymbols(Loop())
                }
            }
            // Begin() runs before First() consumes input, so the token
            // records the position of its first character.
            ctx.Begin().Type(First()).Commit()
        }
        // Scan one identifier/keyword token: accumulate chars into ctx.tv
        // until EOF, a paren, an operator character, or whitespace.
        function StateId(ctx){
            function Loop(){
                switch (ctx.c){
                    case '\0': case '(': case '[': case '{': case '}': case ']': case ')':
                    case ':': case '+': case '-': case '*': case '/': case '%': case '=':
                        return
                    default: if (' ' < ctx.c){
                        ctx.CharAdd().Next()
                        Loop()
                    }
                }
            }
            ctx.Begin()
            Loop()
            ctx.Type(MapKeywords(ctx.tv)).Commit()
        }
        // Skip a '#' comment through end of line. The trailing Next() steps
        // off the '\n', which also emits the pending ENDLINE token.
        function StateComment(ctx){
            while (ctx.NotEofOr('\n')){
                ctx.Next()
            }
            ctx.Next()
        }
        // Dispatch on the current character: comment, symbol, identifier,
        // or (for whitespace/control characters) just advance.
        function StatePre(ctx){
            switch (ctx.c){
                case '#': return StateComment(ctx)
                case '(': case '[': case '{': case '}': case ']': case ')':
                case ':': case '+': case '-': case '*': case '/': case '%': case '=':
                    return StateSymbol(ctx)
                default:
                    if (' ' < ctx.c){
                        return StateId(ctx)
                    } else {ctx.Next()}
            }
        }
        const tokens = []
        // Scanner context: a cursor over `code` plus the token under
        // construction. All State* functions mutate this object.
        const ctx = {
            i:0, c:'\0',        // Cur Index/Char
            y:1, x:0,           // Char Row/Column
            ty:0, tx:0,         // Token-Begin Row/Column
            tv:"",              // Token Value
            tt:Token.UNKNOWN,   // Token Type
            // Push the pending token, then reset value/type for the next.
            Commit(){
                tokens.push(this.tt(this.tx,this.ty,this.tv))
                this.tv = ""
                this.tt = Token.UNKNOWN
                return this
            },
            // Record the pending token's type.
            Type(t){
                this.tt = t
                return this
            },
            // Append the current char to the pending token's value.
            CharAdd(){
                this.tv += this.c
                return this
            },
            // Mark the current position as the pending token's start.
            Begin(){
                this.ty = this.y
                this.tx = this.x
                return this
            },
            // True while the current char is neither EOF ('\0') nor c.
            NotEofOr(c){
                return !('\0' == this.c || c == this.c)
            },
            NotEof(){
                return '\0' != this.c
            },
            // Advance one character. Stepping off a '\n' first emits an
            // ENDLINE token; y/x were already advanced when the '\n' was
            // read, so ENDLINE is positioned at column 0 of the row after
            // the newline. Past end of input, c becomes '\0'.
            Next(){
                if (this.i < code.length){
                    if ('\n' == this.c){
                        ctx.Begin().Type(Token.ENDLINE).Commit()
                    }
                    this.c = code[this.i]
                    this.i += 1
                    if ('\n' == this.c){
                        this.y += 1
                        this.x = 0
                    } else {
                        this.x += 1
                    }
                } else {
                    this.c = '\0'
                }
                return this
            },
        }
        ctx.Next()
        while (ctx.NotEof()){
            StatePre(ctx)
        }
        // Always terminate the stream with a final ENDLINE.
        ctx.Begin().Type(Token.ENDLINE).Commit()
        return tokens
    }

    // ================================================================
    // Module: Compiler
    // ================================================================
    // Input:
    //      code:string     Source code
    // Output: []
    //      Bytecode array, Use function map to Bytecode
    function Compile(code){
        // Pass 1: lex the source; AST construction is not implemented yet,
        // so the returned root is always empty.
        function PassSyntax(src){
            const tokens = Lexer(src)
            const root = []     // TODO: grow the AST from `tokens`
            return root
        }
        // Pass 2: lower the syntax tree to IR/bytecode (stub: empty).
        function PassSemantic(ast){
            return []
        }
        return PassSemantic(PassSyntax(code))
    }
    // Debug formatters: render tokens as human-readable text.
    const texts = {
        // Render a token type; v (the scanned text) is used for IDs and for
        // any type without a fixed spelling.
        TokenType(t,v){
            // Token types are constructor functions, so they can key a Map
            // directly (identity comparison).
            const fixed = new Map([
                [Token.PAREN_L0, "("],
                [Token.PAREN_L1, "["],
                [Token.PAREN_L2, "{"],
                [Token.PAREN_R2, "}"],
                [Token.PAREN_R1, "]"],
                [Token.PAREN_R0, ")"],
                [Token.DOT, "."],
                [Token.SET, ":="],
                [Token.SELF_ADD, "+="],
                [Token.SELF_SUB, "-="],
                [Token.SELF_MUL, "*="],
                [Token.SELF_DIV, "/="],
                [Token.SELF_MOD, "%="],
                [Token.ADD, "+"],
                [Token.SUB, "-"],
                [Token.MUL, "*"],
                [Token.DIV, "/"],
                [Token.MOD, "%"],
                [Token.SECTION, "SECTION"],
                [Token.END, "END"],
                [Token.ENDLINE, "ENDLINE"],
            ])
            if (Token.ID === t){
                return `ID:${v}`
            }
            if (fixed.has(t)){
                return fixed.get(t)
            }
            return v
        },
        // Render one token as "RRRR:CC TYPE" (zero-padded row/column).
        Token(t){
            const row = t.y.toString().padStart(4,'0')
            const col = t.x.toString().padStart(2,'0')
            return `${row}:${col} ${this.TokenType(t.t, t.v)}`
        },
        // Render a token array, one token per line.
        Tokens(ts){
            return ts.map((t)=>this.Token(t)).join('\n')
        },
    }
    // Debug entry point: lex `code` and return a printable token listing
    // (one "row:col type" line per token).
    function CompileAsText(code){
        return texts.Tokens(Lexer(code))
    }
    // ================================================================

    // Public API: Compile (source -> IR) and CompileAsText (debug token dump).
    return {Compile, CompileAsText}
})();

