// -*- java -*-
//good reading for error handling: https://javacc.dev.java.net/doc/errorrecovery.html
//check Python.asdl for references on the python implementation of the grammar
// JavaCC/JJTree option block for this C++-targeted grammar; only
// non-default settings are listed.
options
{
    // only non-defaults are given here.
    NODE_SCOPE_HOOK       = true;   // call jjtree*NodeScope()
    NODE_FACTORY          = "jjtree.builder()";
    //NODE_CLASS            = "jjtree.builder()";
    NODE_USES_PARSER      = true;
    STATIC                = false;  // multiple parsers
    COMMON_TOKEN_ACTION   = true;   // CommonTokenAction(Token)
    DEBUG_PARSER          = false;  // must be used to enable tracing 
    DEBUG_LOOKAHEAD       = false;
    DEBUG_TOKEN_MANAGER   = false;  // used to debug the tokens we have generating 

    USER_CHAR_STREAM      = true;
    UNICODE_INPUT         = true;
    ERROR_REPORTING       = true;  // this is NOT used to shut down errors regarding javacc org.python.pydev.parser.jython.PythonGrammar$LookaheadSuccess
                                      // because the generated information is not complete enough if we do it.
    VISITOR_RETURN_TYPE   = "QObject";
    PARSER_INCLUDES       = "PythonTreeBuilder.h";  // pulled into the generated parser translation unit
    PARSER_SUPER_CLASS    = "public PythonGrammar"; // generated parser derives from PythonGrammar
}


PARSER_BEGIN(PythonGrammar30)
protected:
    // Accessors required by the PythonGrammar base class so it can reach
    // the generated parser's internals (tree builder, token stream, input).
    virtual JJTState * getJJTree(){
        return &jjtree;
    }
    virtual TokenPtr getCurrentToken()
    {
        return token;
    }
    virtual void setCurrentToken(const TokenPtr& t)
    {
        token = t;
    }
    // Intentionally a no-op: Python 3 grammar needs no __future__ handling
    // (ImportFrom still calls it for every name imported from __future__).
    void handleFutureImports(const QString & /*importName*/)
    {
    }
    virtual QList<ObjectPtr>* getTokenSourceSpecialTokensList()
    {
        return &token_source->specialTokens;
    }
    // Last position reached by a (possibly failed) lookahead.
    virtual TokenPtr getJJLastPos()
    {
        return jj_lastpos;
    }

    virtual TokenManager* getTokenManager()
    {
        return token_source;
    }
    virtual CharStream* getInputStream()
    {
        return jj_input_stream;
    }

PARSER_END(PythonGrammar30)

// Extra members compiled into the generated token manager. `indent`,
// `indentation[]`, `level` and `parens` are declared by the surrounding
// token-manager support code.
TOKEN_MGR_DECLS:
{
    //Class<?> getConstantsClass(){
    //    return PythonGrammar30Constants.class;
    //}
    
    /**
     * @return The current level of the indentation in the current line.
     */
    int getCurrentLineIndentation(){
        return indent;
    }
    
    /**
     * @return The current level of the indentation.
     */
    int getLastIndentation(){
        return indentation[level];
    }

    
    // Record the indentation seen so far on the current line and pick the
    // lexer state: INDENTATION_UNCHANGED if it equals the last recorded
    // indentation, INDENTING otherwise (INDENT/DEDENT decided later).
    void indenting(int ind) {
        indent = ind;
        if (indent == indentation[level])
            SwitchTo(INDENTATION_UNCHANGED);
        else
            SwitchTo(INDENTING);
    }
}


// Default-state whitespace handling. A physical newline outside any
// parentheses is pushed back one char and re-lexed in a FORCE_NEWLINE*
// state so it can become a NEWLINE token and trigger indent processing.
SKIP :
{
    <SPACE: " ">
|   "\t"
|   "\014"
|   <CONTINUATION: ("\\") ("\r\n"|"\n"|"\r")>
|   <NEWLINE1: ("\r\n"|"\n"|"\r")>
        {
            if (parens == 0) {
                indent = 0;
                input_stream->backup(1);
                if (level == 0)
                    SwitchTo(FORCE_NEWLINE1);
                else
                    SwitchTo(FORCE_NEWLINE2);
            }
        }
}


// At indentation level 0 there can be no DEDENT, so go straight to
// INDENTATION_UNCHANGED.
<FORCE_NEWLINE1> TOKEN :
{ <NEWLINE: ("\n" | "\r")> : INDENTATION_UNCHANGED }


// Inside an indented block: emit the newline (re-tagged as NEWLINE) and
// measure the next line's indentation in the INDENTING state.
<FORCE_NEWLINE2> TOKEN :
{ <NEWLINE2: ("\n" | "\r")> { matchedToken->kind = NEWLINE; }: INDENTING }



// Leading whitespace of a new line; tabs round up to the next multiple
// of 8, a form feed or a blank line resets the count.
<INDENTING, INDENTATION_UNCHANGED> SKIP :
{
    "\t"
        { indenting((indent/8+1)*8); }
|   " "
        { indenting(indent+1); }
|   "\014"
        { indenting(0); }
|   <CRLF1: ("\r\n" | "\n" | "\r")>
        {
            indenting(0);
        }
}


// Indentation unchanged: nothing to emit, fall back to DEFAULT.
<INDENTATION_UNCHANGED> SKIP :
{
    <""> : DEFAULT
}



// Indentation changed: emit a zero-width token that becomes either an
// INDENT (deeper) or a chain of DEDENTs (shallower). The statement-order
// here is delicate; see the level/indentation bookkeeping below.
<INDENTING> TOKEN :
{
    <DEDENT: "">
        {
            if (indent > indentation[level]) {
                // Deeper than the enclosing block: open a new level and
                // re-tag this token as INDENT.
                level++;
                indentation[level] = indent;
                matchedToken->kind=INDENT;
                matchedToken->image = "<INDENT>";
            }
            else if (level > 0) {
                // Shallower: pop levels, chaining one extra DEDENT token
                // per popped level via addDedent().
                TokenPtr t = matchedToken;
                level -= 1;
                while (level > 0 && indent < indentation[level]) {
                    level--;
                    t = addDedent(t);
                }
                if (indent != indentation[level]) {
                    // Does not land on any enclosing indentation level.
                    throw QString("inconsistent dedent %1 %2").arg(t->endLine).arg(t->endColumn);
                }
                t->next.clear();
            }
        } : DEFAULT
}




// INDENT is only ever produced by re-tagging the DEDENT token above; this
// state exists solely to declare the token kind and is never entered.
<UNREACHABLE> TOKEN :
{
    < INDENT:"<INDENT>">
//|     < DEDENT:"<DEDENT>">
}


// Comment at the end of a code line: no newline included, newline logic
// in the DEFAULT state still runs afterwards.
<DEFAULT> SPECIAL_TOKEN: /* COMMENTS 1*/
{
    <TRAILING_COMMENT: "#" (~["\n","\r"])* >{
//    System.out.println("TRAILING_COMMENT "+image);
//    matchedToken.image = image.toString();
    }
}

// Whole-line comment while measuring indentation: swallow its newline and
// restart the indentation count for the following line.
<INDENTING, INDENTATION_UNCHANGED> SPECIAL_TOKEN: /* COMMENTS 2*/
{
    <SINGLE_LINE_COMMENT: "#" (~["\n","\r"])* ("\r\n" | "\n" | "\r")> {
//    System.out.println("SINGLE_LINE_COMMENT "+image);
//    matchedToken.image = image.toString();
        indenting(0);
    }
}

// `parens` tracks open (), {}, [] so that newlines inside brackets are
// treated as plain whitespace (implicit line joining).
TOKEN : /* SEPARATORS */
{
    < LPAREN: "(" > {parens++;}
|   < RPAREN: ")" > {parens--;}
|   < LBRACE: "{" > {parens++;}
|   < RBRACE: "}" > {parens--;}
|   < LBRACKET: "[" > {parens++;}
|   < RBRACKET: "]" > {parens--;}
|   < SEMICOLON: ";" >
|   < COMMA: "," >
|   < DOT: "." >
|   < COLON: ":" >
}


TOKEN : /* OPERATORS */
{
    < PLUS: "+" >
|   < MINUS: "-" >
|   < MINUS_GREATER: "->" >
|   < MULTIPLY: "*" >
|   < DIVIDE: "/" >
|   < FLOORDIVIDE: "//" >
|   < POWER: "**" >
|   < LSHIFT: "<<" >
|   < RSHIFT: ">>" >
|   < MODULO: "%" >
|   < NOT: "~" >
|   < XOR: "^" >
|   < OR: "|" >
|   < AND: "&" >
|   < EQUAL: "=" >
|   < GREATER: ">" >
|   < LESS: "<" >
|   < EQEQUAL: "==" >
|   < EQLESS: "<=" >
|   < EQGREATER: ">=" >
|   < NOTEQUAL: "!=" >
|   < PLUSEQ: "+=" >
|   < MINUSEQ: "-=" >
|   < MULTIPLYEQ: "*=" >
|   < DIVIDEEQ: "/=" >
|   < FLOORDIVIDEEQ: "//=" >
|   < MODULOEQ: "%=" >
|   < ANDEQ: "&=" >
|   < OREQ: "|=" >
|   < XOREQ: "^=" >
|   < LSHIFTEQ: "<<=" >
|   < RSHIFTEQ: ">>=" >
|   < POWEREQ: "**=" >
}

// Python 3 keyword set (no print/exec statements; True/False/None and
// nonlocal are keywords here).
TOKEN : /* KEYWORDS */
{
    < OR_BOOL: "or" >
|   < AND_BOOL: "and" >
|   < NOT_BOOL: "not" >
|   < IS: "is" >
|   < IN: "in" >
|   < LAMBDA: "lambda" >
|   < IF: "if" >
|   < ELSE: "else" >
|   < ELIF: "elif" >
|   < WHILE: "while" >
|   < FOR: "for" >
|   < TRY: "try" >
|   < EXCEPT: "except" >
|   < DEF: "def" >
|   < CLASS: "class" >
|   < FINALLY: "finally" >
|   < PASS: "pass" >
|   < BREAK: "break" >
|   < CONTINUE: "continue" >
|   < RETURN: "return" >
|   < YIELD: "yield" >
|   < IMPORT: "import" >
|   < FROM: "from" >
|   < DEL: "del" >
|   < RAISE: "raise" >
|   < GLOBAL: "global" >
|   < NONLOCAL: "nonlocal" >
|   < ASSERT: "assert" >
|   < AS: "as" >
|   < WITH: "with" >
|   < FALSE: "False" >
|   < TRUE: "True" >
|   < NONE: "None" >
|   < AT: "@" >
}


//Python 3.0 can use unicode identifiers. So, the letter construct deals with that...
TOKEN : /* Python identifiers */
{
    < NAME: <LETTER> ( <LETTER> | <DIGIT>)* >
|
    < #LETTER: 
    [
       "a"-"z",
       "A"-"Z",
       "_",
       "\u0080"-"\uffff" //Anything more than 128 is considered valid (unicode range)
    
    ] 
>
}


// Numeric literals. Deliberately lenient (IDE parser): e.g. the optional
// "b"/"B" in BINNUMBER lets legacy-looking forms such as "01" lex without
// an error, and trailing l/L suffixes are still accepted.
TOKEN : /* Numeric literals */
{
    < DECNUMBER:
        ["1"-"9"] (["0"-"9"])* (["l", "L"])?
      | "0"
      >
|   < HEXNUMBER: "0" ["x","X"] (["0"-"9","a"-"f","A"-"F"])+ (["l","L"])? >
|   < OCTNUMBER: "0" ["o","O"] (["0"-"7"])* (["l","L"])? >
|   < BINNUMBER: "0" (["b","B"])? (["0"-"1"])* (["l","L"])? >
|
    < FLOAT:
        (["0"-"9"])+ "." (["0"-"9"])* (<EXPONENT>)?
      | "." (["0"-"9"])+ (<EXPONENT>)?
      | (["0"-"9"])+ <EXPONENT>
      >
|   < COMPLEX: (<DECNUMBER> | <FLOAT> | "0" <DECNUMBER> ) ["j", "J"]>
|   < #EXPONENT: ["e","E"] (["+","-"])? (["0"-"9"])+ >
|   < #DIGIT: ["0" - "9"] >
}


// String openers: the prefix+quote is consumed with MORE and the lexer
// jumps into a dedicated state per quote style (b-prefix vs plain,
// single vs triple, ' vs ").
MORE : /* Strings */
{
    < (["b", "B"]) (["r", "R"])? "'" > :  IN_BSTRING11
|   < (["b", "B"]) (["r", "R"])? "\"" > :  IN_BSTRING21
|   < (["b", "B"]) (["r", "R"])? "'''" > :  IN_BSTRING13
|   < (["b", "B"]) (["r", "R"])? "\"\"\"" > :  IN_BSTRING23
|   < (["r", "R"])? "'" > :  IN_STRING11
|   < (["r", "R"])? "\"" > :  IN_STRING21
|   < (["r", "R"])? "'''" > :  IN_STRING13
|   < (["r", "R"])? "\"\"\"" > :  IN_STRING23
}

// Closing quote of each string state: finish the token (image accumulated
// via MORE) and return to DEFAULT.
<IN_STRING11> TOKEN : { <SINGLE_STRING: "'"> {
    matchedToken->image = image; } : DEFAULT}
<IN_STRING21> TOKEN : { <SINGLE_STRING2: "\""> {
    matchedToken->image = image; } : DEFAULT}
<IN_STRING13> TOKEN : { <TRIPLE_STRING: "'''"> {
    matchedToken->image = image; } : DEFAULT}
<IN_STRING23> TOKEN : { <TRIPLE_STRING2: "\"\"\""> {
    matchedToken->image = image; } : DEFAULT}

<IN_BSTRING11> TOKEN : { <SINGLE_BSTRING: "'"> {
    matchedToken->image = image; } : DEFAULT}
<IN_BSTRING21> TOKEN : { <SINGLE_BSTRING2: "\""> {
    matchedToken->image = image; } : DEFAULT}
<IN_BSTRING13> TOKEN : { <TRIPLE_BSTRING: "'''"> {
    matchedToken->image = image; } : DEFAULT}
<IN_BSTRING23> TOKEN : { <TRIPLE_BSTRING2: "\"\"\""> {
    matchedToken->image = image; } : DEFAULT}

// Backslash-newline inside a single-quoted string: drop the escape and
// the line break from the image (3 chars for \CRLF, 2 for \LF or \CR) and
// continue the string on the next line via the *NLC state.
<IN_STRING11> MORE:
{
    <"\\\r\n">           { image.resize(image.length()-3); } : IN_STRING1NLC
|   <("\\" ("\n"|"\r"))> { image.resize(image.length()-2); } : IN_STRING1NLC
}

<IN_STRING21> MORE:
{
    <"\\\r\n">           { image.resize(image.length()-3); } : IN_STRING2NLC
|   <("\\" ("\n"|"\r"))> { image.resize(image.length()-2); } : IN_STRING2NLC
}

<IN_BSTRING11> MORE:
{
    <"\\\r\n">           { image.resize(image.length()-3); } : IN_BSTRING1NLC
|   <("\\" ("\n"|"\r"))> { image.resize(image.length()-2); } : IN_BSTRING1NLC
}

<IN_BSTRING21> MORE:
{
    <"\\\r\n">           { image.resize(image.length()-3); } : IN_BSTRING2NLC
|   <("\\" ("\n"|"\r"))> { image.resize(image.length()-2); } : IN_BSTRING2NLC
}

// Zero-width hop back into the corresponding string state after a line
// continuation.
<IN_STRING1NLC> MORE:
{
 <""> : IN_STRING11
}

<IN_STRING2NLC> MORE:
{
 <""> : IN_STRING21
}

<IN_BSTRING1NLC> MORE:
{
 <""> : IN_BSTRING11
}

<IN_BSTRING2NLC> MORE:
{
 <""> : IN_BSTRING21
}

// Body of single-quoted strings: escaped backslash/quote, or any char
// that is not a raw newline.
<IN_STRING11, IN_BSTRING11> MORE: { <("\\" ("\\"|"'")) | ~["\n","\r"]> }
<IN_STRING21, IN_BSTRING21> MORE: { <("\\" ("\\"|"\"")) | ~["\n","\r"]> }

/* This is a test to see if we can make the loading of strings more efficient (and maybe replace the MORE that is declared below.
I stopped this because I've seen that making the CharStream was apparently the number 1 thing to do, but it might be worth
comming back to this approach later).
<IN_STRING23> MORE:
{
    <~[]> 
    {
         try {
                 while(true){
                         char c = input_stream->readChar();
                         image.append(c);
                         int len = image.length();
                         if(len > 3 && image.charAt(len-3) == '"' && image.charAt(len-2) == '"'  && image.charAt(len-1) == '"' ){
                                 input_stream->backup(3);
                                 image.delete(image.length()-3, image.length());
                                 break;
                         }
                 }
         } catch (Exception e) {
             throw new RuntimeException();
         }
    }
}
*/

// Body of triple-quoted strings: newlines are legal; CRLF and lone CR are
// normalized to a single '\n' in the image.
<IN_STRING13, IN_STRING23, IN_BSTRING13, IN_BSTRING23> MORE:
{
    <"\r\n"> {
        {
        // Drop the trailing '\n', then rewrite the '\r' as '\n'.
        int l = image.length();
        image.resize(l-1);
        image.replace(l-2, 1, '\n');
        }
    }
|   <"\n">
|   <"\r"> { image.replace(image.length()-1, 1, '\n'); }
|   <~["\n","\r"]>
|   <"\\" ~["\n","\r"]>
}



//file_input: (NEWLINE | stmt)* ENDMARKER
modTypePtr file_input(): {}
{
    (try{<NEWLINE>}catch(ParseException e){handleNoNewline(e);} | stmt())* try{<EOF>}catch(ParseException e){handleNoEof(e);}
    { return jjtree.popNode().dynamicCast<modType>(); }
}



//funcdef: 'def' NAME parameters ['->' test] ':' suite
void funcdef(): {}
{ 
    <DEF> {markLastAsSuiteStart();} Name() parameters() [{addSpecialToken("->", STRATEGY_BEFORE_NEXT);}<MINUS_GREATER> test()#funcdef_return_annottation] {findTokenAndAdd(":");}<COLON>
        suite() 
}



//decorators: decorator+
//decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE
void decorators(): {}
{
    (begin_decorator() [<LPAREN>  {addSpecialToken("(", STRATEGY_BEFORE_NEXT);} [arglist()] try{{findTokenAndAdd(")");}<RPAREN> }catch(ParseException e){handleRParensNearButNotCurrent(e);} ] try{<NEWLINE>}catch(ParseException e){handleNoNewline(e);} )+
}


void begin_decorator(): {}
{ temporaryToken=<AT>  {addSpecialToken(temporaryToken, STRATEGY_BEFORE_NEXT);} dotted_name()
}

//parameters: '(' [typedargslist] ')'
void parameters() #void:  {}
{ {findTokenAndAdd("(");}<LPAREN>
  [LOOKAHEAD({ getToken(1)->kind != RPAREN }) typedargslist()]
  try{{findTokenAndAdd(")");}<RPAREN> }catch(ParseException e){handleRParensNearButNotCurrent(e);}
}
  
//typedargslist: ((tfpdef ['=' test] ',')*
//                ('*' [tfpdef] (',' tfpdef ['=' test])* [',' '**' tfpdef] | '**' tfpdef)
//                | tfpdef ['=' test] (',' tfpdef ['=' test])* [','])
// Re-formulation of the CPython typedargslist production: the literal
// grammar does not translate well to javacc, so the alternatives below
// cover the same language (positional defaults, then *args / keyword-only
// args / **kwargs in the allowed combinations).
void typedargslist() #void: {}
{
// Doing the exact same does not yield good results for javacc, so, we have to work with an alternative specification
// (that does the same thing)
//       (LOOKAHEAD(2) (defaultarg2() <COMMA>)*
//        (ExtraArgList2() (<COMMA> defaultarg2())* [<COMMA> ExtraKeywordList2()] | ExtraKeywordList2())
//        | defaultarg2() (<COMMA> defaultarg2())* [<COMMA>])


       ((defaultarg2() (LOOKAHEAD(2){findTokenAndAdd(",");}<COMMA> defaultarg2())*) [LOOKAHEAD(2){findTokenAndAdd(",");}<COMMA> ((ExtraArgList2() (LOOKAHEAD(2){findTokenAndAdd(",");}<COMMA> onlykeywordarg2())* [{findTokenAndAdd(",");}<COMMA> ExtraKeywordList2()]) | (ExtraKeywordList2()) )]) [{findTokenAndAdd(",");}<COMMA>]
    |  (ExtraArgList2() (LOOKAHEAD(2){findTokenAndAdd(",");}<COMMA> onlykeywordarg2())* [LOOKAHEAD(2){findTokenAndAdd(",");}<COMMA> ExtraKeywordList2()]) [{findTokenAndAdd(",");}<COMMA>]
    |   ExtraKeywordList2() [{findTokenAndAdd(",");}<COMMA>]
}

// '*' [tfpdef] — *args (the bare '*' before keyword-only args is allowed).
void ExtraArgList2(): {}
{
        <MULTIPLY> {addSpecialToken("*", STRATEGY_BEFORE_NEXT);} [tfpdef()]
}

// '**' tfpdef — **kwargs.
void ExtraKeywordList2(): {}
{
        <POWER> {addSpecialToken("**", STRATEGY_BEFORE_NEXT);} tfpdef()
}

// tfpdef ['=' test] — a possibly-defaulted (annotated) parameter.
void defaultarg2(): {}
{ tfpdef() [temporaryToken=<EQUAL> {addSpecialToArgDef(temporaryToken);}  test()] }


// Same shape as defaultarg2 but used after '*' (keyword-only position).
void onlykeywordarg2(): {}
{ tfpdef() [temporaryToken=<EQUAL> {addSpecialToArgDef(temporaryToken);}  test()] }


//tfpdef: NAME [':' test]
void tfpdef(): {}
{ 
        Name() [LOOKAHEAD(2)<COLON> {addSpecialToken(":", STRATEGY_BEFORE_NEXT);} test()]
}


//varargslist: ((vfpdef ['=' test] ',')*
//              ('*' [vfpdef] (',' vfpdef ['=' test])*  [',' '**' vfpdef] | '**' vfpdef)
//              | vfpdef ['=' test] (',' vfpdef ['=' test])* [','])
// Lambda parameter list: same structure as typedargslist but without
// annotations (uses fpdef/Name instead of tfpdef).
void varargslist() #void: {}
{
       ((defaultarg() (LOOKAHEAD(2){findTokenAndAdd(",");}<COMMA> defaultarg())*) [LOOKAHEAD(2){findTokenAndAdd(",");}<COMMA> ((ExtraArgList() (LOOKAHEAD(2){findTokenAndAdd(",");}<COMMA> onlykeywordarg())* [{findTokenAndAdd(",");}<COMMA> ExtraKeywordList()]) | (ExtraKeywordList()) )]) [{findTokenAndAdd(",");}<COMMA>]
    |  (ExtraArgList() (LOOKAHEAD(2){findTokenAndAdd(",");}<COMMA> onlykeywordarg())* [LOOKAHEAD(2){findTokenAndAdd(",");}<COMMA> ExtraKeywordList()]) [{findTokenAndAdd(",");}<COMMA>]
    |   ExtraKeywordList() [{findTokenAndAdd(",");}<COMMA>]
}

void ExtraArgList(): {}
{ <MULTIPLY> {addSpecialToken("*", STRATEGY_BEFORE_NEXT);} [Name()] }

// '**' Name, also tolerating the split form '*' '*' Name.
void ExtraKeywordList(): {}
{ (<POWER>{addSpecialToken("**", STRATEGY_BEFORE_NEXT);}|<MULTIPLY> {addSpecialToken("*", STRATEGY_BEFORE_NEXT);}{addSpecialToken("*", STRATEGY_BEFORE_NEXT);} <MULTIPLY>) Name() }

void onlykeywordarg(): {}
{ fpdef() [temporaryToken=<EQUAL> {addSpecialToArgDef(temporaryToken);}  test()] }

void defaultarg(): {}
{ fpdef() [temporaryToken=<EQUAL> {addSpecialToArgDef(temporaryToken);}  test()] }

//fpdef: NAME | '(' fplist ')'
void fpdef() #void: {}
{ Name() | {temporaryToken=createSpecialStr("(");}<LPAREN>  {addSpecialToken(temporaryToken, STRATEGY_BEFORE_NEXT);}   fplist() try{{findTokenAndAdd(")");}<RPAREN> }catch(ParseException e){handleRParensNearButNotCurrent(e);}  }

//fplist: fpdef (',' fpdef)* [',']
void fplist() #tuple: {}
{ fpdef() (LOOKAHEAD(2) {findTokenAndAdd(",");}<COMMA> fpdef())* [Comma()] }



//stmt: simple_stmt | compound_stmt
//stmt: simple_stmt | compound_stmt
// Compound-statement errors are recovered so one broken block does not
// abort the whole parse.
void stmt() #void: {}
{ 
        simple_stmt() 
    | 
        try{
            compound_stmt()
        }catch(ParseException e){
            handleErrorInCompountStmt(e);
        } 
}




//simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE
void simple_stmt() #void: {}
{ 
    small_stmt() (LOOKAHEAD(2) temporaryToken=<SEMICOLON>{addSpecialToken(temporaryToken);} small_stmt())*
    [temporaryToken=<SEMICOLON>{addSpecialToken(temporaryToken);}]
    try{<NEWLINE>}catch(ParseException e){handleNoNewline(e);}
}



//small_stmt: expr_stmt | del_stmt | pass_stmt | flow_stmt | import_stmt | global_stmt | nonlocal_stmt | assert_stmt 
void small_stmt() #void: {TokenPtr spStr;}
{
    expr_stmt()
|   del_stmt()
|   spStr = pass_stmt() {addToPeek(spStr, false); }
|   flow_stmt()
|   import_stmt()
|   global_stmt()
|   nonlocal_stmt()
|   temporaryToken=<ASSERT> assert_stmt() {addToPeek(temporaryToken, false); }

}

//expr_stmt: testlist (augassign (yield_expr|testlist) |
//                     ('=' (yield_expr|testlist))*)
// One node type per augmented-assignment operator; the plain '=' chain
// collapses into a single expr_stmt node with all targets/values.
void expr_stmt() #void: {}
{
    testlist_star_expr() (
    <PLUSEQ>        (yield_expr()|SmartTestList())   #aug_plus(2)
|   <MINUSEQ>       (yield_expr()|SmartTestList())   #aug_minus(2)
|   <MULTIPLYEQ>    (yield_expr()|SmartTestList())   #aug_multiply(2)
|   <DIVIDEEQ>      (yield_expr()|SmartTestList())   #aug_divide(2)
|   <FLOORDIVIDEEQ> (yield_expr()|SmartTestList())   #aug_floordivide(2)
|   <MODULOEQ>      (yield_expr()|SmartTestList())   #aug_modulo(2)
|   <ANDEQ>         (yield_expr()|SmartTestList())   #aug_and(2)
|   <OREQ>          (yield_expr()|SmartTestList())   #aug_or(2)
|   <XOREQ>         (yield_expr()|SmartTestList())   #aug_xor(2)
|   <LSHIFTEQ>      (yield_expr()|SmartTestList())   #aug_lshift(2)
|   <RSHIFTEQ>      (yield_expr()|SmartTestList())   #aug_rshift(2)
|   <POWEREQ>       (yield_expr()|SmartTestList())   #aug_power(2)
|  (<EQUAL>         (yield_expr()|testlist_star_expr()))* #expr_stmt(jjtree.nodeArity()+1))

}


//del_stmt: 'del' exprlist
void del_stmt(): {}
{ begin_del_stmt() exprlist() }

void begin_del_stmt(): {}
{ temporaryToken=<DEL> {addToPeek(temporaryToken,false);}
}



//pass_stmt: 'pass'
// Returns the 'pass' token so small_stmt can attach it as a special token.
TokenPtr pass_stmt(): {TokenPtr spStr(NULL);}
{ spStr=<PASS> {return spStr;}}


//flow_stmt: break_stmt | continue_stmt | return_stmt | yield_stmt | raise_stmt
void flow_stmt() #void: {}
{
    <BREAK> {addToPeek("break",true);} #break_stmt(0)
|   <CONTINUE>  {addToPeek("continue",true);} #continue_stmt(0)
|   return_stmt()
|   yield_stmt()
|   raise_stmt()
}

//return_stmt: 'return' [testlist]
void return_stmt(): {}
{ begin_return_stmt() [SmartTestList()] }

void begin_return_stmt(): {}
{ <RETURN> {addToPeek("return ",false);}
}


//yield_stmt: yield_expr
void yield_stmt(): {}
{ yield_expr() }


//yield_expr: 'yield' [testlist]
void yield_expr(): {TokenPtr spStr;}
{ spStr=<YIELD> [SmartTestList()] {addToPeek(spStr, false, Ast::Yield);}}


//raise_stmt: 'raise' [test ['from' test]]
void raise_stmt(): {}
{ {temporaryToken=createSpecialStr("raise");}<RAISE> {addSpecialToken(temporaryToken, STRATEGY_BEFORE_NEXT);} [test() [{addSpecialToken(" from ");} <FROM> test()]] }



//import_stmt: 'import' dotted_name (',' dotted_name)* | 'from' dotted_name 'import' ('*' | NAME (',' NAME)*)
//import_stmt: 'import' dotted_name (',' dotted_name)* | 'from' dotted_name 'import' ('*' | NAME (',' NAME)*)
// Errors anywhere in an import are recovered via handleErrorInImport.
void import_stmt() #void: {ImportPtr imp; ObjectPtr spStr;}
{  
    try{
        spStr=<IMPORT> imp = Import() {imp->addSpecial(spStr,false);}
        |
        {temporaryToken=createSpecialStr("from");}<FROM> {addSpecialToken(temporaryToken,STRATEGY_BEFORE_NEXT);} ImportFrom()
    }catch(ParseException e){handleErrorInImport(e);}
}



// 'import' a.b.c [as x] (',' ...)* — returns the Import node left on the
// tree-builder stack so the caller can attach the 'import' keyword to it.
ImportPtr Import(): {}
{ dotted_as_name() ({findTokenAndAdd(",");}<COMMA> dotted_as_name())*
  {return jjtree.peekNode< ::Import>();}
}

//import_from: ('from' ('.'* dotted_name | '.'+)
//              'import' ('*' | '(' import_as_names ')' | import_as_names))
//import_from: ('from' ('.'* dotted_name | '.'+)
//              'import' ('*' | '(' import_as_names ')' | import_as_names))
// Parses the part after 'from': relative-import dots, optional module
// name, then '*', a plain name list, or a parenthesized name list.
// `level` counts the leading dots and is stored on the ImportFrom node;
// `state` detects two consecutive commas inside the parenthesized form.
// NOTE: ParseException is thrown BY VALUE here (not Java-style
// `throw new ...`, which in C++ would throw a ParseException* that the
// `catch(ParseException e)` handlers used throughout this grammar can
// never catch).
void ImportFrom(): { int level=0; int state=0; QString fromName=""; QString importName=""; }
{
    //we need to set the  {findTokenAndAdd("import");}<IMPORT> in both otherwise the lookahead will not work as we want it to work
    //because it confuses the import with the dotted name
     (("." {level++;} )* (fromName=dotted_name())? )  {if(fromName.isEmpty() && level==0){throw ParseException("Expecting to find '.' or name in import.");}}
     {findTokenAndAdd("import");}<IMPORT>
    
    (
        //from xxx import *
        <MULTIPLY> {addSpecialToken("*",STRATEGY_ADD_AFTER_PREV);}//from xx import *
        
        //from xxx import a,b,c
        | (importName=import_as_name() {if(!fromName.isEmpty() && fromName == "__future__")handleFutureImports(importName);}
            ({findTokenAndAdd(",");}<COMMA> (importName=import_as_name()){if(!fromName.isEmpty() && fromName == "__future__")handleFutureImports(importName);})*
          ) 
        
        //from xxx import (a,b,c)
        | {temporaryToken=createSpecialStr("(");}<LPAREN>  {addSpecialToken(temporaryToken, STRATEGY_BEFORE_NEXT);}
          (importName=import_as_name()){if(!fromName.isEmpty() && fromName == "__future__")handleFutureImports(importName);}
          (
           ({
             if(state!=0){
                 throw ParseException("Invalid syntax: 2 commas cannot be grouped.", getToken(1));
             }
             state=1; 
             } 
             {findTokenAndAdd(",");}<COMMA> ( {state=0;} (importName=import_as_name(){if(!fromName.isEmpty() && fromName == "__future__")handleFutureImports(importName);}))? )*
           try{{findTokenAndAdd(")");}<RPAREN> }catch(ParseException e){handleRParensNearButNotCurrent(e);}
          )
    )
    //now, let's set the correct level for the module
    {jjtree.peekNode< ::ImportFrom>()->setLevel(level);}
}

//dotted_as_name: dotted_name [NAME NAME]
//dotted_as_name: dotted_name [NAME NAME]
void dotted_as_name(): {}
{ dotted_name() [{findTokenAndAdd("as");}<AS> Name()] }


//dotted_name: NAME ('.' NAME)*
// Builds the textual module path; reuses a shared FastStringBuffer to
// avoid reallocating per call.
QString dotted_name(): { TokenPtr t; FastStringBuffer sb = dottedNameStringBuffer.clear(); }
{ t=Name() { sb.append(t->image); }
    (<DOT> t=Name() { sb.append(".").append(t->image); } )*
        { return sb.toString(); }
}


//import_as_name: NAME [NAME NAME]
// Returns the imported (pre-'as') name so __future__ imports can be
// detected by the caller.
QString import_as_name(): { TokenPtr t; }
{ t=Name() [{findTokenAndAdd("as");}<AS> Name()] { return t->image; } }

//global_stmt: 'global' NAME (',' NAME)*
void global_stmt(): {}
{ temporaryToken=<GLOBAL> {addSpecialToken(temporaryToken, STRATEGY_BEFORE_NEXT);} Name() ({findTokenAndAdd(",");}<COMMA> Name())* }


//nonlocal_stmt: 'nonlocal' NAME (',' NAME)* [','] ['=' testlist]
void nonlocal_stmt(): {}
{ <NONLOCAL> {addSpecialToken("nonlocal ", STRATEGY_BEFORE_NEXT);} Name() (LOOKAHEAD(2){findTokenAndAdd(",");}<COMMA> Name())* [{findTokenAndAdd(",");}<COMMA>] [{temporaryToken=createSpecialStr("=");}<EQUAL> {addSpecialToken(temporaryToken, STRATEGY_BEFORE_NEXT);} testlist()]}



//assert_stmt: 'assert' test [',' test]
void assert_stmt(): {}
{ test() [{findTokenAndAdd(",");}<COMMA> test()] }



//compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | classdef
//compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | classdef
void compound_stmt() #void : {}
{
    if_stmt() | while_stmt() | for_stmt() | try_stmt() | with_stmt() | funcdef() | classdef() | decorated()
}



//if_stmt: 'if' test ':' suite ('elif' test ':' suite)* ['else' ':' suite]
void if_stmt(): {QList<ObjectPtr> elseToks;}
{
    temporaryToken=<IF> {markLastAsSuiteStart();} {addSpecialTokenToLastOpened(temporaryToken);} test() {findTokenAndAdd(":");}<COLON> suite()
         (begin_elif_stmt() test() {findTokenAndAdd(":");}<COLON> suite())*
             [ elseToks=begin_else_stmt() suite() {addToPeek(elseToks[0], false, Ast::Suite);addToPeek(elseToks[1], false, Ast::Suite);}]
}




void begin_elif_stmt(): {}
{ <ELIF> {addToPeek("elif",false);}
}


//while_stmt: 'while' test ':' suite ['else' ':' suite]
void while_stmt(): {QList<ObjectPtr> elseToks;}
{ begin_while_stmt() test() {findTokenAndAdd(":");}<COLON> suite()
  [ elseToks=begin_else_stmt()  suite() {addToPeek(elseToks[0], false, Ast::Suite);addToPeek(elseToks[1], false, Ast::Suite);}] }

void begin_while_stmt(): {}
{ temporaryToken=<WHILE>{addSpecialToken(temporaryToken,STRATEGY_BEFORE_NEXT);} {markLastAsSuiteStart();}
}



// 'else' ':' — returns both tokens so the caller can attach them to the
// else-suite for round-tripping.
QList<ObjectPtr> begin_else_stmt(): {ObjectPtr o1, o2;}
{ o1=<ELSE> o2=<COLON>{QList<ObjectPtr> list; list.append(o1); list.append(o2); return list;}
}



//for_stmt: 'for' exprlist 'in' testlist ':' suite ['else' ':' suite]
void for_stmt(): {}
{   <FOR> {markLastAsSuiteStart();} {addSpecialTokenToLastOpened("for ");} exprlist() {findTokenAndAdd("in");}<IN>  SmartTestList() {findTokenAndAdd(":");}<COLON> suite()
    [begin_for_else_stmt() suite()]
    
} 


void begin_for_else_stmt(): {}
{ <ELSE> {addSpecialToken("else",STRATEGY_BEFORE_NEXT);} {addSpecialToken(":",STRATEGY_BEFORE_NEXT);} <COLON>
}
//try_stmt: ('try' ':' suite (except_clause ':' suite)+ #diagram:break
//           ['else' ':' suite] | 'try' ':' suite 'finally' ':' suite)
// `i` counts children so #try_stmt gets the right arity; a bare
// try/finally builds a tryfinally_stmt node instead.
void try_stmt() #void: {SimpleNodePtr tryNode;int i=0;}
{ 
    begin_try_stmt() {tryNode = (SimpleNodePtr)jjtree.peekNode();}  suite() (
        (
            (except_clause(tryNode) {i++;})+ 
    
            [begin_try_else_stmt() suite() {i++;} #tryelse_stmt(2) ]
            
            [begin_finally_stmt()  suite() {i++;} #tryfinally_outer_stmt(2)]
            
            #try_stmt(i)
        )
        
        | begin_finally_stmt() suite()
            #tryfinally_stmt(jjtree.nodeArity()+1)
    )
}

//this is the 'try' ':'  it is needed because we need that scope closing for getting the specials.
void begin_try_stmt(): {}
{ <TRY> {markLastAsSuiteStart();} {addSpecialToken("try", STRATEGY_BEFORE_NEXT);} {addSpecialToken(":", STRATEGY_BEFORE_NEXT);} <COLON>
}
void begin_try_else_stmt(): {}
{ <ELSE> {addSpecialToken("else", STRATEGY_BEFORE_NEXT);}{addSpecialToken(":", STRATEGY_BEFORE_NEXT);}<COLON>
}

void begin_finally_stmt(): {}
{ <FINALLY> {addSpecialToken("finally", STRATEGY_BEFORE_NEXT);} {addSpecialToken(":", STRATEGY_BEFORE_NEXT);} <COLON>
}

//except_clause: 'except' [test [as test]]
// NOTE(review): tryNode is currently unused in the body — kept for
// signature compatibility with callers; confirm before removing.
void except_clause(SimpleNodePtr tryNode): {}
{ begin_except_clause() {addToPeek("except",false);} [test() [{findTokenAndAdd("as");}<AS> test()]] {findTokenAndAdd(":");}<COLON> suite() }

void begin_except_clause(): {}
{ <EXCEPT> }



//with_stmt: 'with' with_item (',' with_item)*  ':' suite
void with_stmt(): {}
{ <WITH> 
    {addSpecialToken("with ", STRATEGY_BEFORE_NEXT); }
    
    with_item()
    ({findTokenAndAdd(",");}<COMMA> with_item())*
    
    {findTokenAndAdd(":");}<COLON> suite()
}

//with_item: test ['as' expr]
void with_item():{}
{ test() [{temporaryToken=createSpecialStr("as");}<AS> {addSpecialToken(temporaryToken, STRATEGY_BEFORE_NEXT);} expr()]}





//suite: simple_stmt | NEWLINE INDENT stmt+ DEDENT
//suite: simple_stmt | NEWLINE INDENT stmt+ DEDENT
// Heavily error-recovered: every missing NEWLINE/INDENT/DEDENT and every
// broken statement is delegated to a handler so the tree can still be
// built for malformed editor buffers. The third alternative (INDENT with
// no NEWLINE) is only reachable after an earlier error.
void suite(): {}
{ 

try{
        simple_stmt() 
    |  
    
        try{try{<NEWLINE>}catch(ParseException e){handleNoNewline(e);}<INDENT>}catch(ParseException e){handleErrorInIndent(e);}
        
        (try{stmt()}catch(ParseException e){handleErrorInStmt(e);})+
        
        try{<DEDENT>}catch(ParseException e){handleErrorInDedent(e);}
    
    |
        <INDENT>
        {handleNoNewlineInSuiteFound();} //this only happens when we already had some error!
        
        (try{stmt()}catch(ParseException e){handleErrorInStmt(e);})+
        
        try{<DEDENT>}catch(ParseException e){handleErrorInDedent(e);}
    
        

}catch(ParseException e){
    handleNoSuiteMatch(e);
    
//}catch(EmptySuiteException e){
    /*Just ignore: This was thrown in the handleErrorInIndent*/
}


}


//test: or_test ['if' or_test 'else' test] | lambdef
//test: or_test ['if' or_test 'else' test] | lambdef
void test(): {}
{  lambdef() | or_test() [if_exp()] }

//test_nocond: or_test | lambdef_nocond
void test_nocond() #void: {}
{  or_test() | lambdef_nocond() }


// Conditional expression tail: 'if' cond 'else' alt.
void if_exp():{}
{{temporaryToken=createSpecialStr("if");}<IF> {addSpecialToken(temporaryToken,STRATEGY_ADD_AFTER_PREV);} or_test() {findTokenAndAdd("else");}<ELSE> test()}

//or_test: and_test ('or' and_test)*
// #or_boolean(>1): node only created when there is at least one 'or'.
void or_test() #or_boolean(>1): {}
{ and_test() (<OR_BOOL> and_test())* }


//and_test: not_test ('and' not_test)*
void and_test() #and_boolean(>1): {}
{ not_test() (<AND_BOOL> not_test())* }

//not_test: 'not' not_test | comparison
void not_test() #void: {}
{ <NOT_BOOL> not_test() #not_1op(1) | comparison() }

//comparison: expr (comp_op expr)*
void comparison() #void: {}
{ (expr() (comp_op() expr())*) #comparision(>2) }


//comp_op: '<'|'>'|'=='|'>='|'<='|'!='|'in'|'not' 'in'|'is'|'is' 'not'
// LOOKAHEAD(2) disambiguates 'is not' from a bare 'is'.
void comp_op() #void: {}
{
    <LESS> #less_cmp(0)
|   <GREATER> #greater_cmp(0)
|   <EQEQUAL> #equal_cmp(0)
|   <EQGREATER> #greater_equal_cmp(0)
|   <EQLESS> #less_equal_cmp(0)
|   <NOTEQUAL> #notequal_cmp(0)
|   <IN> #in_cmp(0)
|   <NOT_BOOL> <IN> #not_in_cmp(0)
|   LOOKAHEAD(2) <IS> <NOT_BOOL> #is_not_cmp(0)
|   <IS> #is_cmp(0)
}

//expr: xor_expr ('|' xor_expr)*
void expr() #void : {}
{ xor_expr() (<OR> xor_expr() #or_2op(2))* }

//xor_expr: and_expr ('^' and_expr)*
void xor_expr() #void : {}
{ and_expr() (<XOR> and_expr() #xor_2op(2))* }

//and_expr: shift_expr ('&' shift_expr)*
void and_expr() #void : {}
{ shift_expr() (<AND> shift_expr() #and_2op(2))* }

//shift_expr: arith_expr (('<<'|'>>') arith_expr)*
void shift_expr() #void : {}
{
    arith_expr() (<LSHIFT> arith_expr() #lshift_2op(2)
|   <RSHIFT> arith_expr() #rshift_2op(2) )*
}

//arith_expr: term (('+'|'-') term)*
void arith_expr() #void : {}
{
    term() (<PLUS> term() #add_2op(2)
|   <MINUS> term() #sub_2op(2) )*
}

//term: factor (('*'|'/'|'%') factor)*
void term() #void : {}
{
    factor()  ( <MULTIPLY> factor() #mul_2op(2)
|   <DIVIDE> factor() #div_2op(2)
|   <FLOORDIVIDE> factor() #floordiv_2op(2)
|   <MODULO> factor() #mod_2op(2) )*
}

//factor: ('+'|'-'|'~') factor | power
void factor() #void: {}
{
    <PLUS> factor() #pos_1op(1)
|   <MINUS> factor() #neg_1op(1)
|   <NOT> factor() #invert_1op(1)
|   power()
} /*Modified, no recursion*/

//power: atom trailer* ('**' factor)*
// LOOKAHEAD(2) keeps '**' (power) from being confused with '*' usages.
void power() #void: {}
{ atom() (trailer())* (LOOKAHEAD(2) <POWER> factor() #pow_2op(2))* }

//trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME
//trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME
// The bracket tokens are recorded as special strings and attached to the
// produced node after it is built (for exact-source round-tripping).
void trailer() #void: {ObjectPtr spStr;ObjectPtr spStr2;}
{
 
    ({spStr  = createSpecialStr("(", false);} <LPAREN>
    [arglist()] 
    <RPAREN> {spStr2 = createSpecialStr(")", false);}
    )#Call_Op(jjtree.nodeArity()+1) {addToPeekCallFunc(spStr, true); addToPeek(spStr2, true);}


|   ({spStr = createSpecialStr("[", false);} <LBRACKET>
    subscriptlist()
    <RBRACKET> {spStr2 = createSpecialStr("]", false);}
    )#Index_Op(2) {addToPeek(spStr, false); addToPeek(spStr2, true);}

|   <DOT> Name() #Dot_Op(2) 
}



//atom: ('(' [yield_expr|testlist_comp] ')' |
//       '[' [testlist_comp] ']' |
//       '{' [dictorsetmaker] '}' |
//       NAME | NUMBER | STRING+ | '...' | 'None' | 'True' | 'False')
// First alternative (LOOKAHEAD(2)) is the empty tuple '()'; the second is
// a parenthesized expression / non-empty tuple / generator.
void atom() #void: {ObjectPtr spStr;ObjectPtr spStr2;}
{
    LOOKAHEAD(2) (  
      {spStr  = createSpecialStr("(", false);} <LPAREN>
      {spStr2 = createSpecialStr(")", false);} <RPAREN>
    ) #tuple {addToPeek(spStr, false); addToPeek(spStr2, true);}

|   LOOKAHEAD(2) (  
      {spStr  = createSpecialStr("(", false);} <LPAREN>
      (yield_expr() | testlist_comp())
      {spStr2 = createSpecialStr(")", false);} <RPAREN>
    ) #tuple {addToPeek(spStr, false); addToPeek(spStr2, true);}


|   ( {spStr = createSpecialStr("[", false);} <LBRACKET>
      [testlist_comp()] 
      {spStr2 = createSpecialStr("]", false);} <RBRACKET>
    ) #list {addToPeek(spStr, false); addToPeek(spStr2, true);}
      
      
|   ( {spStr  = createSpecialStr("{", false);}<LBRACE>
      [dictorsetmaker()] 
      {spStr2  = createSpecialStr("}", false);} <RBRACE>
    ) #dictionary {addToPeek(spStr, false); addToPeek(spStr2, true);}
    
|   (<FALSE>)#False
|   (<TRUE>)#True
|   (<NONE>)#None
|   (<DOT> <DOT> <DOT>)#Ellipsis_as_name
|   Name() 
|   Number()
|   String() (String() #strjoin(2))*
}

//lambdef: 'lambda' [varargslist] ':' test
//we add the colon to the args if there is some argument... otherwise, we add it to the first token that appears on the test
// Lambda expression.  hasArgs records whether varargslist matched so the
// ':' special token can be attached to the right place (see comment above).
void lambdef():{bool hasArgs=false;}
{ <LAMBDA> [varargslist(){hasArgs=true;}] {temporaryToken=createSpecialStr(":");}<COLON> {
if(hasArgs)
    addSpecialToken(temporaryToken);
else 
    // no args: defer the ':' onto the next token (the start of the body).
    addSpecialToken(temporaryToken,STRATEGY_BEFORE_NEXT);}
 test() }

//lambdef_nocond: 'lambda' [varargslist] ':' test_nocond
// Same as lambdef() but the body is test_nocond (a lambda used inside a
// comprehension 'if', where a conditional expression is not allowed).
void lambdef_nocond():{bool hasArgs=false;}
{ <LAMBDA> [varargslist(){hasArgs=true;}] {temporaryToken=createSpecialStr(":");}<COLON> {
if(hasArgs)
    addSpecialToken(temporaryToken);
else 
    // no args: defer the ':' onto the next token (the start of the body).
    addSpecialToken(temporaryToken,STRATEGY_BEFORE_NEXT);}
 test_nocond() }


//subscriptlist: subscript (',' subscript)* [',']
// Comma-separated subscripts inside '[...]'.  #subscriptlist(>1): the node
// is only created when more than one child was produced; a single
// subscript passes through unchanged.
void subscriptlist() #void: {}
{ (subscript() (LOOKAHEAD(2) {findTokenAndAdd(",");}<COMMA> subscript())* [Comma()]) #subscriptlist(>1) }

//subscript: '.' '.' '.' | test | [test] ':' [test] [sliceop]
// One subscript: '...' (Ellipsis), a plain index possibly followed by a
// slice tail, or a slice that starts directly with ':'.
void subscript() #void: {}
{
    <DOT> <DOT> <DOT> #Ellipsis
    // test [':' ...] -- index, optionally extended to a slice.
|   (test() (slice())?) #Slice
    // ':' [...] -- slice with no lower bound.  #Slice(>0): node made only
    // if slice() produced at least one child.
|   slice() #Slice(>0)
}


//sliceop: ':' [test]
// The slice tail: ':' [upper] [':' [step]].  Colon() records the ':'
// characters as special tokens.
void slice() #void: {}
{ Colon() [test()] (Colon() [test()])? }



// Matches ':' and attaches it as a special token to the following token.
void Colon(): {} {{addSpecialToken(createSpecialStr(":", false), STRATEGY_BEFORE_NEXT);} <COLON> }
// Matches a trailing ',' (no special-token bookkeeping needed here).
void Comma(): {} {<COMMA>}

//exprlist: expr (',' expr)* [',']
// Target list of a for/del: exprs and starred exprs.  #tuple(>1): only
// wrapped in a tuple node when more than one element was parsed.
void exprlist() #void: {}
{ ((expr()|star_expr()) (LOOKAHEAD(2) {findTokenAndAdd(",");}<COMMA> (expr()|star_expr()))* [Comma()]) #tuple(>1) }

//SmartTestList: test (',' test)* [','] -- same shape as testlist, but
// wraps multiple elements in a #tuple node (>1) instead of leaving them flat.
void SmartTestList() #void: {}
{ (test() (LOOKAHEAD(2) {findTokenAndAdd(",");}<COMMA> test())* [Comma()]) #tuple(>1) }

//testlist: test (',' test)* [',']
// Flat variant: no tuple node is created; children are left on the stack.
// Note the trailing comma here is recorded via findTokenAndAdd rather than
// Comma(), unlike SmartTestList above.
void testlist() #void: {}
{ test() (LOOKAHEAD(2) {findTokenAndAdd(",");}<COMMA> test())* [{findTokenAndAdd(",");}<COMMA>]}

//testlist_star_expr: (test|star_expr) (',' (test|star_expr))* [',']
// Like SmartTestList but each element may also be a starred expression;
// multiple elements collapse into a #tuple node (>1).
void testlist_star_expr() #void: {}
{ ( (test()|star_expr()) (LOOKAHEAD(2) {findTokenAndAdd(",");}<COMMA>  (test()|star_expr()))* [Comma()]) #tuple(>1) }

//star_expr: '*' expr -- starred expression (e.g. unpacking target).
// The '*' is kept as a special token attached to the following token.
void star_expr(): {}
{<MULTIPLY> {addSpecialToken("*", STRATEGY_BEFORE_NEXT);} expr()}



//dictorsetmaker: ( 
//                   (test ':' test (comp_for | (',' test ':' test)* [','])) 
//                  |(test (comp_for | (',' test)* [','])) 
//                )
// Body of '{...}': after the first test, a ':' selects the dict form,
// otherwise the set form.  The enclosing atom() alternative decides the
// node; only the set branch annotates (#set) here.
void dictorsetmaker() #void: {}
{
    test()
    
    (
        // dict form: ':' value, then either a comprehension or more
        // key:value pairs.
        ( 
            {findTokenAndAdd(":");}<COLON>
            // Error recovery: a missing value after ':' is reported through
            // handleNoValInDict instead of aborting the parse.
            try{
                test()
            }catch(ParseException e){
                handleNoValInDict(e);
            } 
            (
                comp_for()
                |
                (LOOKAHEAD(2) {findTokenAndAdd(",");}<COMMA> test(){findTokenAndAdd(":");}<COLON> test())*[{findTokenAndAdd(",");}<COMMA>]
            )
        )
        |
        // set form: a comprehension, or more comma-separated elements
        // collected under a #set node.
        (
          (LOOKAHEAD(2) comp_for() | ({findTokenAndAdd(",");}<COMMA> [test()])* #set)
        )
    )
}




//testlist_comp: test ( comp_for | (',' test)* [','] )
// Inside '(...)' / '[...]': either a comprehension (one or more comp_for
// clauses) or a comma-separated list wrapped in #tuple when >1 element.
void testlist_comp() #void: {}
{ test() ( LOOKAHEAD(2)(comp_for())+ | (LOOKAHEAD(2) {findTokenAndAdd(",");}<COMMA> test())* [Comma()] #tuple(>1)) }


//comp_iter: [comp_for | comp_if]
// Continuation of a comprehension: another 'for' clause or an 'if' clause.
void comp_iter() #void: {}
{ comp_for() | comp_if() }

//comp_for: 'for' exprlist 'in' or_test [comp_iter]
// One 'for' clause of a comprehension; 'for'/'in' keywords are recorded as
// special tokens via findTokenAndAdd.
void comp_for(): {}
{ {findTokenAndAdd("for");}<FOR> exprlist() {findTokenAndAdd("in");}<IN>  or_test() [comp_iter()] }

//comp_if: 'if' test_nocond [comp_iter]
// One 'if' filter clause of a comprehension (condition must be
// test_nocond: no bare conditional expression allowed here).
void comp_if()#void:{}
{ {findTokenAndAdd("if");}<IF> test_nocond() [comp_iter()]}


//decorated: decorators (classdef | funcdef)
// A decorator list followed by the class or function it decorates.
void decorated():{}
{
    decorators() (classdef()|funcdef())
}

//classdef: 'class' NAME ['(' [arglist] ')'] ':' suite
// Class definition.  markLastAsSuiteStart() flags the 'class' token for
// suite/indentation handling.  An unmatched ')' near (but not at) the
// current position is recovered via handleRParensNearButNotCurrent.
void classdef(): {}
{
    <CLASS> {markLastAsSuiteStart();} Name() [{temporaryToken=createSpecialStr("(");}<LPAREN>  {addSpecialToken(temporaryToken, STRATEGY_ADD_AFTER_PREV);}   [arglist()] try{{findTokenAndAdd(")");}<RPAREN> }catch(ParseException e){handleRParensNearButNotCurrent(e);} ] {findTokenAndAdd(":");}<COLON>
        suite()
}

//arglist: (argument ',')* (argument [',']
//                         |'*' test (',' argument)* [',' '**' test] 
//                         |'**' test)
// Call-site argument list.  Three alternatives by what comes first:
//  1. positional/keyword arguments, optionally followed by *args
//     (ExtraArgValueList), more arguments, and/or **kwargs
//     (ExtraKeywordValueList);
//  2. *args first, then more arguments and optional **kwargs;
//  3. **kwargs only.
// Each alternative tolerates one trailing comma.
void arglist() #void: {}
{
       ((argument() (LOOKAHEAD(2){findTokenAndAdd(",");}<COMMA> argument())*) [LOOKAHEAD(2){findTokenAndAdd(",");}<COMMA> ((ExtraArgValueList() (LOOKAHEAD(2){findTokenAndAdd(",");}<COMMA> argument())* [{findTokenAndAdd(",");}<COMMA> ExtraKeywordValueList()]) | (ExtraKeywordValueList()) )]) [{findTokenAndAdd(",");}<COMMA>]
    |  (ExtraArgValueList() (LOOKAHEAD(2){findTokenAndAdd(",");}<COMMA> argument())* [LOOKAHEAD(2){findTokenAndAdd(",");}<COMMA> ExtraKeywordValueList()]) [{findTokenAndAdd(",");}<COMMA>]
    |   ExtraKeywordValueList() [{findTokenAndAdd(",");}<COMMA>]
}


// '*' test -- iterable-unpacking argument (*args) at a call site.
void ExtraArgValueList(): {}
{ {addSpecialToken(createSpecialStr("*", false));} <MULTIPLY> test() }

// '**' test -- mapping-unpacking argument (**kwargs) at a call site.
void ExtraKeywordValueList(): {}
{ {addSpecialToken(createSpecialStr("**", false));} <POWER> test() }


//argument: test [comp_for] | test '=' test  # Really [keyword '='] test
// One call argument: a plain value, a keyword argument (via Keyword()),
// or a value followed by a comprehension (generator-expression argument).
void argument(): {}
{ 
	   test() (LOOKAHEAD(2)  (Keyword()) |  [comp_for()])
}

// '=' test -- the value half of a keyword argument; '=' is recorded as a
// special token.
void Keyword() : {}
{
        {findTokenAndAdd("=");}<EQUAL> test()
}

// Numeric literal -> #Num node.  Prefixed integer forms (0x/0b/0o) strip
// the two-character prefix before conversion with the matching radix.
void Number() #Num :
{
    TokenPtr t;
}
{
    (
        t=<HEXNUMBER> {
            // mid(2, length()) requests more chars than remain past index 2;
            // QString::mid clamps, so this is just "drop the 0x prefix".
            QString s = t->image.mid(2, t->image.length());
            makeInt(s, 16, t, jjtThis);
        } {}
    )
|    (
        t=<BINNUMBER> {
            // drop the 0b prefix (mid clamps as above)
            QString s = t->image.mid(2, t->image.length());
            makeInt(s, 2, t, jjtThis);
        } {}
    )
|   (
        t=<OCTNUMBER> {
            // drop the 0o prefix (mid clamps as above)
            QString s = t->image.mid(2, t->image.length());
            makeInt(s, 8, t, jjtThis);
        } {}
    )
|   (
        // plain decimal integer: no prefix to strip
        t=<DECNUMBER> { makeInt(t->image, 10, t, jjtThis); } {}
    )
|   (
        t=<FLOAT> { makeFloat(t, jjtThis); } {}
    )
|   (
        t=<COMPLEX> { makeComplex(t, jjtThis); } {}
    )
}


// NOTE(review): despite its name, this production matches <FLOAT>, not the
// <COMPLEX> token that Number() uses -- confirm whether that is intentional
// (it may simply be unused) before relying on or changing it.
void Complex(): {}
{ <FLOAT>   }



// Identifier -> #Name node.  Returns the matched token; on a parse error
// the token is synthesized by handleErrorInName so parsing can continue.
TokenPtr Name() #Name:
{
    TokenPtr t;
}
{
    try{
        (t = <NAME>) 
    }catch(ParseException e){
        t = handleErrorInName(e);
    }
    

        // store the identifier text on the Name node, then hand the token back
        { jjtThis.dynamicCast< ::Name>()->setId(t->image); } {return t;}

}



// String literal.  Plain strings build #String nodes, b'...' forms build
// #Binary nodes; makeString is told the quote width (1 for single/double
// quotes, 3 for triple-quoted) so it can strip the delimiters.
void String() #void :
{
    TokenPtr t;
}
{
    ( t=<SINGLE_STRING> { makeString(t->image, 1, jjtThis); } {} )#String
|   ( t=<SINGLE_STRING2> { makeString(t->image, 1, jjtThis); } {} )#String
|   ( t=<TRIPLE_STRING> { makeString(t->image, 3, jjtThis); } {} )#String
|   ( t=<TRIPLE_STRING2> { makeString(t->image, 3, jjtThis); } {} )#String
|   ( t=<SINGLE_BSTRING> { makeString(t->image, 1, jjtThis); } {} )#Binary
|   ( t=<SINGLE_BSTRING2> { makeString(t->image, 1, jjtThis); } {} )#Binary
|   ( t=<TRIPLE_BSTRING> { makeString(t->image, 3, jjtThis); } {} )#Binary
|   ( t=<TRIPLE_BSTRING2> { makeString(t->image, 3, jjtThis); } {} )#Binary
}

