package no.uio.ifi.cop.lex;

import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedList;

import no.uio.ifi.cop.common.Common;
import no.uio.ifi.cop.common.Log;


/**
 * Lex input based on regex-tokens.
 *
 * It should be noted that this class is rather weak in
 * terms of inspecting overlapping regular expressions, i.e. 
 * if your regexps are overlapping, you're in for trouble.
 * <pre>
 *
 *    // Typical use:
 *
 *    private final int  LOWERCASE   = 1;
 *    private final int  UPPERCASE   = 2;
 *    private final int  COMMENT     = 3;
 *    private final int  WHITESPACE  = 4;
 *
 *    lexer = new RegLexer();
 *
 *    // NOTE no ^ (begin) or $ (end) on regex!
 *
 *    lexer.addToken( LOWERCASE, "[a-z][A-Za-z0-9_]*", "LOWERCASE");
 *    lexer.addToken( UPPERCASE, "[A-Z_][A-Za-z0-9_]*", "UPPERCASE");
 *    lexer.addToken( COMMENT, "%[^\n]*", "COMMENT");
 *    lexer.addToken( WHITESPACE, "\\s+", "WHITESPACE");
 *
 *    // these will not be returned
 *    
 *    lexer.ignoreToken(WHITESPACE);
 *    lexer.ignoreToken(COMMENT);
 *
 *    Token tmp;
 *
 *    while(! lexer.empty() ){
 *        tmp = lexer.next();
 *        System.out.println(" token: "+tmp);
 *    }
 *
 *
 * </pre>
 *
 * @version 1.0
 * @author  bjarneh@ifi.uio.no
 */


public class RegLexer{


    /** Monotonically increasing id stamped on every produced Token. */
    int identifier;
    /** Registered token patterns; tried in insertion order against each slice. */
    ArrayList<TokenMatcher> matchers;
    /** Tokens awaiting delivery through next()/peek(); ignored types excluded. */
    LinkedList<Token> tokens;
    /** Ordinals of token types that are lexed but never handed to the caller. */
    ArrayList<Integer> ignored;
    /** Every token produced, ignored ones included; kept for error reporting. */
    LinkedList<Token> all;


    /** Create an empty lexer with no token definitions. */
    public RegLexer(){
        identifier  = 0;
        matchers    = new ArrayList<TokenMatcher>(50);
        tokens      = new LinkedList<Token>();
        ignored     = new ArrayList<Integer>(50);
        all         = new LinkedList<Token>();
    }

    /**
     * Reset per-input state so the same token definitions can be used to
     * lex another buffer. Matchers and ignored types are deliberately kept.
     */
    public void clear(){
        identifier = 0;
        all.clear();
        tokens.clear();
    }

    /**
     * Mark a token type as lexed-but-not-returned (whitespace, comments, ...).
     *
     * @param type ordinal of the token type to suppress
     */
    public void ignoreToken(int type){
        ignored.add(type);
    }

    /**
     * Register a token pattern. NOTE: no ^ (begin) or $ (end) on regex.
     *
     * @param type  ordinal identifying this token type
     * @param regex regular expression matching the token
     * @param id    human readable name used in Token.toString/error output
     */
    public void addToken(int type, String regex, String id){
        matchers.add(new TokenMatcher(type, regex, id));
    }

    /**
     * Register a token pattern without a human readable name.
     *
     * @param type  ordinal identifying this token type
     * @param regex regular expression matching the token
     */
    public void addToken(int type, String regex){
        matchers.add(new TokenMatcher(type, regex, "N/A"));
    }

    /**
     * Slurp a file and lex its content; any IOException is fatal.
     *
     * @param filepath path of file to lex
     */
    public void lexFile(String filepath){
        try{
            String content = Common.slurp(filepath);
            lexCharSequence(content);
        }catch(IOException iox){
            Log.fatal("%s", iox);
        }
    }

    /**
     * Lex a buffer into tokens using the registered matchers.
     *
     * The algorithm grows a slice [start, stop) one char at a time until
     * exactly one matcher matches it entirely, then keeps extending the
     * slice greedily ("maximal munch") for as long as that matcher still
     * matches. Overlapping token definitions are not handled (see class doc).
     *
     * @param buffer text to lex; must be non-null and non-empty
     * @throws RuntimeException on empty input; fatal log (and exit) on
     *         unlexable input or ambiguous token definitions
     */
    public void lexCharSequence(CharSequence buffer){

        if( buffer == null || buffer.length() == 0 ){
            throw new RuntimeException("\nRegLexer.lexCharSequence( empty ) -> fail");
        }

        // allow re-use of the same lexer on a new buffer
        if( all.size() > 0 || tokens.size() > 0 ){ clear(); }

        int start = 0;
        int entireM, nopartial, howlong;
        int stop  = 1;
        int max = buffer.length();
        int matchersize = matchers.size();
        CharSequence slice;
        TokenMatcher tmp = null;
        Token ntoken;

        while( stop <= max ){

            entireM = 0; nopartial = 0;

            slice = buffer.subSequence(start, stop);

            for(TokenMatcher tm : matchers){

                if( tm.entireMatch(slice) ){
                    tmp = tm;
                    entireM++;
                }

                // count matchers that cannot even start-match this slice
                if( tm.length() < slice.length() && ! tm.startMatch(slice) ){
                    nopartial++;
                }
            }

            if( entireM == 1 && tmp != null ){ // slurp as much as possible

                // maximal munch: extend while the same matcher still matches
                for(howlong = stop + 1; 
                    howlong <= max && tmp.entireMatch(buffer.subSequence(start, howlong));
                    howlong++){ stop = howlong; }

                slice = buffer.subSequence(start, stop);
                ntoken = new Token(tmp.ordinal, identifier++, slice.toString(), tmp.id);
                if(! ignored.contains( tmp.ordinal ) ){ tokens.add(ntoken); }
                all.add(ntoken);

                start = stop;
                stop++;

            }else{

                // no matcher can ever match: report everything lexed so far
                if( nopartial == matchersize ){
                    StringBuilder sb = new StringBuilder();
                    printValidTokens(sb);
                    sb.append(" <= HERE\n\ninvalid token\n\n");
                    // NOTE(review): if Log.fatalln interprets its argument as a
                    // format string, '%' in token content would break it — verify
                    Log.fatalln(sb.toString());
                }

                stop++;

            }
            
            if( entireM > 1 ){ // this should not happen
                Log.fatal("%d tokens matched slice: %s\n", entireM, slice);
            }

        }

        // start is the first unconsumed index; a fully lexed buffer ends with
        // start == max. (was 'start + 1 < max', which silently dropped a
        // single trailing unparsed character)
        if( start < max ){
            StringBuilder sb = new StringBuilder();
            printValidTokens(sb);
            sb.append("\n>>>>>>>>>>>>>>>>>>>>>>>>>> not parsed\n");
            sb.append(buffer.subSequence(start, max));
            sb.append("\n<<<<<<<<<<<<<<<<<<<<<<<<<< not parsed\n");
            sb.append("\nunable to parse entire file\n");
            // token content may contain '%': never use it as the format string
            Log.fatal("%s", sb.toString());
        }

        if( all.isEmpty() ){// special case buffer.length == 1 + fail
            Log.fatal("unable to parse: '%s'\n", buffer.toString());
        }

    }

    /** Print tokens [0, max] to stderr (trailing ignored tokens trimmed). */
    public void printValidTokens(int max){
        printValidTokens(null, max);
    }

    /** Print all lexed tokens to stderr (trailing ignored tokens trimmed). */
    public void printValidTokens(){
        printValidTokens(null, all.size() -1);
    }

    /** Append all lexed tokens to sb (trailing ignored tokens trimmed). */
    public void printValidTokens(StringBuilder sb){
         printValidTokens(sb, all.size() -1);
    }

    /**
     * Print (sb == null: to stderr, else: append to sb) the content of
     * tokens 0..max, after first pulling max back to the last non-ignored
     * token, so error context does not end in trailing whitespace/comments.
     *
     * @param sb  destination, or null for System.err
     * @param max inclusive upper index into the 'all' list
     */
    public void printValidTokens(StringBuilder sb, int max){

        Token tmp;
        // find actual place to start
        while(max > 0){
            tmp = all.get(max);
            if( ! ignored.contains( tmp.ordinal ) ){
                break;
            }
            max--;
        }

        for(int i = 0; i <= max; i++){
            if( sb == null ){
                System.err.print(all.get(i).content);
            }else{
                sb.append(all.get(i).content);
            }
        }
    }


    /** @return true when no deliverable tokens remain */
    public boolean empty(){
        return tokens.isEmpty();
    }

    /** @return next token, or null when none remain */
    public Token next(){
        return tokens.poll();
    }

    /**
     * Like next(), but running out of tokens is fatal: logs the tokens
     * consumed so far and dies instead of returning null.
     *
     * @return next token
     */
    public Token nextOK(){
        if( empty() ){
            StringBuilder sb = new StringBuilder();
            printValidTokens(sb);
            sb.append(" <= HERE\n\n unexpected end of tokens\n");
            // token content may contain '%': never use it as the format string
            Log.fatal("%s", sb.toString());
        }
        return tokens.poll();
    }

    /** @return next token without consuming it, or null when none remain */
    public Token peek(){
        return tokens.peek();
    }

    /**
     * Like peek(), but running out of tokens is fatal: logs the tokens
     * consumed so far and dies instead of returning null.
     *
     * @return next token (not consumed)
     */
    public Token peekOK(){
        if( empty() ){
            StringBuilder sb = new StringBuilder();
            printValidTokens(sb);
            sb.append(" <= HERE\n\nunexpected end of tokens\n");
            // token content may contain '%': never use it as the format string
            Log.fatal("%s", sb.toString());
        }
        return tokens.peek();
    }

    /** @return number of deliverable tokens remaining */
    public int size(){
        return tokens.size();
    }
}
