/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package banaanvraag.tools.tokeniser;

import java.util.ArrayList;
import java.util.List;
import banaanvraag.tools.lists.AdvancedArrayList;

/**
 *
 * @author Luc
 */
public abstract class AbstractAdvancedTokeniser {

    /** Raw tokens from the SimpleTokeniser; released (nulled) after stage1. */
    private List<Token> stage1Tokens;
    /** Output of stage1 (grouped / filtered tokens); input of stage2. */
    private List<Token> stage2Tokens;
    /** Output of the most recent stage2 pass; input of stage3. */
    private List<Token> stage3Tokens;
    /** Final token list produced by {@link #tokenise()}. */
    private List<Token> tokens;
    /** Token types that get concatenated when they appear consecutively. */
    private AdvancedArrayList<Integer> autoConcat;
    /** Token types that are dropped entirely during stage1. */
    private AdvancedArrayList<Integer> noInclude;
    /** Token contents that should be re-typed as {@link #specialType}. */
    private AdvancedArrayList<String> specials;
    private int specialType;
    private int specialBaseType;
    /** Cursor shared by the stage loops and {@link #group()}. */
    private int index;
    /** When true, a token must also have type {@link #specialBaseType} to become special. */
    private boolean strictSpecials = false;

    /**
     * Create a new AdvancedTokeniser for processing tokens to more complex tokens.
     * That is, Tokens whose value may contain more than one character and can
     * be built with tokens of a different tokentype.
     * @param _tokens The tokens from a SimpleTokeniser
     */
    public AbstractAdvancedTokeniser(List<Token> _tokens) {
        stage1Tokens = _tokens;
        tokens = new ArrayList<Token>();
    }

    /**
     * Group subsequent tokens of the same type into a single token.
     * Advances {@link #index} past every token that was absorbed, leaving it
     * on the last token of the group so the caller's {@code index++} moves on.
     * @return A token whose content is the concatenation of the run
     */
    private Token group() {
        Token newToken = new Token(stage1Tokens.get(index).type,
                stage1Tokens.get(index).content, stage2Tokens.size());
        // Peek one token ahead, so the bound must be index + 1 < size().
        // The original guard (index < size()) read past the end of the list
        // when a group reached the final token.
        while (index + 1 < stage1Tokens.size()
                && stage1Tokens.get(index + 1).type == newToken.type) {
            newToken.content += stage1Tokens.get(index + 1).content;
            index++;
        }
        return newToken;
    }

    /**
     * First pipeline pass: concatenate runs of autoConcat-typed tokens and
     * drop every token whose type is listed in noInclude.
     * Frees the stage-1 input afterwards.
     */
    public void stage1() {
        index = 0;

        stage2Tokens = new ArrayList<Token>();

        // Group and ignore some tokens
        while (index < stage1Tokens.size()) {
            int type = stage1Tokens.get(index).type;
            if (autoConcat.contains(type)) {
                stage2Tokens.add(group());
            } else if (!noInclude.contains(type)) {
                stage2Tokens.add(stage1Tokens.get(index));
            }
            // Tokens with a noInclude type are silently dropped.
            index++;
        }
        stage1Tokens = null; // No longer needed, free the memory
    }

    /**
     * This is for tokens of the same token type that should get their own type.
     * A good example is for a C-like language, where keywords like
     * <code>int</code> are important. To prevent them to be recognised like,
     * say, variables they should get their own type. Those keywords are called
     * 'specials' and this function takes care of them.
     * If you want to have multiple token types for different specials use
     * for every type <code>setSpecials</code> and <code>stage2</code>.
     * May be called repeatedly: the output becomes the new stage-2 input.
     */
    public void stage2() {
        stage3Tokens = new ArrayList<Token>();

        index = 0; // And we start over
        // Turn some simple (1 token type) token groups into special tokens.
        while (index < stage2Tokens.size()) {
            Token current = stage2Tokens.get(index);
            // A token is special when its content matches AND, in strict
            // mode, its original type equals specialBaseType.
            boolean isSpecial = specials.contains(current.content)
                    && (!strictSpecials || current.type == specialBaseType);
            int newType = isSpecial ? specialType : current.type;
            stage3Tokens.add(new Token(newType, current.content,
                    stage3Tokens.size()));
            index++;
        }
        stage2Tokens = stage3Tokens; // allow chained stage2 passes
    }

    /**
     * Groups <code>setSpecials</code> and <code>stage2</code> in one function
     * @param _specials The AAL containing the special values
     * @param _newTokenType The tokentype to give to matching tokens
     */
    public void parseSpecials(AdvancedArrayList<String> _specials, int _newTokenType) {
        setSpecials(_specials, _newTokenType);
        stage2();
    }

    /**
     * Groups <code>setSpecials</code> and <code>stage2</code> in one function
     * @param _specials The AAL containing the special values
     * @param _newTokenType The tokentype to give to matching tokens
     * @param _originalTokenType The type a token must have when strict
     * @param _strict Whether to check the original token type
     */
    public void parseSpecials(AdvancedArrayList<String> _specials, int _newTokenType,
            int _originalTokenType, boolean _strict) {
        setSpecials(_specials, _newTokenType, _originalTokenType, _strict);
        stage2();
    }

    /**
     * This is for the really complex tokens. These tokens consist of different
     * token types and require special provided logic.
     * If you don't use this, just use <code>return tokens;</code>
     * @param tokens The tokens produced by stage2
     * @return The processed tokens
     */
    public abstract List<Token> stage3(List<Token> tokens);

    /**
     * This is a quick tokeniser, using just one special type.
     * Runs stage1, stage2 and stage3 in order.
     * NOTE(review): assumes setAutoConcat, setNoInclude and setSpecials were
     * called beforehand, otherwise stage1/stage2 hit a null list.
     * @return The fully processed token list
     */
    public List<Token> tokenise() {

        stage1();
        stage2();
        tokens = stage3(stage3Tokens);
        return tokens;
    }

    /**
     * Set the list of tokentypes that should be placed together in one Token
     * if they come after each other.
     * @param _autoConcat The AAL containing the types
     */
    public void setAutoConcat(AdvancedArrayList<Integer> _autoConcat) {
        autoConcat = _autoConcat;
    }

    /**
     * Set the tokentypes that should be totally ignored
     * @param _noInclude The AAL containing the types
     */
    public void setNoInclude(AdvancedArrayList<Integer> _noInclude) {
        noInclude = _noInclude;
    }

    /**
     * Turn tokens with a certain value into a Token with a different token type
     * @param _specials The AAL containing the values
     * @param _newTokenType The tokentype to give to the new tokens
     */
    public void setSpecials(AdvancedArrayList<String> _specials, int _newTokenType) {
        specialType = _newTokenType;
        specials = _specials;
        strictSpecials = false;
    }

    /**
     * Turn tokens with a certain value into a Token with a different token type
     * , but only if the original tokentype equals to <code>_originalTokenType</code>
     * if <code>strict</code> is set to true.
     * @param _specials The AAL containing the values
     * @param _newTokenType The tokentype to give to the new tokens
     * @param _originalTokenType The tokentype that the original tokens must have to comply for specialising
     * @param _strict Set this to true and the original token type will get checked
     */
    public void setSpecials(AdvancedArrayList<String> _specials, int _newTokenType,
            int _originalTokenType, boolean _strict) {
        specialType = _newTokenType;
        specialBaseType = _originalTokenType;
        specials = _specials;
        strictSpecials = _strict;
    }

    /**
     * Get the list of tokens to be processed by stage3
     * @return The tokens produced by the most recent stage2 pass
     */
    public List<Token> getStage3Tokens() {
        return stage3Tokens;
    }
}
