/*
 * ModelCC, distributed under ModelCC Shared Software License, www.modelcc.org
 */

package org.modelcc.lexer.flex;

import java.io.IOException;
import java.io.Reader;
import java.io.Serializable;
import java.io.StringWriter;
import java.io.Writer;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.modelcc.language.lexis.LexicalSpecification;
import org.modelcc.language.lexis.TokenOption;
import org.modelcc.language.lexis.TokenSpecification;
import org.modelcc.lexer.LexicalGraph;
import org.modelcc.lexer.Token;
import org.modelcc.lexer.recognizer.MatchedObject;

/**
 * FLex - Fast Lexer
 * 
 * @author Fernando Berzal (berzal@acm.org)
 */
public class Flex implements Serializable
{
    /** Serial version identifier, pinned explicitly for the Serializable contract. */
    private static final long serialVersionUID = 1L;

    /**
     * Input string: the full content read from the Reader passed to {@link #scan}.
     */
    String inputs;

    /**
     * Token list produced by the scanning phase (only CONSIDER tokens are kept).
     */
    List<Token> tokens;

    /**
     * Builds a token, filling its data, and validates it.
     *
     * @param m the token specification whose builder fills and validates the token
     * @param t the token to be built
     * @return true if the token is valid, false if not
     */
    private boolean build(TokenSpecification m, Token t)
    {
        return m.getBuilder().build(t);
    }


    /**
     * Performs lexical analysis.
     *
     * @param ls the lexer specification
     * @param input the input character stream
     * @return the resulting lexical graph
     */
    public LexicalGraph scan(LexicalSpecification ls, Reader input)
    {
        inputs = readString(input);
        tokens = new ArrayList<Token>();

        List<TokenSpecification> stspecs = ls.getTokenSpecifications();
        Map<TokenSpecification,Set<TokenSpecification>> precedences = ls.getPrecedences();

        scanInput(stspecs, precedences);

        return buildLexicalGraph();
    }


    /**
     * Reads the whole content of a Reader into a String.
     * <p>
     * Best-effort: if an I/O error occurs mid-read, the characters accumulated
     * so far are returned. The reader is not closed here; the caller retains
     * ownership of it.
     *
     * @param input the character stream to drain
     * @return the stream content read so far (possibly empty)
     */
    private String readString(Reader input)
    {
        Writer writer = new StringWriter();
        char[] buffer = new char[1024];
        int n;
        try {
            while ((n = input.read(buffer)) != -1) {
                writer.write(buffer, 0, n);
            }
        } catch (IOException ignored) {
            // Deliberate best-effort read: on an I/O failure, fall through
            // and return whatever has been accumulated so far.
            // (Narrowed from catch (Exception): runtime bugs now propagate.)
        }
        return writer.toString();
    }


    // Scanning phase
    // --------------

    /**
     * Greedy left-to-right scan of {@code inputs}.
     * <p>
     * At each position every token specification is tried. The current
     * candidate is replaced when a new match is strictly longer OR its
     * specification takes precedence over the current candidate's.
     * NOTE(review): because of the OR, precedence can override match length
     * even for a shorter match — this looks intentional, confirm.
     * <p>
     * Only tokens whose option is {@code TokenOption.CONSIDER} are appended to
     * the token list; other matched tokens still advance the position.
     * Scanning stops at the first position where no specification matches.
     *
     * @param stspecs the token specifications to try at each position
     * @param precedences maps a specification to the set of specifications it precedes
     */
    private void scanInput (
            List<TokenSpecification> stspecs,
            Map<TokenSpecification, Set<TokenSpecification>> precedences)
    {
        int start,end;
        MatchedObject match;
        Token t;

        Token currentToken;
        TokenSpecification currentTS;

        int position=0;

        while (position<inputs.length()) {

            currentToken = null;
            currentTS = null;

            for (TokenSpecification ts: stspecs) {  // Try to match all tokens

                match = ts.getRecognizer().read(inputs,position);

                if (match != null) {
                    start = position;
                    end = position + match.getText().length() - 1;   // inclusive end index

                    if (  ( currentToken==null)
                       || ( currentToken.getEndIndex() < end)
                       || ( precedes(precedences,ts,currentTS) ) ) {

                        t = new Token(ts.getType(),match.getObject(),start,end,match.getText());

                        // Only a successfully built (valid) token may become the candidate.
                        if (build(ts,t)) {
                            currentTS = ts;
                            currentToken = t;
                        }
                    }
                }
            }

            if (currentToken!=null) {
                // Token found
                if (currentTS.getTokenOption()==TokenOption.CONSIDER) {
                    tokens.add(currentToken);
                }
                position = currentToken.getEndIndex() + 1;

            } else {
                // No matches are found at the current position
                // Alternatives:
                // 1) position++; to ignore unrecognized input
                // 2) return; to stop scanning
                return;
            }
        }
    }


    /**
     * Checks whether one token specification takes precedence over another.
     *
     * @param precedences maps a specification to the set of specifications it precedes
     * @param ts1 the candidate specification
     * @param ts2 the specification it is compared against (may be null)
     * @return true if ts1 is registered as preceding ts2, false otherwise
     */
    private boolean precedes (Map<TokenSpecification,Set<TokenSpecification>> precedences, TokenSpecification ts1, TokenSpecification ts2)
    {
        Set<TokenSpecification> precedes = precedences.get(ts1);

        if (precedes!=null)
            return precedes.contains(ts2);
        else
            return false;
    }


    // Lexical graph generation step
    // -----------------------------

    /**
     * Builds the lexical graph from the scanned token list: links each pair of
     * consecutive tokens into a chain and marks the first token (if any) as a
     * start token.
     *
     * @return the lexical graph spanning the whole input string
     */
    private LexicalGraph buildLexicalGraph ()
    {
        int inputStart = 0;
        int inputEnd = inputs.length()-1;   // inclusive; -1 for empty input

        LexicalGraph graph = new LexicalGraph(tokens,inputStart,inputEnd);

        for (int i=1; i<tokens.size(); i++)
            graph.link( tokens.get(i-1), tokens.get(i) );

        if (tokens.size()>0)
            graph.addStartToken(tokens.get(0));

        return graph;
    }

}
