package com.metaweb.lessen;

import java.util.LinkedList;
import java.util.List;

import com.metaweb.lessen.expr.Evaluable;
import com.metaweb.lessen.expr.FixedTokenEvaluable;
import com.metaweb.lessen.expr.LessMixinFunction;
import com.metaweb.lessen.expr.OperatorCall;
import com.metaweb.lessen.expr.VariableAccessEvaluable;
import com.metaweb.lessen.tokenizers.BufferedTokenizer;
import com.metaweb.lessen.tokenizers.Tokenizer;
import com.metaweb.lessen.tokens.Token;
import com.metaweb.lessen.tokens.UriToken;
import com.metaweb.lessen.tokens.Token.Type;

/**
 * Base class for LESS parsers. Provides the shared statement / expression
 * parsing machinery (variable declarations, {@code @import} handling, mixin
 * invocation, and arithmetic expression evaluation against a {@link Scope}).
 * Concrete subclasses decide where produced tokens go by implementing
 * {@link #outputToken(Token)} and {@link #passInnerBlockTokenThrough(Token)}.
 */
abstract public class ParserBase {
    /** Token source shared with sub-parsers; supports arbitrary lookahead. */
    final protected BufferedTokenizer _tokenizer;
    /** Resolves {@code @import} URIs to tokenizers; may be null, which disables import expansion. */
    final protected ResourceFinder _resourceFinder;
    /** Variable and mixin-function bindings visible at this nesting level. */
    final protected Scope _scope;
    /** True when this parser operates at the top level of the style sheet. */
    final protected boolean _topLevel;

    protected ParserBase(BufferedTokenizer tokenizer, ResourceFinder resourceFinder, Scope scope, boolean topLevel) {
        _tokenizer = tokenizer;
        _resourceFinder = resourceFinder;
        _scope = scope;
        _topLevel = topLevel;
    }
    
    /** Creates a parser for a {@code .name(...) { ... }} mixin-function definition. */
    protected LessFunctionDefinitionParser createLessFunctionDefinitionParser() {
        return new LessFunctionDefinitionParser(_tokenizer, _resourceFinder, _scope);
    }

    /** Creates a parser for a plain CSS block, running in a child scope of this one. */
    protected Tokenizer createCssBlockParser(boolean nestChildren) {
        return new BlockParser(_tokenizer, _resourceFinder, new Scope(_scope), null, nestChildren);
    }

    /** Emits one token to this parser's output. */
    abstract protected void outputToken(Token t);
    
    /**
     * Evaluates {@code e} against the current scope and emits the resulting
     * token, or an {@code Invalid} "ERROR" token if evaluation produced nothing.
     */
    protected void outputEvaluable(Evaluable e) {
        Token t = e.eval(_scope);
        if (t != null) {
            outputToken(t);
        } else {
            outputToken(new Token(Type.Invalid, -1, -1, "ERROR"));
        }
    }
    
    /** Emits a token produced while parsing a nested block. */
    abstract protected void passInnerBlockTokenThrough(Token t);
    
    /**
     * Parses statements until a closing "}" is seen or the input is exhausted.
     * The "}" itself is left unconsumed for the caller to handle.
     */
    protected void parseBody() {
        Token t;
        while ((t = _tokenizer.getToken()) != null) {
            if (t.type == Type.Delimiter && t.text.equals("}")) {
                break; // caller is responsible for consuming the "}"
            }
            parseStatement();
        }
    }
    
    /**
     * Parses one statement: passes through leading trivia, then scans ahead
     * to classify the statement as a block statement (a "{" appears before any
     * ";" or "}") or a flat statement, and dispatches accordingly.
     */
    protected void parseStatement() {
        Token t;
        
        // Pass through whitespace, comments, and CDATA markers verbatim.
        while ((t = _tokenizer.getToken()) != null && (
                t.type == Type.Whitespace || 
                t.type == Type.Comment ||
                t.type == Type.CDataOpen ||
                t.type == Type.CDataClose
            )) {
            outputToken(t);
            _tokenizer.next();
        }
        
        if (t != null) {
            if (t.type == Type.Delimiter && t.text.equals("}")) {
                return; // end of enclosing block; leave "}" unconsumed
            }
            
            int lookahead = 1;
            
            // Scan ahead without consuming: a "{" before any ";" or "}"
            // means this is a block statement (e.g. a rule set).
            Token t2;
            while ((t2 = _tokenizer.getToken(lookahead)) != null) {
                if (t2.type == Type.Delimiter) {
                    if (t2.text.equals(";") || t2.text.equals("}")) {
                        break;
                    } else if (t2.text.equals("{")) {
                        parseBlockStatement();
                        return;
                    }
                }
                
                lookahead++;
            }
            parseFlatStatement();
        }
    }
    
    /**
     * Parses a statement that does not open a block: variable declarations
     * ({@code @x: expr;}), {@code @import}/{@code @import-less} directives,
     * mixin invocations ({@code .name;} or {@code .name(args);}), or an
     * ordinary declaration whose expressions are evaluated and re-emitted.
     */
    protected void parseFlatStatement() {
        Token t = _tokenizer.getToken();
        if (t == null) {
            return; // defensive: callers normally guarantee a current token
        }
        if (t.type == Type.AtIdentifier) {
            int lookahead = lookOverWhitespaceAndComment(1);
            
            Token t2 = _tokenizer.getToken(lookahead);
            if (t2 != null) {
                if (t2.type == Type.Delimiter && t2.text.equals(":")) {
                    // "@name :" — a LESS variable declaration.
                    _tokenizer.next(lookahead + 1);
                    parseVariableDeclaration(t);
                    return;
                } else if (
                        _resourceFinder != null &&
                        t2.type == Type.Uri && 
                        (t.text.equals("@import") || t.text.equals("@import-less"))
                    ) {
                    
                    String where = ((UriToken) t2).unquotedText;
                    Tokenizer subTokenizer = _resourceFinder.open(where);
                    if (subTokenizer != null) {
                        // Parse the imported resource with this scope, rebasing
                        // relative URIs against the imported file's location.
                        subTokenizer = new LessParser(
                            subTokenizer, 
                            _resourceFinder.rebase(where), 
                            _scope
                        );
                        
                        if (t.text.equals("@import")) {
                            // Keep the @import directive in the output...
                            while (lookahead > 0) {
                                outputToken(_tokenizer.getToken());
                                _tokenizer.next();
                                lookahead--;
                            }
                            outputToken(t2);
                            _tokenizer.next();
                            
                            // ...including its trailing ";" if present.
                            lookahead = lookOverWhitespaceAndComment(0);
                            t2 = _tokenizer.getToken(lookahead);
                            if (t2 != null && t2.type == Type.Delimiter && t2.text.equals(";")) {
                                while (lookahead > 0) {
                                    outputToken(_tokenizer.getToken());
                                    _tokenizer.next();
                                    lookahead--;
                                }
                                outputToken(t2);
                                _tokenizer.next();
                            }
                        } else if (t.text.equals("@import-less")) {
                            // Drop the directive itself from the output.
                            _tokenizer.next(lookahead + 1); // swallow tokens
                            
                            lookahead = lookOverWhitespaceAndComment(0);
                            t2 = _tokenizer.getToken(lookahead);
                            if (t2 != null && t2.type == Type.Delimiter && t2.text.equals(";")) {
                                _tokenizer.next(lookahead + 1); // swallow tokens
                            }
                        }
                        
                        // Drain the imported parser; presumably its effects
                        // (scope entries, output) happen as side effects of
                        // iteration — TODO confirm against LessParser.
                        while (subTokenizer.getToken() != null) {
                            subTokenizer.next();
                        }
                        
                        return;
                    }
                }
            }
        }
        
        if (t.type == Type.Operator && t.text.equals(".")) {
            // ".name" — possibly a mixin-function invocation.
            Token t2 = _tokenizer.getToken(1);
            if (t2 != null) {
                if (t2.type == Type.Function) {
                    // ".name(" lexed as a single Function token.
                    _tokenizer.next(2);
                    
                    parseRestOfMixinFunctionInvocation(t2);
                    return;
                } else if (t2.type == Type.Identifier) {
                    int lookahead = lookOverWhitespaceAndComment(2);
                    Token t3 = _tokenizer.getToken(lookahead);
                    if (t3 != null && t3.type == Type.Delimiter) {
                        if (t3.text.equals(";")) {
                            _tokenizer.next(lookahead + 1);
                            
                            invokeMixinFunction(t2, null);
                            return;
                        } else if (t3.text.equals("}")) {
                            // Leave "}" unconsumed for the enclosing block.
                            _tokenizer.next(lookahead);
                            
                            invokeMixinFunction(t2, null);
                            return;
                        } else if (t3.text.equals("(")) {
                            _tokenizer.next(lookahead + 1);
                            
                            parseRestOfMixinFunctionInvocation(t2);
                            return;
                        }
                    }
                }
            }
        }
        
        // Ordinary declaration: evaluate each expression and emit the result.
        while ((t = _tokenizer.getToken()) != null) {
            if (t.type == Type.Delimiter) {
                if (t.text.equals("}")) {
                    break; // leave "}" for the enclosing block parser
                } else if (t.text.equals(";")) {
                    _tokenizer.next();
                    
                    outputToken(t);
                    break;
                }
            }
            
            Object o = parseExpression(true);
            if (o instanceof Token) {
                outputToken((Token) o); // bare token passed through unchanged
            } else if (o instanceof Evaluable) {
                outputEvaluable((Evaluable) o);
            }
            // o == null: parseFactor consumed an empty "( )" group; nothing to
            // emit, but input advanced so the loop cannot spin. (Previously
            // this fell into outputEvaluable(null) and threw an NPE.)
        }
    }
    
    /**
     * Parses a statement that opens a block. A ".name(...) {...}" form defines
     * a mixin function and is registered in the scope (producing no output);
     * anything else is delegated to a nested CSS block parser whose tokens are
     * passed through.
     */
    protected void parseBlockStatement() {
        Token t = _tokenizer.getToken();
        if (t.type == Type.Operator && t.text.equals(".")) {
            Token t2 = _tokenizer.getToken(1);
            if (t2 != null) {
                if (t2.type == Type.Identifier) {
                    int lookahead = lookOverWhitespaceAndComment(2);
                    Token t3 = _tokenizer.getToken(lookahead);
                    if (t3 != null && t3.type == Type.Delimiter && t3.text.equals("(")) {
                        // ".name (" — a mixin function definition.
                        LessFunctionDefinitionParser tokenizer = 
                            createLessFunctionDefinitionParser();
                        
                        _scope.put(tokenizer.getIdentifier(), 
                                tokenizer.getFunction());
                        
                        return;
                    }
                } else if (t2.type == Type.Function) {
                    // ".name(" lexed as one Function token — also a definition.
                    LessFunctionDefinitionParser tokenizer = 
                        createLessFunctionDefinitionParser();
                    
                    _scope.put(tokenizer.getIdentifier(),
                            tokenizer.getFunction());
                    
                    return;
                }
            } 
        }
        
        // Plain CSS block: at-rules (e.g. @media) nest their children.
        Tokenizer tokenizer = createCssBlockParser(t.type == Type.AtIdentifier);
        while ((t = tokenizer.getToken()) != null) {
            passInnerBlockTokenThrough(t);
            tokenizer.next();
        }
    }
    
    /**
     * Returns the smallest lookahead index >= {@code lookahead} whose token
     * is neither whitespace nor a comment.
     */
    protected int lookOverWhitespaceAndComment(int lookahead) {
        return _tokenizer.lookOverWhitespaceAndComment(lookahead);
    }
    
    /**
     * Parses the expression after "@name:", consumes a trailing ";" if
     * present, and binds the (evaluated) value to the variable name in scope.
     *
     * @param atIdentifier the "@name" token being declared
     */
    protected void parseVariableDeclaration(Token atIdentifier) {
        _tokenizer.next(lookOverWhitespaceAndComment(0));
        
        Object expr = parseExpression(true);
        
        int lookahead = lookOverWhitespaceAndComment(0);
        Token t = _tokenizer.getToken(lookahead);
        if (t != null && t.type == Type.Delimiter && t.text.equals(";")) {
            _tokenizer.next(lookahead + 1);
        }
        
        if (expr != null) {
            if (expr instanceof Evaluable) {
                // Variables are evaluated eagerly at declaration time.
                expr = ((Evaluable) expr).eval(_scope);
            }
            _scope.put(atIdentifier.text, expr);
        }
    }
    
    /**
     * Parses an additive expression: term (("+" | "-") term)*.
     *
     * @param bareTokenOK whether a token with no arithmetic meaning may be
     *        returned as-is rather than rejected
     * @return an {@link Evaluable}, a bare {@link Token} (only when
     *         {@code bareTokenOK}), or null if no expression was parsed
     */
    protected Object parseExpression(boolean bareTokenOK) {
        Object term = parseTerm(bareTokenOK);
        if (term == null || LessParser.canBeIntermediateValue(term)) {
            return term;
        }
        
        Evaluable expr = (Evaluable) term;
        while (true) {
            int lookahead = lookOverWhitespaceAndComment(0);
            
            Token t2 = _tokenizer.getToken(lookahead);
            if (t2 != null && t2.type == Type.Operator &&
                (t2.text.equals("+") || t2.text.equals("-"))) {
                
                _tokenizer.next(lookahead + 1);
                _tokenizer.next(lookOverWhitespaceAndComment(0));
                
                Object term2 = parseTerm(false);
                if (term2 != null) {
                    // Left-associative fold: ((a + b) - c) ...
                    expr = new OperatorCall(t2.text, expr, (Evaluable) term2);
                } else {
                    break;
                }
            } else {
                break;
            }
        }
        return expr;
    }
    
    /**
     * Parses a multiplicative term: factor (("*" | "/") factor)*.
     *
     * @param bareTokenOK see {@link #parseExpression(boolean)}
     */
    protected Object parseTerm(boolean bareTokenOK) {
        Object factor = parseFactor(bareTokenOK);
        if (factor == null || !(factor instanceof Evaluable)) {
            return factor;
        }
        
        Evaluable term = (Evaluable) factor;
        while (true) {
            int lookahead = lookOverWhitespaceAndComment(0);
            
            Token t2 = _tokenizer.getToken(lookahead);
            if (t2 != null && t2.type == Type.Operator &&
                (t2.text.equals("*") || t2.text.equals("/"))) {
                
                _tokenizer.next(lookahead + 1);
                _tokenizer.next(lookOverWhitespaceAndComment(0));
                
                Object factor2 = parseFactor(false);
                if (factor2 != null) {
                    term = new OperatorCall(t2.text, term, (Evaluable) factor2);
                } else {
                    break;
                }
            } else {
                break;
            }
        }
        return term;
    }
    
    /**
     * Parses a single factor: a literal value, a variable reference, or a
     * parenthesized sub-expression. A token with no arithmetic meaning is
     * returned as-is when {@code bareTokenOK}, otherwise null.
     */
    protected Object parseFactor(boolean bareTokenOK) {
        Token t = _tokenizer.getToken();
        if (t == null) {
            return null;
        } else {
            if (t.type == Type.Number || 
                t.type == Type.Percentage ||
                t.type == Type.Dimension ||
                t.type == Type.Color ||
                t.type == Type.HashName ||
                t.type == Type.Identifier) {
                
                _tokenizer.next();
                
                return new FixedTokenEvaluable(t);
                
            } else if (t.type == Type.AtIdentifier) {
                
                _tokenizer.next();
                
                // Deferred scope lookup by variable name.
                return new VariableAccessEvaluable(t, t.text);
                
            } else if (t.type == Type.Delimiter) {
                if (t.text.equals("(")) {
                    _tokenizer.next();
                    
                    Object expr = null;
                    
                    int lookahead = lookOverWhitespaceAndComment(0);
                    Token t2 = _tokenizer.getToken(lookahead);
                    if (t2 != null) {
                        _tokenizer.next(lookahead); // eat whitespace
                        
                        expr = parseExpression(false);
                    }
                    
                    // Consume the matching ")" if present; may return null
                    // for an empty group — callers must tolerate that.
                    lookahead = lookOverWhitespaceAndComment(0);
                    t2 = _tokenizer.getToken(lookahead);
                    if (t2 != null && t2.type == Type.Delimiter && t2.text.equals(")")) {
                        _tokenizer.next(lookahead + 1);
                    }
                    
                    return expr;
                }
                
            }
        }
        
        if (bareTokenOK) {
            _tokenizer.next();
            return t;
        } else {
            return null;
        }
    }
    
    /**
     * Parses the comma-separated argument list of a mixin invocation (the
     * opening "(" has already been consumed), consumes the closing ")" and a
     * trailing ";" if present, then invokes the mixin.
     *
     * @param functionNameToken the Function or Identifier token naming the mixin
     */
    protected void parseRestOfMixinFunctionInvocation(Token functionNameToken) {
        List<Token> arguments = new LinkedList<Token>();
        
        while (true) {
            int lookahead = lookOverWhitespaceAndComment(0);
            
            Token t = _tokenizer.getToken(lookahead);
            if (t == null || (t.type == Type.Delimiter && t.text.equals(")"))) {
                break;
            }
            _tokenizer.next(lookahead); // eat the white space
            
            Object o = parseExpression(true);
            if (o == null) {
                break;
            } else if (o instanceof Token) {
                arguments.add((Token) o);
            } else {
                // Arguments are evaluated eagerly in the caller's scope.
                arguments.add(((Evaluable) o).eval(_scope));
            }
            
            lookahead = lookOverWhitespaceAndComment(0);
            t = _tokenizer.getToken(lookahead);
            if (t != null && t.type == Type.Delimiter && t.text.equals(",")) {
                _tokenizer.next(lookahead + 1);
            }
        }
        
        int lookahead = lookOverWhitespaceAndComment(0);
        Token t = _tokenizer.getToken(lookahead);
        if (t != null && t.type == Type.Delimiter && t.text.equals(")")) {
            _tokenizer.next(lookahead + 1);
        }
        
        lookahead = lookOverWhitespaceAndComment(0);
        t = _tokenizer.getToken(lookahead);
        if (t != null && t.type == Type.Delimiter && t.text.equals(";")) {
            _tokenizer.next(lookahead + 1);
        }
        
        invokeMixinFunction(functionNameToken, arguments);
    }
    
    /**
     * Looks up the named mixin in scope and, if found, emits the tokens it
     * produces. Unknown names are silently ignored.
     *
     * @param functionNameToken Function token (text includes the trailing "(",
     *        which is stripped) or plain Identifier token
     * @param arguments evaluated argument tokens, or null for no arguments
     */
    protected void invokeMixinFunction(Token functionNameToken, List<Token> arguments) {
        String name = functionNameToken.type == Type.Function ?
                functionNameToken.text.substring(0, functionNameToken.text.length() - 1) :
                    functionNameToken.text;
                
        Object o = _scope.get(name);
        if (o instanceof LessMixinFunction) { // instanceof already rejects null
            List<Token> results = ((LessMixinFunction) o).invoke(arguments, _scope);
            if (results != null) { // defensive: invoke's null contract is not visible here
                for (Token t : results) {
                    outputToken(t);
                }
            }
        }
    }
}
