package com.metaweb.lessen.tokenizers;

import com.metaweb.lessen.tokens.ComponentColor;
import com.metaweb.lessen.tokens.NumericToken;
import com.metaweb.lessen.tokens.Token;
import com.metaweb.lessen.tokens.Token.Type;


/**
 * A decorator tokenizer that regroups certain runs of tokens from an
 * underlying tokenizer into single composite tokens. Currently it collapses
 * an {@code rgb(...)} or {@code rgba(...)} function-call sequence — function
 * token, numeric components, separating commas, interleaved whitespace and
 * comments, and the closing parenthesis — into one {@link ComponentColor}
 * token whose text spans the whole call.
 */
public class RegroupingTokenizer implements Tokenizer {
    protected final BufferedTokenizer   _tokenizer;
    // The current (possibly regrouped) token exposed to callers; null at end of input.
    protected Token                     _token;
    
    /**
     * Wraps the given tokenizer and positions this tokenizer on its first
     * (possibly regrouped) token.
     *
     * @param tokenizer the underlying token source
     */
    public RegroupingTokenizer(Tokenizer tokenizer) {
        _tokenizer = new BufferedTokenizer(tokenizer);
        
        _token = _tokenizer.getToken();
        resolve();
    }
    
    /**
     * Returns the current token, or {@code null} if the input is exhausted.
     */
    @Override
    public Token getToken() {
        return _token;
    }

    /**
     * Advances to the next token, regrouping if the new position starts an
     * {@code rgb(}/{@code rgba(} sequence.
     */
    @Override
    public void next() {
        _tokenizer.next();
        _token = _tokenizer.getToken();
        resolve();
    }
    
    /**
     * Consumes consecutive whitespace and comment tokens from the underlying
     * tokenizer, appending their text to {@code sb}.
     *
     * @param sb accumulator for the raw text of the skipped tokens
     * @return the first non-whitespace, non-comment token, or {@code null}
     *         at end of input
     */
    protected Token eatWhitespaceAndComment(StringBuffer sb) {
        Token t;
        while ((t = _tokenizer.getToken()) != null && (t.type == Type.Whitespace || t.type == Type.Comment)) {
            sb.append(t.text);
            _tokenizer.next();
        }
        return t;
    }
    
    /**
     * If the current token begins an {@code rgb(}/{@code rgba(} call, collects
     * up to four numeric components (percentages or numbers, comma-separated)
     * plus the closing parenthesis, and replaces {@code _token} with a single
     * {@link ComponentColor} covering the whole span. Otherwise leaves
     * {@code _token} untouched.
     */
    protected void resolve() {
        if (_token == null) {
            return;
        }
        if (_token.type == Type.Function && 
            (_token.text.equals("rgb(") || _token.text.equals("rgba("))) {
            
            int tokenStart = _token.start;
            
            StringBuffer sb = new StringBuffer();
            sb.append(_token.text);
            _tokenizer.next();
            _token = eatWhitespaceAndComment(sb);
            
            // Up to 4 components: r, g, b, and an optional alpha for rgba().
            NumericToken[] components = new NumericToken[4];
            int component = 0;
            
            // NOTE: instanceof is false for null, so no explicit null check is needed.
            while (component < components.length && 
                    _token instanceof NumericToken &&
                    (_token.type == Type.Percentage || _token.type == Type.Number)
                ) {
                components[component++] = (NumericToken) _token;
                
                sb.append(_token.text);
                _tokenizer.next();
                _token = eatWhitespaceAndComment(sb);
                
                // Swallow the comma separator (and any surrounding whitespace/comments).
                if (_token != null && _token.type == Type.Delimiter && _token.text.equals(",")) {
                    sb.append(_token.text);
                    _tokenizer.next();
                    _token = eatWhitespaceAndComment(sb);
                }
            }
            
            if (_token != null && _token.type == Type.Delimiter && _token.text.equals(")")) {
                sb.append(_token.text);
                // Don't consume the ")": the next call to next() will advance past it.
            }
            
            String text = sb.toString();
            _token = new ComponentColor(
                tokenStart,
                tokenStart + text.length(),
                text,
                components[0],
                components[1],
                components[2],
                components[3]  // null unless an alpha component was present
            );
        }
    }
}
