package com.bestv.search.engine.analyzer;

import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.Tokenizer;

import java.io.IOException;
import java.io.Reader;

/**
 * Tokenizer that emits every contiguous character n-gram of the input, for n
 * from {@link #MIN_LENGTH} up to {@link #MAX_LENGTH} (or up to the input
 * length, if shorter), to support SQL-LIKE-style substring matching.
 *
 * <p>Known limitations, inherited from the original implementation and kept
 * for behavioral compatibility: only the first {@link #IO_BUFFER_SIZE}
 * characters of the input are tokenized (longer input is silently truncated),
 * and token offsets are relative to the internal buffer rather than the
 * underlying stream — NOTE(review): confirm no caller relies on offsets for
 * highlighting before changing this.</p>
 *
 * <p>Not thread-safe; as with all Lucene tokenizers, each instance must be
 * used by a single thread at a time.</p>
 */
public class LikeTokenizer extends Tokenizer {

    /** Size of the single internal read buffer; input beyond this is dropped. */
    private static final int IO_BUFFER_SIZE = 4096;

    /** Longest n-gram length emitted. */
    private static final int MAX_LENGTH = 12;

    /** Shortest n-gram length emitted; each fresh buffer pass starts here. */
    private static final int MIN_LENGTH = 2;

    private final char[] ioBuffer = new char[IO_BUFFER_SIZE];

    /** Start position (within {@link #ioBuffer}) of the next gram of the current length. */
    private int bufferIndex = 0;

    /** Number of valid chars in {@link #ioBuffer}; 0 before the first read. */
    private int dataLen = 0;

    /** Length of the n-grams currently being emitted. */
    private int length = MIN_LENGTH;

    public LikeTokenizer(Reader input) {
        super(input);
    }

    /**
     * Hook for subclasses to map each character (e.g. lower-casing) before it
     * is copied into a token. The default is the identity mapping.
     */
    protected char normalize(char c) {
        return c;
    }

    /**
     * Returns the next n-gram token, or {@code null} once all grams of all
     * lengths up to {@link #MAX_LENGTH} (or the buffered length) are emitted.
     *
     * @param reusableToken token instance to fill and return; must not be null
     * @return the filled token, or {@code null} when exhausted
     * @throws IOException if reading from the underlying reader fails
     */
    @SuppressWarnings("deprecation")
    public final Token next(final Token reusableToken) throws IOException {
        assert reusableToken != null;
        reusableToken.clear();
        int start = bufferIndex;

        // (Re)fill the buffer when the current gram length has outgrown the
        // buffered data. On the very first call dataLen is 0, so this branch
        // also performs the initial read.
        if (length >= dataLen) {
            dataLen = input.read(ioBuffer);
            if (dataLen == -1) {
                return null;
            }
            bufferIndex = 0;
            length = MIN_LENGTH;
            start = 0;
        }

        // The sliding window of the current length has run off the end of the
        // buffered data: grow the length by one and restart at offset 0.
        if (start + length > dataLen) {
            length++;
            start = 0;
            bufferIndex = 0;
            if (length > dataLen || length > MAX_LENGTH) {
                // No longer grams fit (or the length cap was hit): done.
                return null;
            }
        }

        bufferIndex++;

        // Copy the window [start, start + length) through normalize().
        // BUGFIX: the original checked `length == IO_BUFFER_SIZE` after this
        // loop; that branch was unreachable (length is capped at MAX_LENGTH,
        // far below IO_BUFFER_SIZE) and has been removed.
        StringBuilder sb = new StringBuilder(length);
        for (int i = start; i < start + length; i++) {
            sb.append(normalize(ioBuffer[i]));
        }

        reusableToken.setTermText(sb.toString());
        reusableToken.setTermLength(length);
        // Offsets are buffer-relative (see class Javadoc).
        reusableToken.setStartOffset(start);
        reusableToken.setEndOffset(start + length);
        return reusableToken;
    }

    /**
     * Resets the tokenizer to consume a new reader, restoring the exact state
     * of a freshly constructed instance.
     *
     * @throws IOException if the superclass reset fails
     */
    public void reset(Reader input) throws IOException {
        super.reset(input);
        bufferIndex = 0;
        dataLen = 0;
        // BUGFIX: the original set length = 0 here, inconsistent with the
        // constructor's initial state. The first next() call happened to
        // self-correct it (0 >= dataLen forces a re-read that sets length
        // back), but the inconsistency was a latent trap for maintainers.
        length = MIN_LENGTH;
    }
}