package com.pc.cloud.anlysis;

import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.analysis.tokenattributes.TermAttribute;
import org.apache.lucene.util.AttributeSource;

import java.io.IOException;
import java.io.Reader;

/**
 * A {@link Tokenizer} that emits a single token per character, keeping only
 * letters (lowercase, uppercase, or "other" letters such as CJK ideographs)
 * and decimal digits; every other character is skipped and counted as a
 * position increment. Each emitted term is lower-cased via {@link #normalize}.
 *
 * <p>Not thread-safe, like all Lucene TokenStreams: use one instance per thread.
 */
public class OneCharTokenizer extends Tokenizer {

	// Absolute offset (in chars) of the start of ioBuffer within the stream.
	private int offset = 0, bufferIndex = 0, dataLen = 0;
	// NOTE(review): unused — kept for binary/source compatibility; single-char
	// tokens can never exceed it anyway.
	private static final int MAX_WORD_LEN = 255;
	private static final int IO_BUFFER_SIZE = 4096;
	private final char[] ioBuffer = new char[IO_BUFFER_SIZE];

	private TermAttribute termAtt;
	private OffsetAttribute offsetAtt;
	private PositionIncrementAttribute posIncrAtt;

	/**
	 * Creates a tokenizer reading from {@code input}.
	 *
	 * @param input the character stream to tokenize
	 */
	public OneCharTokenizer(Reader input) {
		super(input);
		init();
	}

	/**
	 * Creates a tokenizer sharing the attributes of {@code source}.
	 *
	 * @param source attribute source to share attribute instances with
	 * @param input  the character stream to tokenize
	 */
	public OneCharTokenizer(AttributeSource source, Reader input) {
		super(source, input);
		init();
	}

	/**
	 * Creates a tokenizer using the given attribute factory.
	 *
	 * @param factory factory used to create attribute instances
	 * @param input   the character stream to tokenize
	 */
	public OneCharTokenizer(AttributeFactory factory, Reader input) {
		super(factory, input);
		init();
	}

	// Registers the attributes used by incrementToken(); shared by all constructors.
	private void init() {
		offsetAtt = addAttribute(OffsetAttribute.class);
		termAtt = addAttribute(TermAttribute.class);
		posIncrAtt = addAttribute(PositionIncrementAttribute.class);
	}

	/**
	 * Normalizes a token character before it is stored in the term buffer.
	 * The default implementation lower-cases it.
	 *
	 * @param c the raw character from the input
	 * @return the normalized character
	 */
	protected char normalize(char c) {
	    return Character.toLowerCase(c);
	}

	/**
	 * Advances to the next single-character token.
	 *
	 * @return {@code true} if a token was produced, {@code false} at end of stream
	 * @throws IOException if reading from the underlying {@link Reader} fails
	 */
	@Override
	public boolean incrementToken() throws IOException {
		clearAttributes();
	    int length = 0;
	    int start = bufferIndex;
	    char[] buffer = termAtt.termBuffer();
	    int skippedPositions = 0;
	    while (true) {

	      // Refill the I/O buffer when it has been fully consumed.
	      if (bufferIndex >= dataLen) {
	        offset += dataLen;
	        dataLen = input.read(ioBuffer);
	        if (dataLen == -1) {
	          dataLen = 0;                            // so next offset += dataLen won't decrement offset
	          if (length > 0)
	            break;
	          else
	            return false;
	        }
	        bufferIndex = 0;
	      }

	      final char c = ioBuffer[bufferIndex++];

	      // Keep letters and decimal digits; everything else is a skipped position.
	      boolean flag = false;
	      switch (Character.getType(c)) {
	          case Character.DECIMAL_DIGIT_NUMBER:
	          case Character.LOWERCASE_LETTER:
	          case Character.UPPERCASE_LETTER:
	          case Character.OTHER_LETTER:
	        	  flag = true;
	        	  break;
	      }
	      if (flag) {
	    	  if (length == 0)
	              start = offset + bufferIndex - 1;   // absolute start offset of this char
	            else if (length == buffer.length)
	              buffer = termAtt.resizeTermBuffer(1 + length);

	    	  buffer[length++] = normalize(c);

	    	  break;                                  // one character per token
	      } else {
	    	  skippedPositions++;
	      }
	    }

	    termAtt.setTermLength(length);
	    offsetAtt.setOffset(correctOffset(start), correctOffset(start + length));
	    // clearAttributes() resets the increment to 1; add one per skipped char.
	    this.posIncrAtt.setPositionIncrement(this.posIncrAtt.getPositionIncrement() + skippedPositions);
	    return true;
	}

	/**
	 * Publishes the final offset after the stream is exhausted, as required by
	 * the TokenStream workflow (mirrors Lucene's {@code CharTokenizer.end()}).
	 * Previously missing, which broke offset accounting for consumers such as
	 * highlighting and multi-valued fields.
	 */
	@Override
	public final void end() {
	    // offset has accumulated every chunk read, so it equals the stream length here.
	    final int finalOffset = correctOffset(offset);
	    offsetAtt.setOffset(finalOffset, finalOffset);
	}

	/**
	 * Resets the tokenizer to consume a new {@link Reader}. Previously missing:
	 * without clearing the buffer state, a reused tokenizer produced no tokens
	 * (stale {@code dataLen}/{@code bufferIndex}) and wrong offsets.
	 *
	 * @param input the new character stream
	 * @throws IOException if the superclass reset fails
	 */
	@Override
	public void reset(Reader input) throws IOException {
	    super.reset(input);
	    bufferIndex = 0;
	    offset = 0;
	    dataLen = 0;
	}

}
