package com.gr.analys;
/* ViTokenizer <Version.1>
 * @Author: LongTienSinh
 * @AS2-HEDSPI-SoIT
 * @Hanoi University of Science and Technology
 */
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.analysis.Tokenizer;

import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.analysis.tokenattributes.TermAttribute;
import vn.hus.nlp.tokenizer.TokenizerProvider;
import vn.hus.nlp.tokenizer.tokens.TaggedWord;

/**
 * Lucene {@link Tokenizer} that emits Vietnamese words produced by the
 * vnTokenizer word segmenter ({@code vn.hus.nlp.tokenizer}). Input is read in
 * large chunks, each chunk is segmented, and the resulting words are emitted
 * one per {@link #incrementToken()} call.
 */
public class ViTokenizer extends Tokenizer {

    /*
     * FIX: listWords, the word cursor and the segmentation engine used to be
     * static, so concurrently (or even consecutively) used ViTokenizer
     * instances shared and corrupted one another's state. They are
     * per-instance state and are now instance fields.
     */
    private List<TaggedWord> listWords;               // segmented words of the current chunk
    private int wordIndex = 0;                        // next entry of listWords to emit
    private int maxWord = 0;                          // cached listWords.size()
    private vn.hus.nlp.tokenizer.Tokenizer tokenizer; // vnTokenizer engine; may be lazily created

    private int offset = 0;                           // total chars consumed from the stream
    private int bufferIndex = 0;                      // position inside the current chunk
    private int dataLen = 0;                          // number of valid chars in ioBuffer
    private static final int IO_BUFFER_SIZE = 1000000;
    private char[] ioBuffer = new char[IO_BUFFER_SIZE];
    private String chunk;                             // current chunk as a String (fed to the segmenter)
    private TermAttribute termAtt;
    private OffsetAttribute offsetAtt;

    /**
     * Creates a tokenizer segmenting Vietnamese text read from {@code input}.
     *
     * @param input character stream to tokenize
     * @throws IOException if obtaining the segmentation engine fails
     */
    public ViTokenizer(Reader input) throws IOException {
        super(input);
        offsetAtt = addAttribute(OffsetAttribute.class);
        termAtt = addAttribute(TermAttribute.class);
        tokenizer = TokenizerProvider.getInstance().getTokenizer();
    }

    /**
     * FIX: this constructor used to ignore {@code reader} and initialized
     * nothing, so any instance built through it failed with NPE on first use.
     * It now wires the reader and the attributes; the segmentation engine is
     * created lazily (see {@link #fillVietTokens} / {@link #getList}) because
     * this signature cannot grow a checked exception without breaking callers.
     *
     * @param reader character stream to tokenize
     * @param a      unused; kept for source compatibility with existing callers
     */
    public ViTokenizer(Reader reader, String a) {
        super(reader);
        offsetAtt = addAttribute(OffsetAttribute.class);
        termAtt = addAttribute(TermAttribute.class);
    }

    /**
     * Runs the segmenter over {@code str} and caches the resulting word list,
     * resetting the emission cursor. Carriage returns are mapped to '.' so the
     * segmenter treats them as sentence breaks.
     */
    private void fillVietTokens(String str) throws IOException {
        str = str.replace("\r", ".");
        Reader reader = new StringReader(str);
        try {
            if (tokenizer == null) {
                // lazy init for instances built via ViTokenizer(Reader, String)
                tokenizer = TokenizerProvider.getInstance().getTokenizer();
            }
            tokenizer.tokenize(reader);
        } catch (Exception e) {
            e.printStackTrace();
        }
        listWords = new ArrayList<TaggedWord>(tokenizer.getResult());
        maxWord = listWords.size();
        wordIndex = 0;
    }

    /** A character belongs to a token iff it is a letter or a digit. */
    protected boolean isTokenChar(char c) {
        return Character.isLetterOrDigit(c);
    }

    /** No normalization is applied; characters pass through unchanged. */
    protected char normalize(char c) {
        return c;
    }

    /**
     * Segments the whole remaining {@code input} stream in one pass and
     * returns the tagged words. Note: this consumes the stream and is not
     * meant to be interleaved with {@link #incrementToken()}.
     */
    public ArrayList<TaggedWord> getList() {
        try {
            if (tokenizer == null) {
                // lazy init for instances built via ViTokenizer(Reader, String)
                tokenizer = TokenizerProvider.getInstance().getTokenizer();
            }
            tokenizer.tokenize(input);
        } catch (Exception e) {
            e.printStackTrace();
        }
        return new ArrayList<TaggedWord>(tokenizer.getResult());
    }

    @Override
    public final boolean incrementToken() throws IOException {
        clearAttributes();
        int length = 0;
        int start = bufferIndex;
        while (true) {
            if (bufferIndex >= dataLen) {
                // current chunk exhausted: read the next one and re-segment it
                dataLen = input.read(ioBuffer);
                if (dataLen == -1) { // end of input
                    dataLen = 0; // so a later offset += dataLen cannot decrement offset
                    if (length > 0) {
                        break;
                    }
                    return false;
                }
                offset += dataLen;
                // FIX: build the chunk directly from the valid region instead of
                // stringifying the whole 1 MB buffer and substring-ing it.
                chunk = new String(ioBuffer, 0, dataLen);
                fillVietTokens(chunk);
                bufferIndex = 0;
            }

            if (wordIndex > maxWord - 1) {
                return false;
            }
            String word = listWords.get(wordIndex).toString();

            // skip entries that do not start with a letter/digit (punctuation etc.)
            while (!isTokenChar(word.charAt(0))) {
                wordIndex++;
                if (wordIndex > maxWord - 1) {
                    return false;
                }
                word = listWords.get(wordIndex).toString();
            }

            // locate the word inside the chunk to recover its character span
            Token1 token = Word.substringT(chunk, start, Word.contain(word));
            length = token.getToken().length();
            start = token.getStart();
            bufferIndex = bufferIndex + length;
            break;
        }
        termAtt.setTermBuffer(listWords.get(wordIndex).toString());
        termAtt.setTermLength(listWords.get(wordIndex).toString().length());
        // NOTE(review): start/bufferIndex are chunk-relative, not absolute
        // stream positions; offsets of tokens after the first chunk look
        // wrong for multi-chunk inputs — confirm whether offset should be added.
        offsetAtt.setOffset(correctOffset(start), correctOffset(bufferIndex));
        if (wordIndex < maxWord) {
            wordIndex++;
        }
        return true;
    }

    @Override
    public final void end() {
        // report the final consumed offset to downstream consumers
        int finalOffset = correctOffset(offset);
        offsetAtt.setOffset(finalOffset, finalOffset);
    }

    @Override
    public void reset(Reader input) throws IOException {
        super.reset(input);
        ioBuffer = new char[IO_BUFFER_SIZE];
        bufferIndex = 0;
        offset = 0;
        dataLen = 0;
        // FIX: also drop the segmentation state of the previous stream so a
        // reused tokenizer cannot emit stale words before the first refill.
        listWords = null;
        wordIndex = 0;
        maxWord = 0;
    }
}