package com.gr.analys;
/* ViTokenizer <Version.1>
 * @Author: LongTienSinh
 * @AS2-HEDSPI-SoIT
 * @Hanoi University of Science and Technology
 */
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;
import java.util.Scanner;

import org.apache.lucene.analysis.Tokenizer;

import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.analysis.tokenattributes.TermAttribute;
import vn.hus.nlp.tokenizer.TokenizerProvider;
import vn.hus.nlp.tokenizer.tokens.TaggedWord;

/**
 * Lucene {@link Tokenizer} that emits Vietnamese words produced by the
 * vn.hus.nlp word segmenter, one word per call to {@link #incrementToken()}.
 * The input is consumed line by line; lines containing no tokenizable word
 * (as decided by {@code Word.contain}) are skipped entirely.
 *
 * <p>Not thread-safe; each instance must be confined to a single thread, as
 * is usual for Lucene tokenizers.
 */
public class ViTokenizer1 extends Tokenizer {

    // Per-instance segmentation state. These fields were previously declared
    // static, which made every ViTokenizer1 instance share (and clobber) the
    // same word list, cursor and segmenter -- a correctness bug as soon as a
    // second instance exists. They are only touched by instance methods, so
    // they are now ordinary instance fields.
    private List<TaggedWord> listWords;       // words of the current sentence
    private int _word_index = 0;              // cursor into listWords
    private int _max_word = 0;                // listWords.size() cache
    private vn.hus.nlp.tokenizer.Tokenizer tokenizer; // underlying segmenter

    // Offset bookkeeping across sentences.
    // NOTE(review): `offset` is never advanced anywhere, so end() always
    // reports a final offset of 0 -- kept as-is, confirm intended.
    private int offset = 0, bufferIndex = 0, dataLen = 0, start1 = 0;
    String sentences = "";                    // current input line ("" = none loaded)
    private TermAttribute termAtt;
    private OffsetAttribute offsetAtt;
    private BufferedReader bin;               // line-oriented view of the input

    /*********************************************************************/
    /**
     * Builds a tokenizer over {@code input}, obtaining the shared Vietnamese
     * word segmenter from {@link TokenizerProvider}.
     *
     * @param input the character stream to tokenize
     * @throws IOException if the segmenter provider fails to initialize
     */
    public ViTokenizer1(Reader input) throws IOException {
        super(input);
        offsetAtt = addAttribute(OffsetAttribute.class);
        termAtt = addAttribute(TermAttribute.class);
        tokenizer = TokenizerProvider.getInstance().getTokenizer();
        bin = new BufferedReader(input);
    }

    /**
     * NOTE(review): stub constructor kept only for source/binary
     * compatibility. It leaves the instance uninitialized (no attributes, no
     * segmenter, no reader), so any use after it will throw
     * {@link NullPointerException}. Prefer {@link #ViTokenizer1(Reader)}.
     */
    public ViTokenizer1(Reader reader, String a) {
    }

    /**
     * Runs the Vietnamese word segmenter over {@code in} and (re)loads
     * {@link #listWords} with the result, resetting the word cursor.
     *
     * @param in characters of one input line
     * @throws IOException if the underlying segmenter fails. Previously the
     *         exception was swallowed with {@code printStackTrace()}, after
     *         which stale results from the previous sentence were silently
     *         reused via {@code getResult()}.
     */
    private void fillVietTokens(char[] in) throws IOException {
        // Carriage returns are replaced with a sentence terminator before
        // handing the text to the segmenter.
        String str = String.valueOf(in).replace("\r", ".");
        try {
            tokenizer.tokenize(new StringReader(str));
        } catch (IOException e) {
            throw e;
        } catch (Exception e) {
            // Wrap-and-rethrow (initCause keeps Java 5 compatibility).
            IOException wrapped =
                    new IOException("Vietnamese tokenization failed: " + e);
            wrapped.initCause(e);
            throw wrapped;
        }
        listWords = new ArrayList<TaggedWord>(tokenizer.getResult());
        _max_word = listWords.size();
        _word_index = 0;
    }

    /** @return true when {@code c} can start an emitted token. */
    protected boolean isTokenChar(char c) {
        return Character.isLetterOrDigit(c);
    }

    /** Hook for character normalization; identity by default. */
    protected char normalize(char c) {
        return c;
    }

    /**
     * Reads lines from {@link #bin} until one containing at least one
     * tokenizable word is found, segments it, and resets the word cursor.
     * This logic was previously duplicated verbatim in two places inside
     * {@link #incrementToken()}.
     *
     * @return false when the underlying reader is exhausted
     * @throws IOException on read or segmentation failure
     */
    private boolean advanceToNextSentence() throws IOException {
        sentences = bin.readLine();
        if (sentences == null) {
            return false;
        }
        // Skip lines with no usable tokens.
        while (Word.contain(sentences) == 0) {
            sentences = bin.readLine();
            if (sentences == null) {
                return false;
            }
        }
        fillVietTokens(sentences.toCharArray());
        _word_index = 0;
        if (bufferIndex != 0) {
            // +2 accounts for the line separator consumed by readLine().
            // NOTE(review): assumes a two-character separator (CRLF) -- a
            // lone '\n' would make subsequent offsets drift by one. Confirm.
            start1 = bufferIndex + 2;
            bufferIndex = bufferIndex + 2;
        }
        return true;
    }

    /**
     * Advances to the next Vietnamese word, filling the term and offset
     * attributes.
     *
     * @return false when the input is exhausted
     * @throws IOException on read or segmentation failure
     */
    @Override
    public final boolean incrementToken() throws IOException {
        clearAttributes();
        int length = 0;
        int start = dataLen;
        while (true) {
            // Refill when no sentence is loaded or its words are used up.
            if (_word_index > _max_word - 1 || sentences.length() == 0) {
                if (!advanceToNextSentence()) {
                    return false;
                }
                start = 0;
            }
            String _s = listWords.get(_word_index).toString();
            // Skip tokens that do not start with a letter or digit
            // (punctuation), refilling across sentence boundaries as needed.
            while (isTokenChar(_s.charAt(0)) == false) {
                _word_index++;
                if (_word_index > _max_word - 1) {
                    if (!advanceToNextSentence()) {
                        return false;
                    }
                    start = 0;
                }
                _s = listWords.get(_word_index).toString();
            }
            // NOTE(review): returns a token without setting any attribute
            // when the scan position ran past the sentence end -- the caller
            // sees whatever clearAttributes() left. Kept as-is; confirm.
            if (start >= sentences.length()) {
                return true;
            }
            // Locate the word inside the raw line to get its true offset.
            Token1 _token = Word.substringT(sentences, start, Word.contain(_s));
            length = _s.length();
            dataLen = start + length;
            start = _token.getStart();

            termAtt.setTermBuffer(_s);
            termAtt.setTermLength(_s.length());
            offsetAtt.setOffset(correctOffset(start1 + start),
                                correctOffset(bufferIndex + length));
            _word_index++;
            bufferIndex = bufferIndex + length;
            return true;
        }
    }

    /** Sets the final offset once the stream is exhausted. */
    @Override
    public final void end() {
        // NOTE(review): `offset` is never advanced, so this is always 0.
        int finalOffset = correctOffset(offset);
        offsetAtt.setOffset(finalOffset, finalOffset);
    }

    /**
     * Reuses this tokenizer on a new input stream.
     *
     * <p>Previously the sentence buffer and word cursor were not cleared,
     * so words left over from the previous stream leaked into the new one.
     */
    @Override
    public void reset(Reader input) throws IOException {
        super.reset(input);
        bin = new BufferedReader(input);
        bufferIndex = 0;
        offset = 0;
        dataLen = 0;
        start1 = 0;
        sentences = "";
        listWords = null;
        _word_index = 0;
        _max_word = 0;
    }
}
