/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package plagiadetector.driver;

import java.util.ArrayList;
import java.util.logging.Level;
import java.util.logging.Logger;
import weka.core.tokenizers.*;
/**
 *
 * @author Dell
 */
public class WekaTokenizerDriver {

    /**
     * Demo driver: runs the same sample sentence through four tokenizers
     * (three from Weka plus the local character-level n-gram tokenizer)
     * and prints every token each one produces.
     */
    public static void main(String[] args) {
        final String sample =
                "1.HOWEVER, 2.the 3.egg 4.only 5.got 6.larger 7.and 8.larger, 9.and 10.more 11.and 12.more 13.human";

        NGramTokenizer wordNGrams = new NGramTokenizer();
        AlphabeticTokenizer alphabetic = new AlphabeticTokenizer();
        WordTokenizer words = new WordTokenizer();
        CharNGramTokenizer charBigrams = new CharNGramTokenizer(2);

        try {
            System.out.println("-----------------Testing NGramTokenizer--------------------");
            // Restrict to unigrams so the output is one token per word.
            wordNGrams.setOptions(new String[]{"-min", "1", "-max", "1"});
            emitTokens(wordNGrams, sample);

            System.out.println("-----------------Testing AlphabeticTokenizer--------------------");
            emitTokens(alphabetic, sample);

            System.out.println("-----------------Testing WordTokenizer--------------------");
            emitTokens(words, sample);

            System.out.println("-----------------Testing CharNGramTokenizer--------------------");
            emitTokens(charBigrams, sample);

        } catch (Exception ex) {
            Logger.getLogger(WekaTokenizerDriver.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /**
     * Tokenizes {@code input} with the given tokenizer and prints each
     * resulting token on its own line.
     */
    private static void emitTokens(Tokenizer tokenizer, String input) {
        tokenizer.tokenize(input);
        while (tokenizer.hasMoreElements()) {
            System.out.println(tokenizer.nextElement());
        }
    }
}

/**
 * Tokenizer that extracts every character-level n-gram from a string
 * using a sliding window of fixed size {@code ngram}.
 *
 * <p>Not thread-safe: tokenization state is held in instance fields.
 */
class CharNGramTokenizer extends Tokenizer {

    // Tokens collected by the most recent tokenize() call.
    private ArrayList<String> arrStrings;
    // Index of the next token nextElement() will return.
    // NOTE(review): left public for backward compatibility with any external callers.
    public int currentToken;
    // Window size (number of characters per token); always >= 1.
    private int ngram;

    /**
     * Creates a CharNGramTokenizer with the default n-gram size of 1.
     */
    public CharNGramTokenizer() {
        this(1); // delegate so validation lives in one place
    }

    /**
     * Creates a CharNGramTokenizer with the given n-gram size.
     *
     * @param n size of each n-gram, in characters; must be at least 1
     * @throws IllegalArgumentException if {@code n < 1}
     */
    public CharNGramTokenizer(int n) {
        // Previously n <= 0 was accepted silently and produced an empty-string
        // token for every input position; fail fast instead.
        if (n < 1) {
            throw new IllegalArgumentException("n-gram size must be >= 1, got " + n);
        }
        arrStrings = new ArrayList<String>();
        currentToken = 0;
        ngram = n;
    }

    @Override
    public String globalInfo() {
       return "Tokenize character level n-gram(s) from a string.";
    }

    /**
     * @return true while tokens from the last tokenize() call remain unread
     */
    @Override
    public boolean hasMoreElements() {
        return currentToken < arrStrings.size();
    }

    /**
     * Returns the next token and advances the cursor.
     *
     * @throws IndexOutOfBoundsException if no tokens remain
     */
    @Override
    public Object nextElement() {
        return arrStrings.get(currentToken++);
    }

    /**
     * Extracts all character n-grams of length {@code ngram} from {@code s},
     * replacing any tokens from a previous call.
     *
     * @param s input string; a null or too-short input yields no tokens
     */
    @Override
    public void tokenize(String s) {
        // BUGFIX: reset state so repeated tokenize() calls do not accumulate
        // stale tokens or resume from a stale cursor position.
        arrStrings.clear();
        currentToken = 0;
        if (s == null) {
            return;
        }
        // Slide a window of `ngram` characters across the input.
        // (Replaces the old char-by-char buffer building and debug printlns.)
        for (int i = 0; i + ngram <= s.length(); i++) {
            arrStrings.add(s.substring(i, i + ngram));
        }
    }

    /**
     * @return the revision string of this tokenizer
     */
    @Override
    public String getRevision() {
        // Was an unimplemented placeholder throw; return a plain revision
        // string so Weka tooling that queries revisions does not blow up.
        return "1.0";
    }

    /**
     * Sets the n value of this object. Takes effect on the next tokenize()
     * call; already-extracted tokens are unaffected.
     *
     * @param n size of n-gram; must be at least 1
     * @throws IllegalArgumentException if {@code n < 1}
     */
    public void setNValue(int n) {
        if (n < 1) {
            throw new IllegalArgumentException("n-gram size must be >= 1, got " + n);
        }
        ngram = n;
    }
}
