package main;


import java.io.*;
import java.util.Random;
import weka.classifiers.Evaluation;
import weka.classifiers.bayes.NaiveBayes;
import weka.classifiers.lazy.IBk;
import weka.classifiers.trees.Id3;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.converters.ArffLoader;
import weka.core.converters.ArffSaver;
import weka.core.converters.CSVLoader;
import weka.core.converters.CSVSaver;
 import weka.filters.Filter;
import weka.filters.supervised.attribute.AttributeSelection;
import weka.filters.unsupervised.attribute.Discretize;
import weka.filters.unsupervised.attribute.Normalize;
import weka.filters.unsupervised.attribute.Remove;
import weka.filters.unsupervised.attribute.ReplaceMissingValues;

/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */

public class LogicImageSegmentation {
    
    
    /** Creates the logic object and initializes all members via Init(). */
    public LogicImageSegmentation(){
        super();
        Init();
    }
    
    /**
     * Initializes every collaborator: file loaders/savers, preprocessing
     * filters, the Remove-filter option array, and both the Weka and the
     * self-implemented classifiers.
     */
    public void Init(){
        // File loaders and savers
        m_ArffLoader = new ArffLoader();
        m_ArffSaver = new ArffSaver();
        m_CSVLoader = new CSVLoader();
        m_CSVSaver = new CSVSaver();
        
        // Preprocessing filters
        m_ReplaceMissingValues = new ReplaceMissingValues();
        m_Discretize = new Discretize();
        m_Normalize = new Normalize();
        m_AttributeSelection = new AttributeSelection();
        m_Remove = new Remove();
        
        // Options for the Remove filter ("-R <range>"). Note that optionas1
        // is still null at this point until setOptionas1() is called.
        options = new String[]{"-R", optionas1};
        
        // Weka classifiers (IBk seeded with the current k)
        m_IBk = new IBk(knn);
        m_NaiveBayes = new NaiveBayes();
        m_Id3 = new Id3();
        
        // Self-implemented classifiers
        m_MyIBk = new MyIBk();
        m_MyNaiveBayes = new MyNaiveBayes();
        m_MyId3 = new MyId3();
    }
    
    public Instances m_Instances;// Data set currently loaded / used as model data
    public Instance m_Instance;// Single instance used for the OneDataTest input
    public Instances m_ClassifyInstances;
    
    /** Returns the current data set. */
    public Instances getInstances(){
        return m_Instances;
    }
    
    /** Replaces the current data set. */
    public void setInstances(Instances value){
        m_Instances = value;
    }
    
    private Evaluation m_Evaluation;// Result of the most recent evaluation run
    
    // File loaders/savers shared by all load/save helpers
    private ArffLoader m_ArffLoader;
    private CSVLoader m_CSVLoader;
    private ArffSaver m_ArffSaver;
    private CSVSaver m_CSVSaver;
    private File m_File;// Scratch File handle reused by the loaders/savers
    
    // Preprocessing filters (configured once in Init())
    private Discretize m_Discretize;
    private Normalize m_Normalize;// Normalizes attributes only (not the class)
    private ReplaceMissingValues m_ReplaceMissingValues;
    private AttributeSelection m_AttributeSelection;
    private Remove m_Remove;
    
    // Default locations of data sets, test sets and serialized models
    public final static String FILE_DATA_SET_NAME = "src/java/data_set/image_segmentation_data";
    public final static String FILE_DATA_SET_NAME_ARFF = "src/java/data_set/image_segmentation_data.arff";
    public final static String FILE_DATA_SET_NAME_CSV = "src/java/data_set/image_segmentation_data.csv";
    public final static String FILE_DATA_TEST_NAME_ARFF = "src/java/data_set/image_segmentation_test.arff";
    public final static String FILE_DATA_TEST_NAME_CSV = "src/java/data_set/image_segmentation_test.csv";
    public final static String FILE_MODEL_NAME_WEKA_IBK = "src/java/model/image_segmentation_model_weka_ibk.model";
    public final static String FILE_MODEL_NAME_WEKA_ID3 = "src/java/model/image_segmentation_model_weka_id3.model";
    public final static String FILE_MODEL_NAME_WEKA_NAIVE_BAYES = "src/java/model/image_segmentation_model_weka_naive_bayes.model";
    public final static String FILE_MODEL_NAME_SI_IBK = "src/java/model/image_segmentation_model_si_ibk.model";
    public final static String FILE_MODEL_NAME_SI_ID3 = "src/java/model/image_segmentation_model_si_id3.model";
    public final static String FILE_MODEL_NAME_SI_NAIVE_BAYES = "src/java/model/image_segmentation_model_si_naive_bayes.model";
    public final static String FILE_DATA_TEST_ONE_NAME_ARFF = "src/java/data_set/image_segmentation_test_one.arff";
    public final static String FILE_DATA_TEST_ONE_NAME_CSV = "src/java/data_set/image_segmentation_test_one.csv";
    
    // File-type codes for the load/save helpers
    public final static int ARFF_FILE=0;
    public final static int CSV_FILE=1;
    // Evaluation-mode codes. NOTE(review): "OFFFSET" is a typo but the
    // constant is public API, so it is kept as-is. It also shares the value
    // 11 with USE_TRAINING_SET.
    public final static int USE_TEST_OPTION_OFFFSET = 11;
    public final static int USE_TRAINING_SET=11;
    public final static int USE_SUPPLIED_TEST_SET=12;
    public final static int USE_CROSS_VALIDATION=13;
    public final static int USE_PERCENTAGE_SPLIT=14;
    
    
    // Flags selecting which preprocessing steps are active
    private boolean usediscretize = false;
    private boolean usenormalize = false;
    private boolean usereplacemissingvalues = false;
    private boolean useattributeselection = false;
    private boolean useremove = false;
    String optionas1;// Attribute-range string for the Remove filter's -R option
    String[] options;// {"-R", optionas1}, built in Init()
    
    //Weka classifiers
    public IBk m_IBk;
    private int knn=1;// k for IBk; change via setKnn()
    
    public NaiveBayes m_NaiveBayes;
    
    public Id3 m_Id3;
    
    
    //Self-implemented classifiers
    public MyIBk m_MyIBk;
    public MyNaiveBayes m_MyNaiveBayes;
    public MyId3 m_MyId3;
    
    // Accuracy (percent correct) of each model; -1 means not evaluated yet
    public double weka_ibk_acc=-1;
    public double weka_naive_bayes_acc=-1;
    public double weka_id3_acc=-1;
    public double si_ibk_acc=-1;
    public double si_naive_bayes_acc=-1;
    public double si_id3_acc=-1;
    
    // Evaluation mode: USE_TRAINING_SET (11), USE_SUPPLIED_TEST_SET (12),
    // USE_CROSS_VALIDATION (13) or USE_PERCENTAGE_SPLIT (14).
    // (The old comment listed 10-12, which did not match the constants.)
    public int datatestmode = USE_TRAINING_SET;
    private int folds=10;// Folds for cross-validation
    private double percentagesplit = 66.0;// Training share, in percent
    
    // Streams for saving a model
    OutputStream m_OutputStream;
    ObjectOutputStream m_ObjectOutputStream;
    
    // Streams for reading a model back
    InputStream m_InputStream;
    ObjectInputStream m_ObjectInputStream;
    
    // Holds a single user-supplied data point to classify
    public OneDataTest m_OneDataTest;
    
    // Codes identifying which learning algorithm / model is in use
    public final static int USE_MODEL_OFFSET = 20;
    public final static int USE_WEKA_ID3 = 20;
    public final static int USE_WEKA_IBK = 21;
    public final static int USE_WEKA_NAIVE_BAYES = 22;
    public final static int USE_SI_ID3 = 23;
    public final static int USE_SI_IBK = 24;
    public final static int USE_SI_NAIVE_BAYES = 25;
    
    
    // Active learning algorithm/model; defaults to USE_WEKA_ID3 (20).
    // NOTE(review): the old comment said the default was IBK, but 20 is ID3.
    public int model_algorithm_used = 20;
    
    // Codes identifying preprocessing operations. Callers of
    // SavePreprocessingToExternalFile pass a 0-based code that is shifted
    // by PREPROCESSING_OFFSET before comparison.
    public final static int PREPROCESSING_OFFSET = 30;
    public final static int PREPROCESSING_DISCRETIZE = 30;
    public final static int PREPROCESSING_NORMALIZE = 31;
    public final static int PREPROCESSING_REPLACE_MISSING_VALUE = 32;
    public final static int PREPROCESSING_ATTRIBUTE_SELECTION = 33;
    public final static int PREPROCESSING_MANUAL_ATTRIBUTE_SELECTION = 34;
    
    /** Selects the active learning algorithm/model (a USE_* constant). */
    public void setModel_algorithm_used(int value){
        model_algorithm_used = value;
    }
    
    /** Returns the active learning algorithm/model constant. */
    public int getModel_algorithm_used(){
        return model_algorithm_used;
    }
    
    /** Shortcut: select the Weka IBk classifier. */
    public void setModelWEKAIBK_algorithm_used(){
        setModel_algorithm_used(USE_WEKA_IBK);
    }
    
    /** Shortcut: select the Weka NaiveBayes classifier. */
    public void setModelWEKANAIVEBAYES_algorithm_used(){
        setModel_algorithm_used(USE_WEKA_NAIVE_BAYES);
    }
    
    /** Shortcut: select the Weka Id3 classifier. */
    public void setModelWEKAID3_algorithm_used(){
        setModel_algorithm_used(USE_WEKA_ID3);
    }
    
    /** Shortcut: select the self-implemented IBk classifier. */
    public void setModelSIIBK_algorithm_used(){
        setModel_algorithm_used(USE_SI_IBK);
    }
    
    /** Shortcut: select the self-implemented NaiveBayes classifier. */
    public void setModelSINAIVEBAYES_algorithm_used(){
        setModel_algorithm_used(USE_SI_NAIVE_BAYES);
    }
    
    /** Shortcut: select the self-implemented Id3 classifier. */
    public void setModelSIID3_algorithm_used(){
        setModel_algorithm_used(USE_SI_ID3);
    }
    
    /** Enables/disables the Discretize preprocessing step. */
    public void setUsediscretize(boolean value){
        usediscretize = value;
    }
    
    /** Enables/disables the Normalize preprocessing step. */
    public void setUsenormalize(boolean value){
        usenormalize = value;
    }
    
    /** Enables/disables the ReplaceMissingValues preprocessing step. */
    public void setUsereplacemissingvalues(boolean value){
        usereplacemissingvalues = value;
    }
    
    /** Enables/disables the supervised AttributeSelection step. */
    public void setUseattributeselection(boolean value){
        useattributeselection = value;
    }
    
    /** Sets the attribute-range string used as the Remove filter's -R option. */
    public void setOptionas1(String value){
        optionas1 = value;
    }
    
    /** Enables/disables the Remove (manual attribute selection) step. */
    public void setUseremove(boolean value){
        useremove = value;
    }
    
    /** Sets k and propagates it to both the Weka and self-implemented IBk. */
    public void setKnn(int value){
        knn = value;
        m_IBk.setKNN(knn);
        System.out.println("EAA : IBk.k=" + m_IBk.getKNN());
        m_MyIBk.setKNN(knn);
    }

    /** Sets the evaluation mode (USE_TRAINING_SET .. USE_PERCENTAGE_SPLIT). */
    public void setDatatestmode(int value){
        datatestmode = value;
    }

    /** Sets the number of folds used for Weka cross-validation. */
    public void setFolds(int value){
        // Number of folds for crossValidateModel
        folds = value;
    }
    
    /** Sets the training percentage (0-100) for percentage-split evaluation. */
    public void setPercentagesplit(double value){
        percentagesplit = value;
    }
    
    /** Returns whether the Discretize preprocessing step is enabled. */
    public boolean getUsediscretize(){
        return usediscretize;
    }
    
    /** Returns whether the Normalize preprocessing step is enabled. */
    public boolean getUsenormalize(){
        return usenormalize;
    }
    
    /** Returns whether the ReplaceMissingValues step is enabled. */
    public boolean getUsereplacemissingvalues(){
        return usereplacemissingvalues;
    }
    
    /** Returns the attribute-range string for the Remove filter. */
    public String getOptionas1(){
        return optionas1;
    }
    
    /** Returns whether the Remove (manual attribute selection) step is enabled. */
    public boolean getUseremove(){
        return useremove;
    }
    
    /** Returns the current k for the IBk classifiers. */
    public int getKnn(){
        return knn;
    }

    /**
     * Returns the current evaluation mode.
     * NOTE(review): the {@code value} parameter is never used — apparently a
     * copy-paste slip; it cannot be removed without breaking existing callers.
     */
    public int getDatatestmode(int value){
        return datatestmode;
    }
    
    /** Returns the number of folds used for cross-validation. */
    public int getFolds(){
        return folds;
    }
    
    /** Returns the training percentage for percentage-split evaluation. */
    public double getPercentagesplit(){
        return percentagesplit;
    }
    
    /**
     * Loads a data set from file into m_Instances.
     * @param filename path of the input file
     * @param typefile ARFF_FILE (0) for ARFF input, CSV_FILE (1) for CSV
     *        input; any other value leaves m_Instances unchanged
     * @throws IOException if the file cannot be read
     */
    public void InisializesInstances(String filename,int typefile) throws IOException{
        switch (typefile) {
            case ARFF_FILE:
                m_File = new File(filename);
                m_ArffLoader.setSource(m_File);
                m_Instances = m_ArffLoader.getDataSet();
                break;
            case CSV_FILE:
                m_File = new File(filename);
                m_CSVLoader.setSource(m_File);
                m_Instances = m_CSVLoader.getDataSet();
                break;
            default:
                // Unknown file type: no-op, matching the original behavior.
                break;
        }
    }
    
    /**
     * Loads a data set from file, intending to store it in {@code instances}.
     * typefile: 0 for ARFF, 1 for CSV.
     * NOTE(review): assigning to the {@code instances} parameter has no
     * effect for the caller — Java passes object references by value — so
     * this method loads the file and then discards the result. Callers that
     * need the data should use {@link #InisializesInstances(String, int)}
     * (which stores into m_Instances) instead.
     */
    public void InisializesInstances(Instances instances,String filename,int typefile) throws IOException{
        if (typefile==LogicImageSegmentation.ARFF_FILE){
            m_File = new File(filename);
            m_ArffLoader.setSource(m_File);
            instances = m_ArffLoader.getDataSet();
        }else if (typefile==LogicImageSegmentation.CSV_FILE){
            m_File = new File(filename);
            m_CSVLoader.setSource(m_File);
            instances = m_CSVLoader.getDataSet();
        }
//        System.out.println("Nilai instances : " + instances);
    }
    
    
    /**
     * Saves the current data set (m_Instances) to an external file.
     * Delegates to the three-argument overload so the save logic lives in
     * one place (the two bodies used to be duplicated).
     * @param fileoutput output file name
     * @param typefile ARFF_FILE (0) or CSV_FILE (1); any other value is a no-op
     * @throws IOException on write failure
     */
    public void SaveInstancesToExternalFile(String fileoutput,int typefile) throws IOException{
        SaveInstancesToExternalFile(m_Instances, fileoutput, typefile);
    }
    
    
    /**
     * Applies one preprocessing step to m_Instances and, when a step was
     * actually applied, saves the result both as ARFF and as CSV.
     * @param fileoutputarff ARFF output file name
     * @param fileoutputcsv CSV output file name
     * @param typepreprocessing 0-based preprocessing code; it is shifted by
     *        PREPROCESSING_OFFSET before matching the PREPROCESSING_* constants
     * @param options Remove-filter options for the manual-selection case;
     *        if null for that case, nothing is applied or saved
     * @throws Exception if a filter or a save operation fails
     */
    public void SavePreprocessingToExternalFile(String fileoutputarff,String fileoutputcsv,int typepreprocessing,String[] options) throws Exception{
        typepreprocessing += PREPROCESSING_OFFSET;
        boolean applied = true;
        switch (typepreprocessing) {
            case PREPROCESSING_DISCRETIZE:
                FilterUnsupervisedDiscretize();
                break;
            case PREPROCESSING_NORMALIZE:
                FilterUnsupervisedNormalizeAttribute();
                break;
            case PREPROCESSING_REPLACE_MISSING_VALUE:
                FilterUnsupervisedReplaceMissingValue();
                break;
            case PREPROCESSING_ATTRIBUTE_SELECTION:
                FilterSupervisedAttributeSelection();
                break;
            case PREPROCESSING_MANUAL_ATTRIBUTE_SELECTION:
                if (options != null){
                    this.options = options;
                    System.out.println("Nilai options : " + this.options[1]);
                    FilterManualSelection();
                }else{
                    applied = false;
                }
                break;
            default:
                applied = false;
                break;
        }
        if (applied){
            SaveInstancesToExternalFile(fileoutputarff, ARFF_FILE);
            SaveInstancesToExternalFile(fileoutputcsv, CSV_FILE);
        }
    }
    
    /**
     * Converts an ARFF file to CSV format.
     * @param arfffile input ARFF file name
     * @param csvfile output CSV file name
     * @throws IOException if either file cannot be read or written
     */
    public static void SaveArffToCSV(String arfffile,String csvfile) throws IOException{
        // Load the ARFF data set.
        File input = new File(arfffile);
        System.out.println("arff : " + input);
        ArffLoader loader = new ArffLoader();
        loader.setSource(input);
        Instances data = loader.getDataSet();
        
        // Write it back out in CSV format.
        File output = new File(csvfile);
        CSVSaver saver = new CSVSaver();
        saver.setInstances(data);
        saver.setFile(output);
        System.out.println("arff : " + output);
        saver.writeBatch();
    }
    
    /**
     * Convenience overload: converts the default data-set ARFF file to the
     * default CSV file. Delegates to {@link #SaveArffToCSV(String, String)}
     * so the conversion logic lives in one place (the two bodies used to be
     * duplicated line for line).
     * @throws IOException if either file cannot be read or written
     */
    public static void SaveArffToCSV() throws IOException{
        SaveArffToCSV(LogicImageSegmentation.FILE_DATA_SET_NAME_ARFF,
                      LogicImageSegmentation.FILE_DATA_SET_NAME_CSV);
    }
    
    /**
     * Converts a CSV file to ARFF format.
     * @param csvfile input CSV file name
     * @param arfffile output ARFF file name
     * @throws IOException if either file cannot be read or written
     */
    public static void SaveCSVToArff(String csvfile,String arfffile) throws IOException{
        // Load the CSV data set.
        CSVLoader loader = new CSVLoader();
        loader.setSource(new File(csvfile));
        Instances data = loader.getDataSet();
        
        // Write it back out as ARFF.
        ArffSaver saver = new ArffSaver();
        saver.setInstances(data);
        saver.setFile(new File(arfffile));
        saver.writeBatch();
    }
    
    /**
     * Saves the given instances to an external file in ARFF or CSV format.
     * @param instances data set to write
     * @param fileoutput output file name
     * @param typefile ARFF_FILE (0) or CSV_FILE (1); any other value is a no-op
     * @throws IOException on write failure
     */
    public void SaveInstancesToExternalFile(Instances instances,String fileoutput,int typefile) throws IOException{
        switch (typefile) {
            case ARFF_FILE:
                m_File = new File(fileoutput);
                m_ArffSaver.setInstances(instances);
                m_ArffSaver.setFile(m_File);
                m_ArffSaver.writeBatch();
                break;
            case CSV_FILE:
                m_File = new File(fileoutput);
                m_CSVSaver.setInstances(instances);
                m_CSVSaver.setFile(m_File);
                m_CSVSaver.writeBatch();
                break;
            default:
                // Unknown file type: no-op, matching the original behavior.
                break;
        }
    }
    
    /** Deletes the named file, returning true when the deletion succeeded. */
    boolean IsDeleteFile(String filename){
        return new File(filename).delete();
    }
    
    /**
     * Preprocessing: applies Weka's unsupervised Discretize filter with its
     * default parameters to m_Instances, replacing it with the discretized copy.
     * @throws Exception if the filter rejects the input format
     */
    public void FilterUnsupervisedDiscretize() throws Exception{
        m_Discretize.setInputFormat(m_Instances);
        m_Instances = Filter.useFilter(m_Instances, m_Discretize);
    }
    
    /*
     * Sama seperti FilterUnsupervisedDiscretize() tapi yang difilter
     * adalah instances
     */
//    public void FilterUnsupervisedFileDiscretize(Instances instances) throws Exception{
//        m_Discretize.setInputFormat(instances);
//        instances = Filter.useFilter(instances, m_Discretize);
////        System.out.println("instances EA : " + instances);
//    }
    
    /**
     * Loads an ARFF file, applies the unsupervised Discretize filter to it,
     * and writes the filtered data back to the same file.
     * @param filename ARFF file that is read, discretized, and overwritten
     * @throws Exception if loading, filtering, or saving fails
     */
    public void FilterUnsupervisedFileDiscretize(String filename) throws Exception{
        m_File = new File(filename);
        m_ArffLoader.setSource(m_File);
        Instances data = m_ArffLoader.getDataSet();
        m_Discretize.setInputFormat(data);
        data = Filter.useFilter(data, m_Discretize);
        System.out.println("Nilai instance NOVAN : " + data);
        SaveInstancesToExternalFile(data, filename, ARFF_FILE);
    }
    
    
    /**
     * Preprocessing: applies Weka's unsupervised Normalize filter to
     * m_Instances, scaling every numeric attribute into [0, 1].
     * @throws Exception if the filter rejects the input format
     */
    public void FilterUnsupervisedNormalizeAttribute() throws Exception{
        m_Normalize.setInputFormat(m_Instances);
        m_Instances = Filter.useFilter(m_Instances, m_Normalize);
    }
    
    /**
     * Preprocessing: applies Weka's ReplaceMissingValues filter to
     * m_Instances; missing nominal values become the mode and missing
     * numeric values become the mean of the training data.
     * @throws Exception if the filter rejects the input format
     */
    public void FilterUnsupervisedReplaceMissingValue() throws Exception{
        m_ReplaceMissingValues.setInputFormat(m_Instances);
        m_Instances = Filter.useFilter(m_Instances, m_ReplaceMissingValues);
    }
    
    /**
     * Preprocessing: manually removes attributes from m_Instances using the
     * Remove filter configured from the instance field {@code options}
     * (options[0]="-R", options[1]=a range string such as "first-3,4-last").
     * NOTE(review): options[1] is null until setOptionas1() was called before
     * Init(), or until SavePreprocessingToExternalFile(...) has supplied an
     * options array — setOptions will fail on a null range.
     * @throws Exception if the options or the input format are invalid
     */
    public void FilterManualSelection() throws Exception{
        m_Remove.setOptions(options);
        m_Remove.setInputFormat(m_Instances);
        m_Instances = Filter.useFilter(m_Instances, m_Remove);
    }
    
    /**
     * Preprocessing: applies Weka's supervised AttributeSelection filter
     * (default evaluator and search method) to m_Instances.
     * @throws Exception if the filter rejects the input format
     */
    public void FilterSupervisedAttributeSelection() throws Exception{
        m_AttributeSelection.setInputFormat(m_Instances);
        m_Instances = Filter.useFilter(m_Instances, m_AttributeSelection);
    }
    
    
    
    /**
     * Trains and evaluates the Weka IBk (k-NN) classifier using the default
     * supplied-test-set file. Delegates to the String overload so the
     * evaluation logic is kept in one place (the two bodies used to be
     * duplicated line for line).
     * @throws Exception if training or evaluation fails
     */
    public void LearningAlgorithmWekaIBk() throws Exception{
        LearningAlgorithmWekaIBk(FILE_DATA_TEST_NAME_ARFF);
    }
    
    /**
     * Trains and evaluates the Weka IBk (k-NN) classifier on m_Instances
     * according to the current datatestmode, storing the percent-correct
     * score in weka_ibk_acc.
     * @param fileoutputarff ARFF file holding the supplied test set; only
     *        used when datatestmode == USE_SUPPLIED_TEST_SET
     * @throws Exception if training or evaluation fails
     */
    public void LearningAlgorithmWekaIBk(String fileoutputarff) throws Exception{
        // Convention throughout this class: the class attribute is last.
        m_Instances.setClassIndex(m_Instances.numAttributes()-1);
        
        if (datatestmode==USE_TRAINING_SET){
            // Evaluate on the same data used for training.
            m_IBk.buildClassifier(m_Instances);
            m_Evaluation = new Evaluation(m_Instances);
            m_Evaluation.evaluateModel(m_IBk, m_Instances);
        }else if (datatestmode==USE_CROSS_VALIDATION){
            m_IBk.buildClassifier(m_Instances);
            m_Evaluation = new Evaluation(m_Instances);
            m_Evaluation.crossValidateModel(m_IBk, m_Instances, folds, new Random(1));
        }else if (datatestmode==USE_PERCENTAGE_SPLIT){
            // Shuffle, then split into percentagesplit% train / rest test.
            m_Instances.randomize(new Random(1));
            int trainSize = (int)Math.round(m_Instances.numInstances()*percentagesplit/100);
            int testSize = m_Instances.numInstances() - trainSize;
            Instances training_Instances = new Instances(m_Instances, 0, trainSize);
            m_IBk.buildClassifier(training_Instances);
            Instances test_Instances = new Instances(m_Instances, trainSize, testSize);
            m_Evaluation = new Evaluation(training_Instances);
            m_Evaluation.evaluateModel(m_IBk, test_Instances);
        }else if(datatestmode==USE_SUPPLIED_TEST_SET){
            System.out.println("TOYYYYYY");
            m_IBk.buildClassifier(m_Instances);
            m_Evaluation = new Evaluation(m_Instances);
            // BUG FIX: the reader used to be leaked; close it once the
            // instances have been parsed.
            BufferedReader reader = new BufferedReader(new FileReader(fileoutputarff));
            Instances unlabeled;
            try {
                unlabeled = new Instances(reader);
            } finally {
                reader.close();
            }
            unlabeled.setClassIndex(unlabeled.numAttributes()-1);
            m_Evaluation.evaluateModel(m_IBk, unlabeled);
        }
        
        System.out.println("EVAL MODEL : " + m_Evaluation.toSummaryString());
        System.out.println("Evaluasi correct ibk : " + m_Evaluation.pctCorrect());
        weka_ibk_acc = m_Evaluation.pctCorrect();
    }
    
    /**
     * Trains and evaluates the Weka NaiveBayes classifier using the default
     * supplied-test-set file. Delegates to the String overload so the
     * evaluation logic is kept in one place (the two bodies used to be
     * duplicated line for line).
     * @throws Exception if training or evaluation fails
     */
    public void LearningAlgorithmWekaNaiveBayes() throws Exception{
        LearningAlgorithmWekaNaiveBayes(FILE_DATA_TEST_NAME_ARFF);
    }
    
    
    /**
     * Trains and evaluates the Weka NaiveBayes classifier on m_Instances
     * according to the current datatestmode, storing the percent-correct
     * score in weka_naive_bayes_acc.
     * @param fileoutputarff ARFF file holding the supplied test set; only
     *        used when datatestmode == USE_SUPPLIED_TEST_SET
     * @throws Exception if training or evaluation fails
     */
    public void LearningAlgorithmWekaNaiveBayes(String fileoutputarff) throws Exception{
        // Convention throughout this class: the class attribute is last.
        m_Instances.setClassIndex(m_Instances.numAttributes()-1);
        
        if (datatestmode==USE_TRAINING_SET){
            m_NaiveBayes.buildClassifier(m_Instances);
            m_Evaluation = new Evaluation(m_Instances);
            m_Evaluation.evaluateModel(m_NaiveBayes, m_Instances);
        }else if (datatestmode==USE_CROSS_VALIDATION){
            // crossValidateModel trains internally; no explicit build needed.
            m_Evaluation = new Evaluation(m_Instances);
            m_Evaluation.crossValidateModel(m_NaiveBayes, m_Instances, folds, new Random(1));
        }else if (datatestmode==USE_PERCENTAGE_SPLIT){
            // Shuffle, then split into percentagesplit% train / rest test.
            m_Instances.randomize(new Random(1));
            int trainSize = (int)Math.round(m_Instances.numInstances()*percentagesplit/100);
            int testSize = m_Instances.numInstances() - trainSize;
            Instances training_Instances = new Instances(m_Instances, 0, trainSize);
            m_NaiveBayes.buildClassifier(training_Instances);
            Instances test_Instances = new Instances(m_Instances, trainSize, testSize);
            m_Evaluation = new Evaluation(training_Instances);
            m_Evaluation.evaluateModel(m_NaiveBayes, test_Instances);
        }else if (datatestmode==USE_SUPPLIED_TEST_SET){
            m_NaiveBayes.buildClassifier(m_Instances);
            m_Evaluation = new Evaluation(m_Instances);
            // BUG FIX: the reader used to be leaked; close it once the
            // instances have been parsed.
            BufferedReader reader = new BufferedReader(new FileReader(fileoutputarff));
            Instances unlabeled;
            try {
                unlabeled = new Instances(reader);
            } finally {
                reader.close();
            }
            unlabeled.setClassIndex(unlabeled.numAttributes()-1);
            m_Evaluation.evaluateModel(m_NaiveBayes, unlabeled);
        }
        System.out.println(m_Evaluation.toSummaryString());
        // BUG FIX: the log line used to say "ibk" although this method
        // reports the NaiveBayes result.
        System.out.println("Evaluasi correct naive bayes : " + m_Evaluation.pctCorrect());
        weka_naive_bayes_acc = m_Evaluation.pctCorrect();
    }
    
    /**
     * Trains and evaluates the Weka Id3 classifier on m_Instances according
     * to the current datatestmode, storing the percent-correct score in
     * weka_id3_acc. Id3 requires nominal data: discretize numeric
     * attributes first.
     * @throws Exception if training or evaluation fails
     */
    public void LearningAlgorithmWekaID3() throws Exception{
        // Convention throughout this class: the class attribute is last.
        m_Instances.setClassIndex(m_Instances.numAttributes()-1);
        
        if (datatestmode==USE_TRAINING_SET){
            m_Id3.buildClassifier(m_Instances);
            m_Evaluation = new Evaluation(m_Instances);
            m_Evaluation.evaluateModel(m_Id3, m_Instances);
        }else if (datatestmode==USE_CROSS_VALIDATION){
            m_Id3.buildClassifier(m_Instances);
            m_Evaluation = new Evaluation(m_Instances);
            // BUG FIX: this used to call crossValidateModel(m_IBk, ...),
            // cross-validating the wrong classifier.
            m_Evaluation.crossValidateModel(m_Id3, m_Instances, folds, new Random(1));
        }else if (datatestmode==USE_PERCENTAGE_SPLIT){
            // Shuffle, then split into percentagesplit% train / rest test.
            m_Instances.randomize(new Random(1));
            int trainSize = (int)Math.round(m_Instances.numInstances()*percentagesplit/100);
            int testSize = m_Instances.numInstances() - trainSize;
            Instances training_Instances = new Instances(m_Instances, 0, trainSize);
            m_Id3.buildClassifier(training_Instances);
            Instances test_Instances = new Instances(m_Instances, trainSize, testSize);
            m_Evaluation = new Evaluation(training_Instances);
            m_Evaluation.evaluateModel(m_Id3, test_Instances);
        }else if (datatestmode==USE_SUPPLIED_TEST_SET){
            m_Id3.buildClassifier(m_Instances);
            m_Evaluation = new Evaluation(m_Instances);
            // BUG FIX: the reader used to be leaked; close it once the
            // instances have been parsed.
            BufferedReader reader = new BufferedReader(new FileReader(FILE_DATA_TEST_NAME_ARFF));
            Instances unlabeled;
            try {
                unlabeled = new Instances(reader);
            } finally {
                reader.close();
            }
            unlabeled.setClassIndex(unlabeled.numAttributes()-1);
            m_Evaluation.evaluateModel(m_Id3, unlabeled);
        }
        
        weka_id3_acc = m_Evaluation.pctCorrect();
        System.out.println("Weka-ID3 accuracy  : " + weka_id3_acc);
    }
    
    
    /**
     * Trains and evaluates the Weka Id3 classifier on m_Instances according
     * to the current datatestmode, storing the percent-correct score in
     * weka_id3_acc. Id3 requires nominal data: discretize numeric
     * attributes first.
     * @param fileoutputarff ARFF file holding the supplied test set; only
     *        used when datatestmode == USE_SUPPLIED_TEST_SET
     * @throws Exception if training or evaluation fails
     */
    public void LearningAlgorithmWekaID3(String fileoutputarff) throws Exception{
        // Convention throughout this class: the class attribute is last.
        m_Instances.setClassIndex(m_Instances.numAttributes()-1);
        
        if (datatestmode==USE_TRAINING_SET){
            m_Id3.buildClassifier(m_Instances);
            m_Evaluation = new Evaluation(m_Instances);
            m_Evaluation.evaluateModel(m_Id3, m_Instances);
        }else if (datatestmode==USE_CROSS_VALIDATION){
            m_Id3.buildClassifier(m_Instances);
            m_Evaluation = new Evaluation(m_Instances);
            // BUG FIX: this used to call crossValidateModel(m_IBk, ...),
            // cross-validating the wrong classifier.
            m_Evaluation.crossValidateModel(m_Id3, m_Instances, folds, new Random(1));
        }else if (datatestmode==USE_PERCENTAGE_SPLIT){
            // Shuffle, then split into percentagesplit% train / rest test.
            m_Instances.randomize(new Random(1));
            int trainSize = (int)Math.round(m_Instances.numInstances()*percentagesplit/100);
            int testSize = m_Instances.numInstances() - trainSize;
            Instances training_Instances = new Instances(m_Instances, 0, trainSize);
            m_Id3.buildClassifier(training_Instances);
            Instances test_Instances = new Instances(m_Instances, trainSize, testSize);
            m_Evaluation = new Evaluation(training_Instances);
            m_Evaluation.evaluateModel(m_Id3, test_Instances);
        }else if (datatestmode==USE_SUPPLIED_TEST_SET){
            m_Id3.buildClassifier(m_Instances);
            m_Evaluation = new Evaluation(m_Instances);
            // BUG FIX: the reader used to be leaked; close it once the
            // instances have been parsed.
            BufferedReader reader = new BufferedReader(new FileReader(fileoutputarff));
            Instances unlabeled;
            try {
                unlabeled = new Instances(reader);
            } finally {
                reader.close();
            }
            unlabeled.setClassIndex(unlabeled.numAttributes()-1);
            m_Evaluation.evaluateModel(m_Id3, unlabeled);
        }
        
        weka_id3_acc = m_Evaluation.pctCorrect();
        System.out.println("Weka-ID3 accuracy  : " + weka_id3_acc);
    }
    
    
    /**
     * Runs the self-implemented k-NN (IBk) classifier under the currently
     * selected test option and stores its accuracy in si_ibk_acc.
     * The supplied test set, when that option is active, is read from
     * FILE_DATA_TEST_NAME_ARFF.
     * @throws Exception if classification or file reading fails
     */
    public void LearningAlgorithmSelfImplementIBk() throws Exception{
        m_Instances.setClassIndex(m_Instances.numAttributes()-1);
        
        if (datatestmode==USE_TRAINING_SET){
            // Train and evaluate on the same data.
            m_MyIBk = new MyIBk(m_Instances, knn);
            m_MyIBk.doClassification(m_MyIBk.inst);
        }else if (datatestmode==USE_CROSS_VALIDATION){
            System.out.println("Cross validation SIIBK");
            m_MyIBk = new MyIBk(m_Instances, knn);
            m_MyIBk.setInstances(m_Instances);
            m_MyIBk.doCrossValidation(folds);
        }else if (datatestmode==USE_PERCENTAGE_SPLIT){
            System.out.println("Percentage split SIIBK");
            m_MyIBk = new MyIBk(m_Instances, knn);
            m_MyIBk.doPercentageSplit(percentagesplit);
        }else if (datatestmode==USE_SUPPLIED_TEST_SET){
            m_MyIBk = new MyIBk(m_Instances, knn);
            BufferedReader reader = new BufferedReader(new FileReader(FILE_DATA_TEST_NAME_ARFF));
            Instances testSet = new Instances(reader);
            testSet.setClassIndex(testSet.numAttributes()-1);
            m_MyIBk.doClassification(testSet);
        }
        si_ibk_acc = m_MyIBk.getAccuracy();
        System.out.println("SI-IBK accuracy : " + si_ibk_acc);
    }
    
    /**
     * Runs the self-implemented k-NN (IBk) classifier under the currently
     * selected test option, reading the supplied test set (when that option
     * is active) from the given ARFF file. Stores the accuracy in si_ibk_acc.
     * @param fileoutputarff path of the ARFF file used as the supplied test set
     * @throws Exception if classification or file reading fails
     */
    public void LearningAlgorithmSelfImplementIBk(String fileoutputarff) throws Exception{
        m_Instances.setClassIndex(m_Instances.numAttributes()-1);
        
        if (datatestmode==USE_TRAINING_SET){
            // Train and evaluate on the same data.
            m_MyIBk = new MyIBk(m_Instances, knn);
            m_MyIBk.doClassification(m_MyIBk.inst);
        }else if (datatestmode==USE_CROSS_VALIDATION){
            System.out.println("Cross validation SIIBK");
            m_MyIBk = new MyIBk(m_Instances, knn);
            m_MyIBk.setInstances(m_Instances);
            m_MyIBk.doCrossValidation(folds);
        }else if (datatestmode==USE_PERCENTAGE_SPLIT){
            System.out.println("Percentage split SIIBK");
            m_MyIBk = new MyIBk(m_Instances, knn);
            m_MyIBk.doPercentageSplit(percentagesplit);
        }else if (datatestmode==USE_SUPPLIED_TEST_SET){
            m_MyIBk = new MyIBk(m_Instances, knn);
            BufferedReader reader = new BufferedReader(new FileReader(fileoutputarff));
            Instances testSet = new Instances(reader);
            testSet.setClassIndex(testSet.numAttributes()-1);
            m_MyIBk.doClassification(testSet);
        }
        si_ibk_acc = m_MyIBk.getAccuracy();
        System.out.println("SI-IBK accuracy : " + si_ibk_acc);
    }
    
    /**
     * Runs the self-implemented Naive Bayes classifier under the currently
     * selected test option and stores its accuracy in si_naive_bayes_acc.
     * The supplied test set, when that option is active, is read from
     * FILE_DATA_TEST_NAME_ARFF.
     * @throws Exception if classification or file reading fails
     */
    public void LearningAlgorithmSelfImplementNaiveBayes() throws Exception{
        m_Instances.setClassIndex(m_Instances.numAttributes()-1);
        
        if (datatestmode==USE_TRAINING_SET){
            // Train and evaluate on the same data.
            m_MyNaiveBayes = new MyNaiveBayes(m_Instances);
            m_MyNaiveBayes.doClassification(m_MyNaiveBayes.inst);
        }else if (datatestmode==USE_CROSS_VALIDATION){
            System.out.println("Cross validation SINaiveBayes");
            m_MyNaiveBayes = new MyNaiveBayes(m_Instances);
            m_MyNaiveBayes.setInstances(m_Instances);
            m_MyNaiveBayes.doCrossValidation(folds);
        }else if (datatestmode==USE_PERCENTAGE_SPLIT){
            System.out.println("Percentage split SINaiveBayes");
            m_MyNaiveBayes = new MyNaiveBayes(m_Instances);
            m_MyNaiveBayes.doPercentageSplit(percentagesplit);
        }else if (datatestmode==USE_SUPPLIED_TEST_SET){
            m_MyNaiveBayes = new MyNaiveBayes(m_Instances);
            BufferedReader reader = new BufferedReader(new FileReader(FILE_DATA_TEST_NAME_ARFF));
            Instances testSet = new Instances(reader);
            testSet.setClassIndex(testSet.numAttributes()-1);
            m_MyNaiveBayes.doClassification(testSet);
        }
        si_naive_bayes_acc = m_MyNaiveBayes.getAccuracy();
        System.out.println("SI-NaiveBayes accuracy : " + si_naive_bayes_acc);
    }
    
    /**
     * Runs the self-implemented Naive Bayes classifier under the currently
     * selected test option, reading the supplied test set (when that option
     * is active) from the given ARFF file. Stores the accuracy in
     * si_naive_bayes_acc.
     * @param fileoutputarff path of the ARFF file used as the supplied test set
     * @throws Exception if classification or file reading fails
     */
    public void LearningAlgorithmSelfImplementNaiveBayes(String fileoutputarff) throws Exception{
        m_Instances.setClassIndex(m_Instances.numAttributes()-1);
        
        if (datatestmode==USE_TRAINING_SET){
            // Train and evaluate on the same data.
            m_MyNaiveBayes = new MyNaiveBayes(m_Instances);
            m_MyNaiveBayes.doClassification(m_MyNaiveBayes.inst);
        }else if (datatestmode==USE_CROSS_VALIDATION){
            System.out.println("Cross validation SINaiveBayes");
            m_MyNaiveBayes = new MyNaiveBayes(m_Instances);
            m_MyNaiveBayes.setInstances(m_Instances);
            m_MyNaiveBayes.doCrossValidation(folds);
        }else if (datatestmode==USE_PERCENTAGE_SPLIT){
            System.out.println("Percentage split SINaiveBayes");
            m_MyNaiveBayes = new MyNaiveBayes(m_Instances);
            m_MyNaiveBayes.doPercentageSplit(percentagesplit);
        }else if (datatestmode==USE_SUPPLIED_TEST_SET){
            m_MyNaiveBayes = new MyNaiveBayes(m_Instances);
            BufferedReader reader = new BufferedReader(new FileReader(fileoutputarff));
            Instances testSet = new Instances(reader);
            testSet.setClassIndex(testSet.numAttributes()-1);
            m_MyNaiveBayes.doClassification(testSet);
        }
        si_naive_bayes_acc = m_MyNaiveBayes.getAccuracy();
        System.out.println("SI-NaiveBayes accuracy : " + si_naive_bayes_acc);
    }
    
    /**
     * Runs the self-implemented ID3 decision tree through Weka's Evaluation
     * harness under the currently selected test option and stores its
     * accuracy in si_id3_acc. The supplied test set, when that option is
     * active, is read from FILE_DATA_TEST_NAME_ARFF.
     * @throws Exception if building or evaluating the tree fails
     */
    public void LearningAlgorithmSelfImplementID3() throws Exception{
        m_Instances.setClassIndex(m_Instances.numAttributes()-1);
        
        if (datatestmode==USE_TRAINING_SET){
            m_MyId3.buildClassifier(m_Instances);
            m_Evaluation = new Evaluation(m_Instances);
            m_Evaluation.evaluateModel(m_MyId3, m_Instances);
        }else if (datatestmode==USE_CROSS_VALIDATION){
            m_MyId3.buildClassifier(m_Instances);
            m_Evaluation = new Evaluation(m_Instances);
            m_Evaluation.crossValidateModel(m_MyId3, m_Instances, folds, new Random(1));
        }else if (datatestmode==USE_PERCENTAGE_SPLIT){
            // Deterministic shuffle, then split into train/test partitions.
            m_Instances.randomize(new Random(1));
            int numTrain = (int)Math.round(m_Instances.numInstances()*percentagesplit/100);
            int numTest = m_Instances.numInstances() - numTrain;
            Instances trainPart = new Instances(m_Instances, 0, numTrain);
            Instances testPart = new Instances(m_Instances, numTrain, numTest);
            m_MyId3.buildClassifier(trainPart);
            m_Evaluation = new Evaluation(trainPart);
            m_Evaluation.evaluateModel(m_MyId3, testPart);
        }else if (datatestmode==USE_SUPPLIED_TEST_SET){
            m_MyId3.buildClassifier(m_Instances);
            m_Evaluation = new Evaluation(m_Instances);
            Instances testSet = new Instances(new BufferedReader(new FileReader(FILE_DATA_TEST_NAME_ARFF)));
            testSet.setClassIndex(testSet.numAttributes()-1);
            m_Evaluation.evaluateModel(m_MyId3, testSet);
        }
        si_id3_acc = m_Evaluation.pctCorrect();
        System.out.println("SI-ID3 accuracy : " + si_id3_acc);
    }
    
    /**
     * Runs the self-implemented ID3 decision tree through Weka's Evaluation
     * harness under the currently selected test option, reading the supplied
     * test set (when that option is active) from the given ARFF file.
     * Stores the accuracy in si_id3_acc.
     * @param fileoutputarff path of the ARFF file used as the supplied test set
     * @throws Exception if building or evaluating the tree fails
     */
    public void LearningAlgorithmSelfImplementID3(String fileoutputarff) throws Exception{
        m_Instances.setClassIndex(m_Instances.numAttributes()-1);
        
        if (datatestmode==USE_TRAINING_SET){
            m_MyId3.buildClassifier(m_Instances);
            m_Evaluation = new Evaluation(m_Instances);
            m_Evaluation.evaluateModel(m_MyId3, m_Instances);
        }else if (datatestmode==USE_CROSS_VALIDATION){
            m_MyId3.buildClassifier(m_Instances);
            m_Evaluation = new Evaluation(m_Instances);
            m_Evaluation.crossValidateModel(m_MyId3, m_Instances, folds, new Random(1));
        }else if (datatestmode==USE_PERCENTAGE_SPLIT){
            // Deterministic shuffle, then split into train/test partitions.
            m_Instances.randomize(new Random(1));
            int numTrain = (int)Math.round(m_Instances.numInstances()*percentagesplit/100);
            int numTest = m_Instances.numInstances() - numTrain;
            Instances trainPart = new Instances(m_Instances, 0, numTrain);
            Instances testPart = new Instances(m_Instances, numTrain, numTest);
            m_MyId3.buildClassifier(trainPart);
            m_Evaluation = new Evaluation(trainPart);
            m_Evaluation.evaluateModel(m_MyId3, testPart);
        }else if (datatestmode==USE_SUPPLIED_TEST_SET){
            m_MyId3.buildClassifier(m_Instances);
            m_Evaluation = new Evaluation(m_Instances);
            Instances testSet = new Instances(new BufferedReader(new FileReader(fileoutputarff)));
            testSet.setClassIndex(testSet.numAttributes()-1);
            m_Evaluation.evaluateModel(m_MyId3, testSet);
        }
        si_id3_acc = m_Evaluation.pctCorrect();
        System.out.println("SI-ID3 accuracy : " + si_id3_acc);
    }
    
    /**
     * Runs all six learning algorithms (three Weka, three self-implemented)
     * and serialises each resulting model; called from the JSP front-end.
     * fileoutputarff is the ARFF test-data file used when the supplied
     * test set option is active.
     * @throws Exception if any learning or save step fails
     */
    public void AllOutLearningJSP(String fileoutputarff,String modelwekaibk,String modelwekanaivebayes,String modelwekaid3,String modelsiibk,String modelsinaivebayes,String modelsiid3) throws Exception{
        //Weka learning algorithm
        // Each pair trains/evaluates one model, then saves it to the
        // caller-supplied model file path. Order matters: every save relies
        // on the immediately preceding learning call having built its model.
        LearningAlgorithmWekaIBk(fileoutputarff);
        SaveModelWekaFileIBK(modelwekaibk);
        
        LearningAlgorithmWekaNaiveBayes(fileoutputarff);
        SaveModelWekaFileNAIVE_BAYES(modelwekanaivebayes);
        
        LearningAlgorithmWekaID3(fileoutputarff);
        SaveModelWekaFileID3(modelwekaid3);
        
        //Self-implement learning algorithm
        LearningAlgorithmSelfImplementIBk(fileoutputarff);
        SaveModelSIFileIBK(modelsiibk);
        
        LearningAlgorithmSelfImplementNaiveBayes(fileoutputarff);
        SaveModelSIFileNAIVE_BAYES(modelsinaivebayes);
        
        LearningAlgorithmSelfImplementID3(fileoutputarff);
        SaveModelSIFileID3(modelsiid3);
    }
    
    
    /**
     * Runs all six learning algorithms (three Weka, three self-implemented)
     * with the default file locations (FILE_* constants) and saves each
     * resulting model. Order matters: every save relies on the immediately
     * preceding learning call having built its model.
     * @throws Exception if any learning or save step fails
     */
    public void AllOutLearningNovan() throws Exception{
        //Weka learning algorithm
        LearningAlgorithmWekaIBk();
        SaveModelWekaFileIBK();
        
        LearningAlgorithmWekaNaiveBayes();
        SaveModelWekaFileNAIVE_BAYES();
        
        LearningAlgorithmWekaID3();
        SaveModelWekaFileID3();
        
        //Self-implement learning algorithm
        LearningAlgorithmSelfImplementIBk();
        SaveModelSIFileIBK();
        
        LearningAlgorithmSelfImplementNaiveBayes();
        SaveModelSIFileNAIVE_BAYES();
        
        LearningAlgorithmSelfImplementID3();
        SaveModelSIFileID3();
    }
    
    /**
     * Stores the evaluation/test option chosen in the JSP front-end.
     * @param testoptiontype option index from the UI; offset by
     *        USE_TEST_OPTION_OFFFSET to map onto the USE_* constants
     * @param foldscv number of folds, used only for cross-validation
     * @param percentps training percentage, used only for percentage split
     */
    public void SaveTestOption(int testoptiontype,int foldscv,double percentps){
        testoptiontype += USE_TEST_OPTION_OFFFSET;
        if (testoptiontype==USE_TRAINING_SET){
            setDatatestmode(testoptiontype);
        }else if (testoptiontype==USE_SUPPLIED_TEST_SET){
            setDatatestmode(testoptiontype);
        }else if (testoptiontype==USE_CROSS_VALIDATION){
            setDatatestmode(testoptiontype);
            // BUG FIX: store the cross-validation fold count; this previously
            // called setKnn(foldscv), silently overwriting k-NN's k parameter
            // and leaving the fold count untouched.
            folds = foldscv;
        }else if (testoptiontype==USE_PERCENTAGE_SPLIT){
            setDatatestmode(testoptiontype);
            setPercentagesplit(percentps);
        }
    }
    
    /**
     * Builds a plain-text table comparing the accuracy (percent correct) of
     * all six models: three Weka classifiers and three self-implemented ones.
     * @return the formatted comparison table
     */
    public String AccuracyLearningComparation(){
        StringBuilder table = new StringBuilder();
        table.append("Model            |   Accuracy(%) :\n");
        table.append("IBk              :  ").append(Double.toString(weka_ibk_acc)).append("   \n");
        table.append("Naive Bayes      :  ").append(Double.toString(weka_naive_bayes_acc)).append("   \n");
        table.append("Id3              :  ").append(Double.toString(weka_id3_acc)).append("   \n");
        table.append("SI kNN           :  ").append(Double.toString(si_ibk_acc)).append("   \n");
        table.append("SI NaiveBayes    :  ").append(Double.toString(si_naive_bayes_acc)).append("   \n");
        table.append("SI Decision tree :  ").append(Double.toString(si_id3_acc)).append("   \n");
        return table.toString();
    }
    
    /**
     * Serialises the Weka IBk model to FILE_MODEL_NAME_WEKA_IBK.
     * Precondition: the IBk classifier has been built.
     * Effect: model file saved in the model folder.
     * @throws FileNotFoundException if the model file cannot be created
     * @throws IOException if serialisation fails
     */
    public void SaveModelWekaFileIBK() throws FileNotFoundException, IOException{
        m_OutputStream = new FileOutputStream(FILE_MODEL_NAME_WEKA_IBK);
        m_ObjectOutputStream = new ObjectOutputStream(m_OutputStream);
        try {
            m_ObjectOutputStream.writeObject(m_IBk);
        } finally {
            // BUG FIX: the stream was never closed (close() was commented
            // out), so the model file could remain unflushed/truncated and
            // the file descriptor leaked. All sibling save methods close.
            m_ObjectOutputStream.close();
        }
    }
    
    /**
     * Serialises the Weka IBk model to the given file.
     * Precondition: the IBk classifier has been built.
     * @param fileoutputmodel destination path for the serialised model
     * @throws FileNotFoundException if the model file cannot be created
     * @throws IOException if serialisation fails
     */
    public void SaveModelWekaFileIBK(String fileoutputmodel) throws FileNotFoundException, IOException{
        m_OutputStream = new FileOutputStream(fileoutputmodel);
        m_ObjectOutputStream = new ObjectOutputStream(m_OutputStream);
        try {
            m_ObjectOutputStream.writeObject(m_IBk);
        } finally {
            // BUG FIX: the stream was never closed (close() was commented
            // out), risking an unflushed/truncated model file and an fd leak.
            m_ObjectOutputStream.close();
        }
    }
    
    /**
     * Deserialises the Weka IBk model from FILE_MODEL_NAME_WEKA_IBK into m_IBk.
     * @throws FileNotFoundException if the model file does not exist
     * @throws IOException if reading the stream fails
     * @throws ClassNotFoundException if the serialised class is unavailable
     */
    public void LoadModelWekaFileIBK() throws FileNotFoundException, IOException, ClassNotFoundException{
        m_InputStream = new FileInputStream(FILE_MODEL_NAME_WEKA_IBK);
        m_ObjectInputStream = new ObjectInputStream(m_InputStream);
        m_IBk = (IBk)m_ObjectInputStream.readObject();
        m_ObjectInputStream.close();
    }
    
    /**
     * Serialises the Weka NaiveBayes model to FILE_MODEL_NAME_WEKA_NAIVE_BAYES.
     * Precondition: the NaiveBayes classifier has been built.
     * @throws FileNotFoundException if the model file cannot be created
     * @throws IOException if serialisation fails
     */
    public void SaveModelWekaFileNAIVE_BAYES() throws FileNotFoundException, IOException{
        m_OutputStream = new FileOutputStream(FILE_MODEL_NAME_WEKA_NAIVE_BAYES);
        m_ObjectOutputStream = new ObjectOutputStream(m_OutputStream);
        try {
            m_ObjectOutputStream.writeObject(m_NaiveBayes);
        } finally {
            // ROBUSTNESS: close even when writeObject throws (was only
            // closed on the success path, leaking the descriptor on error).
            m_ObjectOutputStream.close();
        }
    }
    
    /**
     * Serialises the Weka NaiveBayes model to the given file.
     * Precondition: the NaiveBayes classifier has been built.
     * @param fileoutputmodel destination path for the serialised model
     * @throws FileNotFoundException if the model file cannot be created
     * @throws IOException if serialisation fails
     */
    public void SaveModelWekaFileNAIVE_BAYES(String fileoutputmodel) throws FileNotFoundException, IOException{
        m_OutputStream = new FileOutputStream(fileoutputmodel);
        m_ObjectOutputStream = new ObjectOutputStream(m_OutputStream);
        try {
            m_ObjectOutputStream.writeObject(m_NaiveBayes);
        } finally {
            // ROBUSTNESS: close even when writeObject throws.
            m_ObjectOutputStream.close();
        }
    }
    
    /**
     * Deserialises the Weka NaiveBayes model from
     * FILE_MODEL_NAME_WEKA_NAIVE_BAYES into m_NaiveBayes.
     * @throws FileNotFoundException if the model file does not exist
     * @throws IOException if reading the stream fails
     * @throws ClassNotFoundException if the serialised class is unavailable
     */
    public void LoadModelWekaFileNAIVE_BAYES() throws FileNotFoundException, IOException, ClassNotFoundException{
       m_InputStream = new FileInputStream(FILE_MODEL_NAME_WEKA_NAIVE_BAYES);
       m_ObjectInputStream = new ObjectInputStream(m_InputStream);
       m_NaiveBayes = (NaiveBayes)m_ObjectInputStream.readObject();
       m_ObjectInputStream.close();
    }
    
    /**
     * Serialises the Weka ID3 model to FILE_MODEL_NAME_WEKA_ID3.
     * Precondition: the ID3 classifier has been built.
     * Effect: ID3 model file saved in the model folder.
     * @throws FileNotFoundException if the model file cannot be created
     * @throws IOException if serialisation fails
     */
    public void SaveModelWekaFileID3() throws FileNotFoundException, IOException{
       m_OutputStream = new FileOutputStream(FILE_MODEL_NAME_WEKA_ID3);
       m_ObjectOutputStream = new ObjectOutputStream(m_OutputStream);
       try {
           m_ObjectOutputStream.writeObject(m_Id3);
       } finally {
           // ROBUSTNESS: close even when writeObject throws.
           m_ObjectOutputStream.close();
       }
    }
    
    /**
     * Serialises the Weka ID3 model to the given file.
     * Precondition: the ID3 classifier has been built.
     * @param fileoutputmodel destination path for the serialised model
     * @throws FileNotFoundException if the model file cannot be created
     * @throws IOException if serialisation fails
     */
    public void SaveModelWekaFileID3(String fileoutputmodel) throws FileNotFoundException, IOException{
       m_OutputStream = new FileOutputStream(fileoutputmodel);
       m_ObjectOutputStream = new ObjectOutputStream(m_OutputStream);
       try {
           m_ObjectOutputStream.writeObject(m_Id3);
       } finally {
           // ROBUSTNESS: close even when writeObject throws.
           m_ObjectOutputStream.close();
       }
    }
    
    /**
     * Deserialises the Weka ID3 model from FILE_MODEL_NAME_WEKA_ID3 into m_Id3.
     * @throws FileNotFoundException if the model file does not exist
     * @throws IOException if reading the stream fails
     * @throws ClassNotFoundException if the serialised class is unavailable
     */
    public void LoadModelWekaFileID3() throws FileNotFoundException, IOException, ClassNotFoundException{
        m_InputStream = new FileInputStream(FILE_MODEL_NAME_WEKA_ID3);
        m_ObjectInputStream = new ObjectInputStream(m_InputStream);
        m_Id3 = (Id3)m_ObjectInputStream.readObject();
        m_ObjectInputStream.close();
    }
    
    /**
     * Writes a textual description of the self-implemented k-NN "model"
     * (its k parameter) to FILE_MODEL_NAME_SI_IBK. The SI k-NN is lazy, so
     * only the parameter is persisted, not a serialised object.
     * @throws FileNotFoundException declared for caller compatibility
     * @throws IOException declared for caller compatibility
     * @throws ClassNotFoundException declared for caller compatibility
     */
    public void SaveModelSIFileIBK() throws FileNotFoundException, IOException, ClassNotFoundException{
        try {
            BufferedWriter out = new BufferedWriter(new FileWriter(FILE_MODEL_NAME_SI_IBK));
            try {
                out.write("Self-Implement k-NN, k = " + knn);
            } finally {
                // BUG FIX: guarantee the writer is closed even if write fails.
                out.close();
            }
        }
        catch (IOException e)
        {
            // BUG FIX: report the actual failure instead of a bare "Exception".
            System.out.println("Exception writing SI-IBK model: " + e.getMessage());
        }
    }
    
    /**
     * Writes a textual description of the self-implemented k-NN "model"
     * (its k parameter) to the given file.
     * @param fileoutputmodel destination path for the model description
     * @throws FileNotFoundException declared for caller compatibility
     * @throws IOException declared for caller compatibility
     * @throws ClassNotFoundException declared for caller compatibility
     */
    public void SaveModelSIFileIBK(String fileoutputmodel) throws FileNotFoundException, IOException, ClassNotFoundException{
        try {
            BufferedWriter out = new BufferedWriter(new FileWriter(fileoutputmodel));
            try {
                out.write("Self-Implement k-NN, k = " + knn);
            } finally {
                // BUG FIX: guarantee the writer is closed even if write fails.
                out.close();
            }
        }
        catch (IOException e)
        {
            // BUG FIX: report the actual failure instead of a bare "Exception".
            System.out.println("Exception writing SI-IBK model: " + e.getMessage());
        }
    }
    
    /**
     * Placeholder — loading the self-implemented k-NN model is not
     * implemented; the saved SI-IBK "model" is only a descriptive text file
     * (see SaveModelSIFileIBK).
     */
    public void LoadModelSIFileIBK(){
        
    }
    
    /**
     * Learns the self-implemented Naive Bayes model and writes its textual
     * representation to FILE_MODEL_NAME_SI_NAIVE_BAYES.
     * Precondition: m_MyNaiveBayes has been initialised with training data.
     */
    public void SaveModelSIFileNAIVE_BAYES(){
        m_MyNaiveBayes.doLearn();
        System.out.println("NaiveBayes : " + m_MyNaiveBayes.getModel());
        try {
            BufferedWriter out = new BufferedWriter(new FileWriter(FILE_MODEL_NAME_SI_NAIVE_BAYES));
            try {
                out.write(m_MyNaiveBayes.getModel());
            } finally {
                // BUG FIX: guarantee the writer is closed even if write fails.
                out.close();
            }
        }
        catch (IOException e)
        {
            // BUG FIX: report the actual failure instead of a bare "Exception".
            System.out.println("Exception writing SI-NaiveBayes model: " + e.getMessage());
        }
    }
    
    /**
     * Learns the self-implemented Naive Bayes model and writes its textual
     * representation to the given file.
     * Precondition: m_MyNaiveBayes has been initialised with training data.
     * @param fileoutputmodel destination path for the model text
     */
    public void SaveModelSIFileNAIVE_BAYES(String fileoutputmodel){
        m_MyNaiveBayes.doLearn();
        try {
            BufferedWriter out = new BufferedWriter(new FileWriter(fileoutputmodel));
            try {
                out.write(m_MyNaiveBayes.getModel());
            } finally {
                // BUG FIX: guarantee the writer is closed even if write fails.
                out.close();
            }
        }
        catch (IOException e)
        {
            // BUG FIX: report the actual failure instead of a bare "Exception".
            System.out.println("Exception writing SI-NaiveBayes model: " + e.getMessage());
        }
    }
    
    /**
     * Placeholder — loading the self-implemented Naive Bayes model is not
     * implemented; only a textual dump is saved (see SaveModelSIFileNAIVE_BAYES).
     */
    public void LoadModelSIFileNAIVE_BAYES(){
        
    }
    
    /**
     * Serialises the self-implemented ID3 model to FILE_MODEL_NAME_SI_ID3.
     * Precondition: m_MyId3 has been built.
     * @throws FileNotFoundException if the model file cannot be created
     * @throws IOException if serialisation fails
     */
    public void SaveModelSIFileID3() throws FileNotFoundException, IOException{
       m_OutputStream = new FileOutputStream(FILE_MODEL_NAME_SI_ID3);
       m_ObjectOutputStream = new ObjectOutputStream(m_OutputStream);
       try {
           m_ObjectOutputStream.writeObject(m_MyId3);
       } finally {
           // ROBUSTNESS: close even when writeObject throws.
           m_ObjectOutputStream.close();
       }
    }
    
    /**
     * Serialises the self-implemented ID3 model to the given file.
     * Precondition: m_MyId3 has been built.
     * @param fileoutputmodel destination path for the serialised model
     * @throws FileNotFoundException if the model file cannot be created
     * @throws IOException if serialisation fails
     */
    public void SaveModelSIFileID3(String fileoutputmodel) throws FileNotFoundException, IOException{
       m_OutputStream = new FileOutputStream(fileoutputmodel);
       m_ObjectOutputStream = new ObjectOutputStream(m_OutputStream);
       try {
           m_ObjectOutputStream.writeObject(m_MyId3);
       } finally {
           // ROBUSTNESS: close even when writeObject throws.
           m_ObjectOutputStream.close();
       }
    }
    
    /**
     * Placeholder — loading the self-implemented ID3 model is not implemented
     * (the serialised file written by SaveModelSIFileID3 is never read back).
     */
    public void LoadModelSIFileID3(){
        
    }
    
    /**
     * Wraps a non-numeric value in the escaped-quote scheme this application
     * uses for ARFF nominal values; numeric strings are returned unchanged.
     * NOTE(review): the substring call drops the value's final character
     * before appending the closing quotes — presumably the inputs carry a
     * trailing character that must be stripped; confirm against the ARFF
     * files this produces.
     * @param input raw attribute value
     * @return the ARFF-formatted value
     */
    public String FormatInArffNominal(String input){
        if (isFloat(input)){
            return input;
        }
        String quoted = "\'\\" + input;
        quoted = quoted.substring(0, quoted.length()-1);
        return quoted + "\\\'\'";
    }
    
    /**
     * Appends one data row to the end of a file. The target file is assumed
     * to be of type .arff. Every attribute except the last (the class label)
     * is formatted through FormatInArffNominal; values are comma-separated.
     * @param one the new data row to append
     * @param filename the file the row is appended to
     * @throws Exception if writing fails
     */
    public void appendData(OneDataTest one, String filename) throws Exception {
        System.out.println("Nama file yang ditulis : " + filename);
        BufferedWriter bw = new BufferedWriter(new FileWriter(filename, true));
        try {
            StringBuilder sb = new StringBuilder();
            // GENERALIZED: the original appended indices 0..18 and then 19 in
            // 40 copy-pasted statements; this loop handles any valatr length.
            int last = one.valatr.length - 1;
            for (int i = 0; i < last; ++i) {
                sb.append(FormatInArffNominal(one.valatr[i]));
                sb.append(",");
            }
            // The class value is appended verbatim, without nominal quoting.
            sb.append(one.valatr[last]);
            sb.append("\n");
            bw.write(sb.toString());
        } finally {
            // ROBUSTNESS: close the writer even if formatting/writing throws
            // (the original leaked it on any exception).
            bw.close();
        }
    }
    
    /**
     * Tests whether the given string parses as a floating-point number.
     * @param s candidate string
     * @return true iff Double.parseDouble(s) succeeds
     */
    public static boolean isFloat(String s){
        boolean parsable = true;
        try {
            Double.parseDouble(s);
        } catch (NumberFormatException nfe) {
            parsable = false;
        }
        return parsable;
    }
    
    /**
     * Writes a raw string value into attribute idx of the given Weka
     * instance: as a numeric value when it parses as a double, otherwise as
     * a nominal/string value.
     * @param instance the instance being populated
     * @param idx attribute index to set
     * @param value raw value from the input row
     */
    public void adjustInputToInstance(Instance instance,int idx,String value){
        boolean numeric = isFloat(value);
        if (!numeric) {
            // Nominal/string attribute value.
            instance.setValue(idx, value);
        } else {
            instance.setValue(idx, Double.parseDouble(value));
        }
    }

    /**
     * Builds an Instances set containing the single supplied data row, using
     * the header of structurefileinput as the attribute structure.
     * Side effects: re-initialises m_OneDataTest, m_File and m_Instance.
     * @param structurefileinput ARFF file whose header defines the attributes
     * @param oneDataTest the single data row to wrap
     * @return an Instances set holding exactly that one instance
     * @throws IOException if the structure file cannot be read
     */
    public Instances getInstancesFromOneInput(String structurefileinput,OneDataTest oneDataTest) throws IOException{
        //Initialise m_OneDataTest with a copy of the incoming attribute values
        m_OneDataTest = new OneDataTest(oneDataTest.valatr);
        
        // Load only the ARFF header (attribute declarations), no data rows.
        m_File = new File(structurefileinput);
        m_ArffLoader.setFile(m_File);
        Instances retval = m_ArffLoader.getStructure();
//        System.out.println("Structure : " + retval);
        m_Instance = new Instance(retval.numAttributes());
        m_Instance.setDataset(retval);
        
        
//        m_Instance.setValue(0, "\'(-inf-26.1]\'");
        // Copy every raw attribute value into the instance (numeric or nominal).
        for(int i=0;i<oneDataTest.TOTAL_ATTRIBUTE;++i){
            adjustInputToInstance(m_Instance, i, oneDataTest.valatr[i]);
        }
        retval.add(m_Instance);
        
//        System.out.println("RETVAL : \n" + retval);
//        adjustInputToInstance(m_Instance, 0, oneDataTest.valatr[0]);
//        adjustInputToInstance(m_Instance, 0, oneDataTest.valatr[0]);
        
        System.out.println("Instance-nya : " + m_Instance);
        return retval;
    }
    
    /**
     * Classifies every instance in m_ClassifyInstances with the model chosen
     * by model_algorithm_used and returns the predicted class label of the
     * first instance as a string.
     * Preconditions: m_ClassifyInstances is initialised and the selected
     * model has already been built or loaded.
     * Side effects: m_ClassifyInstances is replaced by its labeled copy and
     * m_Instance points at its first instance.
     * @return the class label of the first instance according to the model
     * @throws Exception if classification fails
     */
    public String getClassifyFromOneInput() throws Exception{
        Instances unlabeled = m_ClassifyInstances;
        unlabeled.setClassIndex(unlabeled.numAttributes()-1);
        Instances labeled = new Instances(unlabeled);

        if (model_algorithm_used==USE_WEKA_IBK){
            System.out.println("DATA ONE :\n" + m_Instance);
            labelWithClassifier(m_IBk, unlabeled, labeled);
        }else if (model_algorithm_used==USE_WEKA_NAIVE_BAYES){
            System.out.println("DATA ONE :\n" + m_Instance);
            labelWithClassifier(m_NaiveBayes, unlabeled, labeled);
        }else if (model_algorithm_used==USE_WEKA_ID3){
            labelWithClassifier(m_Id3, unlabeled, labeled);
        }else if (model_algorithm_used==USE_SI_IBK){
            System.out.println("USE SI IBK CLASSIFY ONE");
            // The self-implemented k-NN labels the copy in place.
            m_MyIBk.doClassification(labeled);
        }else if (model_algorithm_used==USE_SI_NAIVE_BAYES){
            System.out.println("USE SI NaiveBayes CLASSIFY ONE");
            m_MyNaiveBayes.doClassification(labeled);
        }else if (model_algorithm_used==USE_SI_ID3){
            labelWithClassifier(m_MyId3, unlabeled, labeled);
        }else{
            // BUG FIX: an unknown mode previously fell through to a
            // NullPointerException on labeled; fail with a clear message.
            throw new IllegalStateException("Unknown model_algorithm_used: " + model_algorithm_used);
        }
        m_Instance = labeled.instance(0);
        m_ClassifyInstances = labeled;
        return m_Instance.stringValue(m_Instance.numAttributes()-1);
    }
    
    /**
     * Labels every instance of unlabeled into the parallel labeled set using
     * the given Weka classifier, printing each prediction. Helper extracted
     * from the four byte-identical branches of getClassifyFromOneInput.
     */
    private void labelWithClassifier(weka.classifiers.Classifier classifier,
            Instances unlabeled, Instances labeled) throws Exception{
        for(int i=0;i<unlabeled.numInstances();++i){
            double clsLabel = classifier.classifyInstance(unlabeled.instance(i));
            labeled.instance(i).setClassValue(clsLabel);
            System.out.println(i + " : " + labeled.instance(i).stringValue(labeled.numAttributes()-1));
        }
    }
    
    /**
     * Classifies a single input record using the selected model algorithm.
     * (This Javadoc documents {@code FinalClassify} below; the
     * {@code resultClassifyOneInput} field declaration sits in between.)
     *
     * @param fileinputstruture path of the ARFF file whose header supplies the
     *        attribute structure for building the one-row test set
     * @param filedatatestoutputarff destination ARFF file for the one-row test set
     * @param filedatatestoutputcsv destination CSV file for the one-row test set
     *        (currently unused — the CSV export call below is commented out)
     * @param odt the single data record to classify
     * @param valuemodelalgorithmused algorithm selector passed WITHOUT the model
     *        offset; {@code USE_MODEL_OFFSET} is added inside this method
     * @throws IOException if the structure file cannot be read or the ARFF file
     *         cannot be written
     * @throws Exception propagated from the underlying classification calls
     */
    
    // Class label predicted by the most recent FinalClassify(...) call.
    public String resultClassifyOneInput="";
    
    public void FinalClassify(String fileinputstruture,String filedatatestoutputarff,String filedatatestoutputcsv,OneDataTest odt,int valuemodelalgorithmused) throws IOException, Exception{
        // Shift the caller's raw selector into the "use saved model" range.
        valuemodelalgorithmused += USE_MODEL_OFFSET;
        setModel_algorithm_used(valuemodelalgorithmused);
        
        // Build a one-instance dataset from the record (structure taken from
        // the ARFF header file) and persist it for inspection/reuse.
        m_ClassifyInstances = getInstancesFromOneInput(fileinputstruture,odt);
        SaveInstancesToExternalFile(m_ClassifyInstances, filedatatestoutputarff, LogicImageSegmentation.ARFF_FILE);
//        System.out.println("m_ClassifyInstances : " + lis.m_ClassifyInstances);
//        lis.FilterUnsupervisedFileDiscretize(LogicImageSegmentation.FILE_DATA_TEST_ONE_NAME_ARFF);
        // Run the selected classifier and write the predicted label both into
        // this object and into the record's last attribute slot.
        // NOTE(review): assumes m_OneDataTest was populated from odt inside
        // getInstancesFromOneInput — confirm against that method.
        resultClassifyOneInput = getClassifyFromOneInput();
        m_OneDataTest.valatr[m_OneDataTest.TOTAL_ATTRIBUTE-1] = resultClassifyOneInput;
        System.out.println("TEBAKAN MODEL " + resultClassifyOneInput);
//        System.out.println("TEBAKAN MENURUT MODEL : " + getClassifyFromOneInput());
//        
//        SaveInstancesToExternalFile(m_ClassifyInstances, filedatatestoutputarff, ARFF_FILE);
//        SaveArffToCSV(filedatatestoutputarff, filedatatestoutputcsv);
    }
    
    /**
     * Manual driver: loads the discretized segment data set, trains the
     * self-implemented Naive Bayes model on the training set, saves the model,
     * and runs the combined learning/report routine. The commented-out lines
     * are alternative configurations kept for experimentation.
     *
     * <p>Fix: the original built the {@code value} array twice — a block of
     * raw numeric values immediately and completely overwritten by the
     * discretized-interval values. The twenty dead stores were removed; only
     * the values actually used to construct {@link OneDataTest} remain.
     *
     * @param args unused
     * @throws IOException if a data-set file cannot be read or written
     * @throws Exception propagated from the learning/evaluation calls
     */
    public static void main(String[] args) throws IOException, Exception{
        LogicImageSegmentation lis = new LogicImageSegmentation();
//        String fileinput="src/java/data_set/kuisib.arff";
        String fileinput="src/java/data_set/segment_discretize_code.arff";
//        String fileinput = LogicImageSegmentation.FILE_DATA_SET_NAME_ARFF;
        lis.InisializesInstances(fileinput, LogicImageSegmentation.ARFF_FILE);
//        System.out.println(lis.m_Instances);
        
//        lis.FilterUnsupervisedFileDiscretize(lis.m_Instances);
        
//        lis.FilterUnsupervisedNormalizeAttribute();
//        lis.FilterUnsupervisedReplaceMissingValue();
//        lis.FilterSupervisedAttributeSelection();
//        lis.options[1]="1,2,3,last";
//        lis.FilterManualSelection();
//        String fileoutput="src/java/data_set/segmentremove_code.arff";
//        lis.SaveInstancesToExternalFile(fileoutput, ARFF_FILE);
//        System.out.println(lis.m_Instances);
        
//        lis.setKnn(3);
        
        // Evaluate on the training set itself; alternatives below.
        lis.setDatatestmode(LogicImageSegmentation.USE_TRAINING_SET);
//        lis.setFolds(10);
//        lis.setDatatestmode(LogicImageSegmentation.USE_CROSS_VALIDATION);
//        lis.setFolds(3);
//        lis.setDatatestmode(LogicImageSegmentation.USE_PERCENTAGE_SPLIT);
//        lis.setPercentagesplit(66.0);
//        lis.setDatatestmode(LogicImageSegmentation.USE_SUPPLIED_TEST_SET);
        
//        lis.SavePreprocessingToExternalFile(LogicImageSegmentation.FILE_DATA_SET_NAME_ARFF, LogicImageSegmentation.FILE_DATA_SET_NAME_CSV, LogicImageSegmentation.PREPROCESSING_DISCRETIZE-LogicImageSegmentation.PREPROCESSING_OFFSET, null);
        
//        lis.FilterUnsupervisedDiscretize();
//        lis.FilterUnsupervisedFileDiscretize(LogicImageSegmentation.FILE_DATA_TEST_NAME_ARFF);

//        lis.setKnn(1);        
//        lis.LearningAlgorithmWekaIBk();
//        lis.setModelWEKAIBK_algorithm_used();
        
//        lis.LearningAlgorithmWekaNaiveBayes();
//        lis.setModelWEKANAIVEBAYES_algorithm_used();
       
//       lis.setModelWEKAID3_algorithm_used();
//       System.out.println("m_Instances : " + lis.m_Instances);
//       lis.LearningAlgorithmWekaID3();
//       System.out.println("m_Id3 : " + lis.m_Id3);
       
        // Train, select, and persist the self-implemented Naive Bayes model.
        lis.LearningAlgorithmSelfImplementNaiveBayes();
        lis.setModelSINAIVEBAYES_algorithm_used();
        lis.SaveModelSIFileNAIVE_BAYES();
        
//        lis.LearningAlgorithmWekaIBk();
//        lis.setModelWEKAIBK_algorithm_used();
//        lis.LearningAlgorithmSelfImplementIBk();
//        lis.setModelSIIBK_algorithm_used();
//        System.out.println("wekaid3 : " + lis.m_IBk);
//        System.out.println("siid3 : " + lis.m_MyIBk);
//        System.out.println("wekaid3 acc : " + lis.weka_ibk_acc);
//        System.out.println("siid3 acc : " + lis.si_ibk_acc);
//        
//        lis.LearningAlgorithmWekaID3();
//        lis.LearningAlgorithmSelfImplementID3();
//        lis.LearningAlgorithmSelfImplementID3();
//        lis.setModelSIID3_algorithm_used();
//        System.out.println("wekaid3 : " + lis.m_Id3);
//        System.out.println("siid3 : " + lis.m_MyId3);
//        System.out.println("wekaid3 acc : " + lis.weka_id3_acc);
//        System.out.println("siid3 acc : " + lis.si_id3_acc);
        
        lis.AllOutLearningNovan();
//        lis.SaveTestOption(USE_TRAINING_SET, 0, 0);
//        System.out.println(lis.AccuracyLearningComparation());
//        
//        lis.SaveModelFileIBK();
//        lis.SaveModelFileID3();
//        lis.SaveModelFileNAIVE_BAYES();
        
//        lis.LoadModelFileIBK();
//        lis.LoadModelFileID3();
//        lis.LoadModelFileNAIVE_BAYES();
        
//        System.out.println("IBK : \n" + lis.m_IBk);
//        System.out.println("ID3 : \n" + lis.m_Id3);
//        System.out.println("Naive Bayes : \n"+ lis.m_NaiveBayes);
        
//        System.out.println(lis.m_Evaluation);
//        Instances unlabeled = new Instances(new BufferedReader(new FileReader("src/java/data_set/unlabeledkuisib.arff")));
//        unlabeled.setClassIndex(unlabeled.numAttributes()-1);
        
//        Instances labeled = new Instances(unlabeled);
//        for(int i=0;i<unlabeled.numInstances();i++){
//            double clsLabel = lis.m_IBk.classifyInstance(unlabeled.instance(i));
//            labeled.instance(i).setClassValue(clsLabel);
//            System.out.println(i + " : " + labeled.instance(i).stringValue(labeled.numAttributes()-1));
//        }
//        
//        BufferedWriter writer = new BufferedWriter(
//                           new FileWriter("src/java/data_set/labeledkuisib.arff"));
//        writer.write(labeled.toString());
//        writer.newLine();
//        writer.write(unlabeled.toString());
//        writer.newLine();
//        writer.flush();
//        writer.close();
        
        // Single-record classification example: 19 discretized attribute
        // intervals plus the expected class label in the last slot.
        String[] value = {
            "'(126.5-151.6]'",
            "'(106.6-130.5]'",
            "'All'",
            "'(-inf-0.011111]'",
            "'(-inf-0.022222]'",
            "'(-inf-2.55]'",
            "'(-inf-57.29964]'",
            "'(-inf-4.472223]'",
            "'(-inf-138.63292]'",
            "'(-inf-14.344444]'",
            "'(-inf-13.688889]'",
            "'(-inf-15.088889]'",
            "'(-inf-14.255556]'",
            "'(0.377778-inf)'",
            "'(-0.822222-8.022222]'",
            "'(-9.577777--4.333332]'",
            "'(-inf-15.088889]'",
            "'(0.5-0.6]'",
            "'(-1.451774--0.912186]'",
            "PATH"
        };
        OneDataTest odt = new OneDataTest(value);
//        lis.m_ClassifyInstances = lis.getInstancesFromOneInput(fileinput,odt);
//        lis.SaveInstancesToExternalFile(lis.m_ClassifyInstances, LogicImageSegmentation.FILE_DATA_TEST_ONE_NAME_ARFF, LogicImageSegmentation.ARFF_FILE);
//        System.out.println("m_ClassifyInstances : " + lis.m_ClassifyInstances);
//        lis.FilterUnsupervisedFileDiscretize(LogicImageSegmentation.FILE_DATA_TEST_ONE_NAME_ARFF);
//        System.out.println("TEBAKAN MENURUT MODEL : " + lis.getClassifyFromOneInput());
//        
//        lis.SaveInstancesToExternalFile(lis.m_ClassifyInstances, LogicImageSegmentation.FILE_DATA_TEST_ONE_NAME_ARFF, LogicImageSegmentation.ARFF_FILE);
//        LogicImageSegmentation.SaveArffToCSV(LogicImageSegmentation.FILE_DATA_SET_NAME_ARFF, LogicImageSegmentation.FILE_DATA_SET_NAME_CSV);
//        lis.FinalClassify(fileinput, LogicImageSegmentation.FILE_DATA_TEST_ONE_NAME_ARFF, LogicImageSegmentation.FILE_DATA_TEST_ONE_NAME_CSV, odt, LogicImageSegmentation.USE_SI_NAIVE_BAYES-LogicImageSegmentation.USE_MODEL_OFFSET);
//        lis.appendData(lis.m_OneDataTest, fileinput);
        
    }
}
