/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */

package getrootbahasa;

import com.mysql.jdbc.PreparedStatement;
import com.stemaini.core.doc.Dokumen;
import com.stemaini.core.doc.Terms;
import com.stemaini.core.doc.Vektor;
import com.stemaini.core.database.Kamus;
import com.stemaini.core.doc.VektorNode;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import taini.koneksi.KoneksiAini;
import weka.classifiers.bayes.NaiveBayes;
import weka.classifiers.misc.SerializedClassifier;
import weka.core.Debug;
import weka.core.Instances;
import weka.core.converters.ArffLoader;

/**
 *
 * @author WaOnEmperoR
 */
/**
 * Generates a CSV of per-document TF-IDF feature vectors from the database,
 * converts it to ARFF, and provides a helper to train and persist a Weka
 * Naive Bayes model on such an ARFF training set.
 *
 * @author WaOnEmperoR
 */
public class MainV3_GenARFF_Training {

    /** Number of documents to process (Nomor_Dokumen runs 1..DOC_COUNT). */
    private static final int DOC_COUNT = 270;

    /**
     * Builds "preparebesar.csv": a header row of every word in Word_IDF
     * (sorted) plus a "class_kat" column, then one row per document holding
     * each word's TF-IDF value (0 when the word is absent) followed by the
     * document's category. Finally converts the CSV to "datasetbesar.arff".
     */
    public static void main(String[] args) {
        KoneksiAini theKoneksi = new KoneksiAini();
        // StringBuilder avoids O(n^2) repeated String concatenation over
        // 270 documents x full vocabulary.
        StringBuilder prepare = new StringBuilder();

        try {
            theKoneksi.connectFirst();
            // Header row: one attribute per vocabulary word, sorted so the
            // column order matches the per-document query below.
            String sql = "Select Word from Word_IDF order by Word";
            ResultSet res = theKoneksi.executeSelect(sql);
            while (res.next()) {
                String word = res.getString(1);
                prepare.append(word).append(',');
                System.out.println(word);
            }
        } catch (SQLException ex) {
            Logger.getLogger(MainV3_GenARFF_Training.class.getName()).log(Level.SEVERE, null, ex);
        }
        prepare.append("class_kat\n");

        try {
            // One CSV row per document. NOTE(review): still single-threaded;
            // the original author observed slowdown past ~50 documents —
            // multithreading may help, but is out of scope here.
            for (int j = 1; j <= DOC_COUNT; j++) {
                System.out.println("Dokumen ke : " + j);

                // LEFT JOIN yields every vocabulary word with its TF-IDF for
                // document j, COALESCEd to 0 when the word does not occur,
                // ordered identically to the header row.
                String sql = "select aa.word, aa.Nomor_Dokumen, coalesce(TF_IDF,0) as TF_IDF from (select word, nomor_dokumen from word_idf b  join (select distinct nomor_dokumen from word_frec_no) a where Nomor_Dokumen = " + j + ") aa left join word_frec_no bb on aa.word=bb.kata and aa.Nomor_Dokumen=bb.Nomor_Dokumen order by aa.word";
                ResultSet res = theKoneksi.executeSelect(sql);
                while (res.next()) {
                    prepare.append(Double.parseDouble(res.getString(3))).append(',');
                }

                // Class label (category) for this document; last row wins if
                // the query somehow returns more than one.
                String sql4 = "select Kategori from Berita where Nomor_Dokumen = " + j;
                ResultSet res4 = theKoneksi.executeSelect(sql4);
                String kategori = "";
                while (res4.next()) {
                    kategori = res4.getString(1);
                }
                prepare.append(kategori).append('\n');
            }

            // try-with-resources guarantees the writer is closed even when
            // write() fails (the original leaked it on the exception path).
            try (BufferedWriter out = new BufferedWriter(new FileWriter("preparebesar.csv"))) {
                out.write(prepare.toString());
            } catch (IOException ex) {
                Logger.getLogger(MainV3_GenARFF_Training.class.getName()).log(Level.SEVERE, null, ex);
            }

            CSVtoArff konv = new CSVtoArff("preparebesar.csv", "datasetbesar.arff");
            konv.Convert();
        } catch (SQLException ex) {
            Logger.getLogger(MainV3_GenARFF_Training.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /**
     * Trains a Naive Bayes classifier on an ARFF training set and serializes
     * the resulting model to disk.
     *
     * @param dataTraining path to the ARFF training file; the last attribute
     *                     is treated as the class attribute
     * @param fileSave     path where the trained model is written via Weka's
     *                     {@code Debug.saveToFile}
     * @throws Exception if building the classifier fails (propagated from Weka)
     */
    public static void TrainAndSaveModel(String dataTraining, String fileSave) throws Exception {
        NaiveBayes nb = new NaiveBayes(); // any other Weka classifier could be substituted

        try {
            ArffLoader loader = new ArffLoader();
            loader.setSource(new File(dataTraining)); // load the training data
            Instances train = loader.getDataSet();
            train.setClassIndex(train.numAttributes() - 1); // class = last attribute
            nb.buildClassifier(train); // build the classification model
            Debug.saveToFile(fileSave, nb); // persist the trained model
        } catch (IOException ex) {
            Logger.getLogger(MainV3_GenARFF_Training.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
}
