/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */

package getrootbahasa;

/**
 *
 * @author WaOnEmperoR
 */
import com.mysql.jdbc.PreparedStatement;
import com.stemaini.core.doc.Dokumen;
import com.stemaini.core.doc.Terms;
import com.stemaini.core.doc.Vektor;
import com.stemaini.core.database.Kamus;
import com.stemaini.core.doc.VektorNode;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.PrintWriter;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import taini.koneksi.KoneksiAini;
import weka.classifiers.bayes.NaiveBayes;
import weka.classifiers.misc.SerializedClassifier;
import weka.core.Debug;
import weka.core.Instances;
import weka.core.converters.ArffLoader;

/**
 * Single-document classification pipeline: preprocesses one raw document,
 * stores its weighted term frequencies in the {@code word_frec_no_testing}
 * table, computes TF-IDF weights, builds an ARFF test file and classifies
 * it with a serialized Weka model.
 *
 * NOTE(review): all SQL here is built by string concatenation because the
 * KoneksiAini API only exposes executeUpdate/executeSelect(String). Values
 * are escaped with {@link #escapeSql(String)}; exposing PreparedStatement
 * with '?' placeholders from KoneksiAini would be the proper fix.
 */
public class SingleTest {

    /**
     * Runs the front half of the pipeline: clears the per-document
     * term-frequency table, extracts terms from {@code isi} and updates
     * their TF-IDF weights.
     *
     * @param isi raw document text
     */
    public static void Jalankan(String isi){
        KoneksiAini theKoneksi = new KoneksiAini();

        try {
            theKoneksi.connectFirst();
            // Start from a clean slate for this single test document.
            theKoneksi.executeUpdate("delete from word_frec_no_testing");

            GetWords(isi);
        } catch (SQLException ex) {
            // was Logger.getLogger(Main.class...) — log under the class that failed
            Logger.getLogger(SingleTest.class.getName()).log(Level.SEVERE, null, ex);
        }

        UpdateTFIDF();
    }

    /**
     * Stems {@code str} and stores the weighted term frequency
     * (Wt,d = 1 + log10(tf)) of every term occurring more than twice into
     * {@code word_frec_no_testing}. The document number is fixed at 1
     * because a single test document is processed.
     *
     * @param str raw document text
     */
    public static void GetWords(String str){
        KoneksiAini kon = new KoneksiAini();

        Vektor vektor = preprocessing(str);
        // preprocessing() returns null when the dictionary DB is unreachable;
        // the original dereferenced it unconditionally and threw an NPE.
        if (vektor == null) {
            return;
        }

        try {
            // Connect once; the original reconnected on every term.
            kon.connectFirst();
        } catch (SQLException ex) {
            Logger.getLogger(SingleTest.class.getName()).log(Level.SEVERE, null, ex);
            return;
        }

        for (int i = 0; i < vektor.getSize(); i++) {
            VektorNode vn = vektor.getTermNode(i);
            String kata = vn.getWord();
            int frekuensi = vn.getFreq();
            double berat = 1 + Math.log10(frekuensi); // Wt,d formula

            // Only keep terms whose frequency is greater than 2.
            if (frekuensi > 2) {
                try {
                    String sql = "INSERT INTO `word_frec_no_testing` (`Nomor_Dokumen`, `Kata`, `Frekuensi`, `Berat`) VALUES (1, '"
                            + escapeSql(kata) + "', " + frekuensi + ", " + berat + ")";
                    kon.executeUpdate(sql);
                } catch (SQLException ex) {
                    Logger.getLogger(SingleTest.class.getName()).log(Level.SEVERE, null, ex);
                }
            }
        }
    }

    /**
     * Builds the ARFF test file for the single document: writes a CSV row of
     * TF-IDF weights ordered by vocabulary word, converts it to ARFF, then
     * rewrites the {@code class_kat} attribute line with the nominal category
     * list the trained model expects.
     *
     * @throws IOException if the intermediate files cannot be read
     */
    public static void BuildARFF() throws IOException{
        KoneksiAini theKoneksi = new KoneksiAini();

        // StringBuilder instead of repeated String concatenation — the
        // original was O(n^2) over the vocabulary size.
        StringBuilder prepare = new StringBuilder();

        try {
            theKoneksi.connectFirst();
            // Header row: one column per vocabulary word, sorted.
            ResultSet res = theKoneksi.executeSelect("Select Word from Word_IDF order by Word");
            while (res.next()) {
                prepare.append(res.getString(1)).append(',');
            }
        } catch (SQLException ex) {
            Logger.getLogger(SingleTest.class.getName()).log(Level.SEVERE, null, ex);
        }
        prepare.append("class_kat\n");

        try {
            // Only document 1 exists for a single test; the loop shape is kept
            // so this mirrors the multi-document training variant.
            theKoneksi.connectFirst();
            for (int j = 1; j <= 1; j++) {
                ResultSet res = theKoneksi.executeSelect("Select Word from Word_IDF order by Word");

                System.out.println("Dokumen ke : " + j);

                while (res.next()) {
                    String isi = res.getString(1);
                    String sql2 = "select count(*) as Hasil from word_frec_no_testing where Kata = '"
                            + escapeSql(isi) + "' and Nomor_Dokumen = " + j;

                    ResultSet res2 = theKoneksi.executeSelect(sql2);

                    int jumlah = 0;
                    while (res2.next()) {
                        jumlah = res2.getInt(1); // was Integer.parseInt(getString(1))
                    }

                    // Document contains this vocabulary word: emit its TF-IDF.
                    if (jumlah > 0) {
                        String sql3 = "select TF_IDF from word_frec_no_testing where Kata = '"
                                + escapeSql(isi) + "' and Nomor_Dokumen = " + j;
                        ResultSet res3 = theKoneksi.executeSelect(sql3);

                        double jumlah2 = 0.00;
                        while (res3.next()) {
                            jumlah2 = res3.getDouble(1); // was Double.parseDouble(getString(1))
                        }

                        prepare.append(jumlah2).append(',');
                    } else {
                        prepare.append("0,");
                    }
                }

                // Class label is unknown for a test document.
                prepare.append("?\n");
            }

            writeFile("test_TFIDF.csv", prepare.toString());

            CSVtoArff konv = new CSVtoArff("test_TFIDF.csv", "test_TFIDF.arff");
            konv.Convert();

            rewriteClassAttribute("test_TFIDF.arff", "test_TFIDF1.arff");

        } catch (SQLException ex) {
            Logger.getLogger(SingleTest.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /** Writes {@code content} to {@code path}; I/O failures are logged, not rethrown. */
    private static void writeFile(String path, String content) {
        BufferedWriter out = null;
        try {
            out = new BufferedWriter(new FileWriter(path));
            out.write(content);
        } catch (IOException ex) {
            Logger.getLogger(SingleTest.class.getName()).log(Level.SEVERE, null, ex);
        } finally {
            // Original leaked the writer when write() threw.
            if (out != null) {
                try {
                    out.close();
                } catch (IOException ignored) {
                    // best effort — the write error (if any) was already logged
                }
            }
        }
    }

    /**
     * Copies {@code src} to {@code dst}, replacing the generated
     * {@code class_kat} attribute line with the full nominal category list
     * Weka expects for this model.
     *
     * @throws IOException if reading {@code src} fails mid-stream
     */
    private static void rewriteClassAttribute(String src, String dst) throws IOException {
        BufferedReader in = null;
        PrintWriter writer = null;
        try {
            in = new BufferedReader(new FileReader(src));
            writer = new PrintWriter(new BufferedWriter(new FileWriter(dst)));
            String line;
            while ((line = in.readLine()) != null) {
                if (line.contains("class_kat")) {
                    line = "@attribute class_kat {Agama,Ekonomi,Infotainment,Olahraga,Politik,Teknologi,Budaya,Hukum,Kesehatan}";
                }
                writer.println(line);
            }
        } catch (FileNotFoundException ex) {
            Logger.getLogger(SingleTest.class.getName()).log(Level.SEVERE, null, ex);
        } finally {
            // Original leaked both streams on any exception path.
            if (writer != null) {
                writer.close();
            }
            if (in != null) {
                try {
                    in.close();
                } catch (IOException ignored) {
                    // best effort
                }
            }
        }
    }

    /**
     * Doubles single quotes so a term cannot terminate its SQL string
     * literal early. NOTE(review): a PreparedStatement with '?' placeholders
     * would be the proper fix, but KoneksiAini only accepts raw SQL strings.
     */
    private static String escapeSql(String value) {
        return value == null ? "" : value.replace("'", "''");
    }

    /**
     * Classifies the single instance in {@code test_TFIDF1.arff} with the
     * model stored at {@code pathModel}.
     *
     * @param pathModel path to a serialized Weka classifier file
     * @return the predicted class label, or "" if the ARFF file could not be read
     * @throws Exception if Weka fails during classification
     */
    public static String TestSingle(String pathModel) throws Exception{
        String hasil = "";
        ArffLoader testLoader = new ArffLoader();

        try {
            testLoader.setSource(new File("test_TFIDF1.arff"));
            Instances test = testLoader.getDataSet();
            // The class attribute is the last column (class_kat).
            test.setClassIndex(test.numAttributes() - 1);

            SerializedClassifier nb = LoadModel(pathModel);

            // A single-document test file contains exactly one instance.
            double pred = nb.classifyInstance(test.instance(0));
            hasil = test.classAttribute().value((int) pred);
        } catch (IOException ex) {
            // was Logger.getLogger(MainV6_Evaluate.class...)
            Logger.getLogger(SingleTest.class.getName()).log(Level.SEVERE, null, ex);
        }

        return hasil;
    }

    /**
     * Wraps the serialized model file at {@code pathToModel} in a
     * lazily-loading Weka {@link SerializedClassifier}.
     */
    public static SerializedClassifier LoadModel(String pathToModel){
        SerializedClassifier classifier = new SerializedClassifier();
        classifier.setModelFile(new File(pathToModel));

        return classifier;
    }

    /**
     * Runs the full text preprocessing chain (lower-casing, filtering, term
     * extraction, stemming) over {@code text}.
     *
     * @param text raw document text
     * @return the resulting term vector, or {@code null} when the dictionary
     *         database cannot be reached
     */
    public static Vektor preprocessing(String text) {
        try {
            KoneksiAini theKoneksi = new KoneksiAini();
            final int pilihanStemmer = 3; // stemmer variant #3 — TODO confirm which algorithm this selects
            theKoneksi.connectFirst();
            Terms terms = new Terms(theKoneksi);
            Kamus kamus = new Kamus(theKoneksi);
            kamus.initialize();
            Dokumen dok = new Dokumen("1", text, kamus, terms, true);
            dok.doLowerCaseIsi();
            dok.doFiltering();
            dok.doTermExtraction();
            dok.doStemming(pilihanStemmer);
            return dok.getVec();

        } catch (SQLException ex) {
            Logger.getLogger(SingleTest.class.getName()).log(Level.SEVERE, null, ex);
        }
        return null;
    }

    /**
     * Recomputes TF_IDF = Berat * IDF for every vocabulary word.
     *
     * Bug fix: the original concatenated {@code "... Berat * <idf>where Kata ..."}
     * with no space before {@code where}, producing invalid SQL for every row
     * (the failure was only logged, so weights were silently never updated).
     */
    public static void UpdateTFIDF(){
        KoneksiAini theKoneksi = new KoneksiAini();
        try {
            theKoneksi.connectFirst();
            ResultSet res = theKoneksi.executeSelect("Select Word, IDF from Word_IDF");
            while (res.next()) {
                String kata = res.getString(1);
                double idf = res.getDouble(2); // was Double.parseDouble(getString(2))

                String sql2 = "Update word_frec_no_testing set TF_IDF = Berat * " + idf
                        + " where Kata = '" + escapeSql(kata) + "'";
                theKoneksi.executeUpdate(sql2);

            }
        } catch (SQLException ex) {
            // was Logger.getLogger(MainV2_TFIDF_Training.class...)
            Logger.getLogger(SingleTest.class.getName()).log(Level.SEVERE, null, ex);
        }

    }
}
