/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */

package getrootbahasa;

/**
 *
 * @author WaOnEmperoR
 */
import com.mysql.jdbc.PreparedStatement;
import com.stemaini.core.doc.Dokumen;
import com.stemaini.core.doc.Terms;
import com.stemaini.core.doc.Vektor;
import com.stemaini.core.database.Kamus;
import com.stemaini.core.doc.VektorNode;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import taini.koneksi.KoneksiAini;
import weka.classifiers.bayes.NaiveBayes;
import weka.classifiers.misc.SerializedClassifier;
import weka.core.Debug;
import weka.core.Instances;
import weka.core.converters.ArffLoader;

/**
 * Pipeline driver: reads Indonesian news articles ("Berita") from the
 * database, preprocesses each into a set of stemmed terms, builds a boolean
 * term/document matrix (CSV), converts it to Weka ARFF, and serializes the
 * attribute column set for later classification with Naive Bayes.
 */
public class Main {

    // Instance state below is unused by the static pipeline in this file;
    // retained for backward compatibility with any package-level callers.
    Kamus kamus;
    Terms terms;
    KoneksiAini theKoneksi;

    /** Stemmer choice: no stemming (now a constant, consistent with its siblings). */
    public final static int TANPA_STEMMER = 1;
    /** Stemmer choice: standard confix-stripping stemmer. */
    public final static int STANDARD_CS_STEMMER = 2;
    /** Stemmer choice: enhanced confix-stripping stemmer. */
    public final static int ENHANCED_CS_STEMMER = 3;

    private int pilihanStemmer;
    private LinkedList<String> daftarDok;
    private boolean isUsingStoplist;
    private String skk = "Hai Manusia";

    /**
     * Builds a boolean term/document matrix from the "Berita" table, writes it
     * as prepare.csv, converts it to dataset.arff, and serializes the set of
     * attribute columns to atribut.kolom (then reads it back as a smoke test).
     *
     * @param args command line arguments (unused)
     * @throws IOException if an intermediate file cannot be written or read
     */
    public static void main(String[] args) throws IOException {
        KoneksiAini theKoneksi = new KoneksiAini();

        List<Set<String>> wordPerDoc = new ArrayList<Set<String>>();
        List<String> listKategori = new ArrayList<String>();
        Set<String> largeSet = new HashSet<String>();

        int counter = 1;
        try {
            theKoneksi.connectFirst();
            String sql = "Select Isi, Kategori from Berita";
            ResultSet res = theKoneksi.executeSelect(sql);
            while (res.next()) {
                String isi = res.getString(1);
                String kat = res.getString(2);

                System.out.println(counter++);

                Set<String> kumpulan = GetWords(isi);
                wordPerDoc.add(kumpulan);
                largeSet.addAll(kumpulan); // set union replaces the manual contains/add loop
                listKategori.add(kat);
            }
        } catch (SQLException ex) {
            Logger.getLogger(Main.class.getName()).log(Level.SEVERE, null, ex);
        }

        // Header row: one column per unique term, then the class column.
        // StringBuilder avoids the O(n^2) cost of repeated String concatenation.
        StringBuilder prepare = new StringBuilder();
        for (Iterator itr = largeSet.iterator(); itr.hasNext();) {
            prepare.append(itr.next().toString()).append(",");
        }
        prepare.append("waonemperor").append("\n");

        System.out.println("Jumlah kata unik : " + largeSet.size());

        // One data row per document: a Y/N flag per term, then its category label.
        for (int j = 0; j < wordPerDoc.size(); j++) {
            System.out.println("sudah sampai : " + (j + 1));
            Set<String> doc = wordPerDoc.get(j);
            for (Iterator itr = largeSet.iterator(); itr.hasNext();) {
                prepare.append(doc.contains(itr.next().toString()) ? "Y," : "N,");
            }
            prepare.append(listKategori.get(j)).append("\n");
        }
        System.out.println(prepare);

        BufferedWriter csvOut = null;
        try {
            csvOut = new BufferedWriter(new FileWriter("prepare.csv"));
            csvOut.write(prepare.toString());
        } catch (IOException ex) {
            Logger.getLogger(Main.class.getName()).log(Level.SEVERE, null, ex);
        } finally {
            if (csvOut != null) {
                csvOut.close(); // was leaked when write() threw
            }
        }

        CSVtoArff konv = new CSVtoArff("prepare.csv", "dataset.arff");
        konv.Convert();

        ObjectOutputStream oout = null;
        try {
            oout = new ObjectOutputStream(new FileOutputStream("atribut.kolom"));
            oout.writeObject(largeSet);
            oout.flush();
        } catch (FileNotFoundException ex) {
            Logger.getLogger(Main.class.getName()).log(Level.SEVERE, null, ex);
        } finally {
            if (oout != null) {
                oout.close(); // original never closed this stream
            }
        }

        // Read the attribute set back to verify the serialization round-trips.
        ObjectInputStream ois = new ObjectInputStream(new FileInputStream("atribut.kolom"));
        try {
            Set<String> hasil = (Set<String>) ois.readObject();
        } catch (ClassNotFoundException ex) {
            Logger.getLogger(Main.class.getName()).log(Level.SEVERE, null, ex);
        } finally {
            ois.close(); // original leaked the input stream
        }
    }

    /**
     * Trains a Naive Bayes classifier on the given ARFF training file (the
     * last attribute is the class) and saves the model as "trainModelNB".
     *
     * @param dataTraining path to the ARFF training set
     * @throws Exception if Weka fails to build the classifier
     */
    public static void TrainAndSaveModel(String dataTraining) throws Exception {
        NaiveBayes nb = new NaiveBayes();
        try {
            ArffLoader loader = new ArffLoader();
            loader.setSource(new File(dataTraining));
            Instances train = loader.getDataSet();
            train.setClassIndex(train.numAttributes() - 1); // class = last column
            nb.buildClassifier(train);

            Debug.saveToFile("trainModelNB", nb);
        } catch (IOException ex) {
            Logger.getLogger(Main.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /**
     * Wraps a previously saved model file in a Weka SerializedClassifier.
     *
     * @param pathToModel path of the serialized model file
     * @return a classifier backed by the given model file
     */
    public static SerializedClassifier LoadModel(String pathToModel) {
        SerializedClassifier classifier = new SerializedClassifier();
        classifier.setModelFile(new File(pathToModel));
        return classifier;
    }

    /**
     * Counts occurrences of each word, case-insensitively, in O(n) instead of
     * the previous O(n^2) nested scan. The original contract is preserved:
     * every distinct case-sensitive spelling becomes a key, and each key maps
     * to the number of case-insensitive occurrences of that word.
     * (Uses toLowerCase() as the folding; equivalent to the old
     * equalsIgnoreCase() comparison for typical Latin-script input.)
     *
     * @param word the words to count (may contain repeats in varying case)
     * @return map from each distinct spelling to its case-insensitive count
     */
    public static HashMap<String, Integer> countWord(String word[]) {
        // Pass 1: case-insensitive totals keyed by the lower-cased form.
        HashMap<String, Integer> lowerCounts = new HashMap<String, Integer>();
        for (int i = 0; i < word.length; i++) {
            String key = word[i].toLowerCase();
            Integer c = lowerCounts.get(key);
            lowerCounts.put(key, c == null ? 1 : c + 1);
        }

        // Pass 2: expose the totals under each distinct original-case spelling.
        HashMap<String, Integer> result = new HashMap<String, Integer>();
        for (int i = 0; i < word.length; i++) {
            if (!result.containsKey(word[i])) {
                result.put(word[i], lowerCounts.get(word[i].toLowerCase()));
            }
        }

        return result;
    }

    /**
     * Preprocesses the text and renders each resulting term as "word->freq".
     *
     * @param str raw document text
     * @return space-separated "word->freq" pairs; empty string when
     *         preprocessing fails (e.g. the database is unreachable)
     */
    public static String TextProcessing(String str) {
        Vektor vektor = preprocessing(str);
        StringBuilder hasil = new StringBuilder();

        if (vektor != null) { // preprocessing returns null on SQLException
            for (int i = 0; i < vektor.getSize(); i++) {
                VektorNode vn = vektor.getTermNode(i);
                hasil.append(vn.getWord()).append("->").append(vn.getFreq()).append(" ");
            }
        }
        return hasil.toString();
    }

    /**
     * Preprocesses the text and returns the set of distinct resulting terms.
     *
     * @param str raw document text
     * @return set of unique stemmed terms; empty set when preprocessing fails
     */
    public static Set<String> GetWords(String str) {
        Set<String> newSet = new HashSet<String>();

        Vektor vektor = preprocessing(str);
        if (vektor != null) { // avoid the NPE the original hit on DB failure
            for (int i = 0; i < vektor.getSize(); i++) {
                newSet.add(vektor.getTermNode(i).getWord());
            }
        }
        return newSet;
    }

    /**
     * Runs the full preprocessing pipeline against a fresh DB-backed
     * dictionary: lower-casing, filtering, term extraction, then enhanced
     * confix-stripping stemming.
     *
     * @param text raw document text
     * @return the resulting term vector, or null when the database is
     *         unreachable (callers must handle null)
     */
    public static Vektor preprocessing(String text) {
        try {
            KoneksiAini theKoneksi = new KoneksiAini();
            theKoneksi.connectFirst();
            Terms terms = new Terms(theKoneksi);
            Kamus kamus = new Kamus(theKoneksi);
            kamus.initialize();
            Dokumen dok = new Dokumen("1", text, kamus, terms, true);
            dok.doLowerCaseIsi();
            dok.doFiltering();
            dok.doTermExtraction();
            dok.doStemming(ENHANCED_CS_STEMMER); // was a magic number 3
            return dok.getVec();

        } catch (SQLException ex) {
            Logger.getLogger(Main.class.getName()).log(Level.SEVERE, null, ex);
        }
        return null;
    }

}
