package ac.manchester.cs.afzal.tcm_processing.terms_sentence_handler;


import ac.manchester.cs.afzal.tcm_processing.miscellaneous.File_Manager_Impl;
import ac.manchester.cs.afzal.tcm_processing.miscellaneous.Vector_Builder_Impl;
import ac.manchester.cs.afzal.tcm_processing.sql_handler.SQL_Handler_Terms_Impl;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Properties;
import java.util.Vector;
import java.util.logging.Level;
import java.util.logging.Logger;


/**
 * @author Hammad Afzal
 * @version 1.0
  */


/**
 * Maps terms extracted from {@code .clist} map files into the database:
 * inserts each term into the "Terms" table and each (term, document,
 * occurrence) triple into the "Term_Mapping" table, and can mark seed
 * terms as "task" terms.
 */
public class Term_Mapping_Impl
{
    /*
     * Configuration read from Term_Classification.properties.
     * NOTE: each field below is initialised to its own property KEY and is
     * overwritten with the property VALUE in the constructor. If the
     * properties file cannot be loaded, the fields keep the key strings;
     * if a key is absent, the field becomes null.
     */
    private String MODE = "MODE";                      // expected "Corpus" or "Training" (see terms_mapping)
    private String database_url = "database_url";      // JDBC URL for the terms database
    private String username = "username";              // database user
    private String password = "password";              // database password

    private String MAP_CORPUS = "MAP_CORPUS";          // directory holding corpus .clist map files
    private String MAP_TRAINING = "MAP_TRAINING";      // directory holding training .clist map files
    private String SEED_FILE_PATH = "SEED_FILE_PATH";  // file of seed terms, one per CRLF-separated line

    private File_Manager_Impl fileManager = new File_Manager_Impl();
    private Vector_Builder_Impl vectorBuilder = new Vector_Builder_Impl();
    // Assigned only inside terms_mapping(); remains null until that method runs.
    private SQL_Handler_Terms_Impl sqlHandler;

    /**
     * Loads database credentials and file-system paths from
     * "Term_Classification.properties" in the current working directory.
     * On IOException the error is logged and the fields keep their
     * default key-name values (see field comments above).
     */
    public Term_Mapping_Impl()
    {
        Properties props = new Properties();
        try
        {
            // NOTE(review): the FileInputStream is never closed — confirm
            // whether a try-with-resources is wanted here.
            props.load(new FileInputStream("Term_Classification.properties"));
        
            // Each field currently holds its own property key; replace it
            // with the looked-up value.
            database_url = props.getProperty(database_url);
            username = props.getProperty(username);
            password = props.getProperty(password);

            MAP_TRAINING = props.getProperty(MAP_TRAINING);
            MAP_CORPUS = props.getProperty(MAP_CORPUS);
            SEED_FILE_PATH = props.getProperty(SEED_FILE_PATH);
            MODE = props.getProperty(MODE);
        }
        catch (IOException ex)
        {
            Logger.getLogger(Term_Mapping_Impl.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /**
     * Parses every file in the directory selected by MODE, extracts the
     * {@code <term>}/{@code <occurence>} pairs from each file, inserts each
     * term into the "Terms" table and each (term id, doc id, occurrence id)
     * triple into the "Term_Mapping" table. Both tables are reset first.
     * Prints a count of successful and unsuccessful mappings when done.
     *
     * @param terms_mode when "unified_terms", every row uses doc_id 0;
     *                   otherwise the doc id is looked up per file name
     */
    public void terms_mapping(String terms_mode)
    {
        String[] tables = {"terms", "term_mapping"};
        sqlHandler = new SQL_Handler_Terms_Impl();
        sqlHandler.initialize_parameters(database_url, username, password);
        sqlHandler.connect_db();
        sqlHandler.reset_tables(tables);

        int count1 = 0;  // mappings skipped because the term id lookup returned 0
        int count2 = 0;  // rows successfully inserted into Term_Mapping
        StringBuffer contents;
        File file_dir;

        String[] list_files;
        Vector normalized_terms, term_occurences;
        Vector[] terms_occurences_array;
        
        // MODE == "Corpus" reads the corpus directory; anything else falls
        // back to the training directory.
        file_dir = MODE.equals("Corpus") ? new File(MAP_CORPUS) : new File(MAP_TRAINING);
        // NOTE(review): File.list() returns null when the directory does not
        // exist or is not readable — that would NPE in the loop below; confirm
        // the path is guaranteed to exist.
        list_files = file_dir.list();

        for(String file_name : list_files)
        {
            int doc_id = 0;

            // NOTE(review): this branch tests "Training" while the directory
            // choice above tests "Corpus" — the two defaults only agree when
            // MODE is exactly one of those two values; verify.
            contents = (MODE.equals("Training"))? fileManager.fileReader(MAP_TRAINING + "//" + file_name):fileManager.fileReader(MAP_CORPUS + "//" + file_name);
            normalized_terms = new Vector();
            term_occurences = new Vector();

            // Strip the ".clist" extension; the bare name is the document key.
            // (Throws StringIndexOutOfBoundsException if ".clist" is absent.)
            file_name = file_name.substring(0, file_name.indexOf(".clist"));
            doc_id = (terms_mode.equals("unified_terms")) ? 0 : sqlHandler.get_document_id_per_doc_value(file_name);

            int index, index1, index2, index3;
            String str = "";
            index = 0;
            int no_of_terms = 0;

            /************************ This loop counts number of terms in a document ****************************/
            // One <map>...</map> element per term; offsets 5/6 are the lengths
            // of "<map>" and "</map>".
            while((index = contents.indexOf("<map>", index)) != -1)
            {
                index = index + 5;
                index1 = contents.indexOf("</map>", index);
                index = index1 + 6;
                no_of_terms ++;
            }
            terms_occurences_array = new Vector[no_of_terms];

            int i = 0;
            index = 0;
            /************************ This loop makes Vector of Real Terms and Occurences and put in Real_Terms Database ****************************/
            while((index = contents.indexOf("<term>", index)) != -1)
            {
                i ++;
                index = index + 6;               // skip "<term>" (6 chars)
                index1 = contents.indexOf("</term>", index);
                str = contents.substring(index, index1);
                // Apostrophes are stripped, presumably as ad-hoc escaping for
                // SQL string literals. NOTE(review): prepared statements would
                // be the safe fix — confirm how insert_table builds its SQL.
                normalized_terms.add(str.replaceAll("'", ""));


                String[] doc_table1 =
                {
                    "Value", "Doc_ID", "Type"
                };
                String[] doc_table_value1 =
                {
                    str, Integer.toString(doc_id), MODE
                };

                sqlHandler.insert_table("Terms", doc_table1, doc_table_value1);

                index = index1 + 7;              // skip "</term>" (7 chars)
                index2 = contents.indexOf("<occurence>", index);
                index3 = contents.indexOf("</occurence>", index2 + 11);
                // NOTE(review): "</occurence>" is 12 chars, so +17 skips 5
                // extra characters after the closing tag — confirm against the
                // actual .clist layout that this is intentional.
                index = index3 + 17;

                // The +12 / -1 offsets drop one character on each side of the
                // occurrence text — presumably a surrounding quote pair in the
                // file format; TODO confirm.
                term_occurences.add(contents.substring(index2 + 12, index3 - 1));
            }
            /************************ This loop make entries into Term Mapping ****************************/
            for(i = 0; i < no_of_terms; i ++)
            {
                String term = normalized_terms.elementAt(i).toString();
                int normalized_term_id;

                // The occurrence field holds a comma-separated list of values;
                // split it into one Vector per term.
                terms_occurences_array[i] = new Vector();
                terms_occurences_array[i] = vectorBuilder.string_to_vector(term_occurences.elementAt(i).toString(), ",");

                for(int k = 0; k < terms_occurences_array[i].size(); k ++)
                {
                    String value_occurrence = terms_occurences_array[i].elementAt(k).toString();
                    value_occurrence = value_occurrence.replaceAll("'", "");
                    Vector occurrence_ids = sqlHandler.get_occurrence_id_per_occurrence_value(value_occurrence, Integer.toString(doc_id));
                    // NOTE(review): doc_ids is fetched but never used — dead call?
                    Vector doc_ids = sqlHandler.get_doc_ids();

                    for(Object occurrence_id: occurrence_ids)
                    {
                        String occurrence_id_string = occurrence_id.toString();
                        normalized_term_id = sqlHandler.get_term_id(term, doc_id);
                        // NOTE(review): term is overwritten with the numeric id
                        // here, so on every later iteration get_term_id() is
                        // called with the previous ID string instead of the
                        // original term text — verify this is intended.
                        term = Integer.toString(normalized_term_id);

                        if(normalized_term_id == 0)
                        {
                            // No matching term id found — count as a failed mapping.
                            count1 ++;
                        }
                        else
                        {
                            String[] doc_table1 =
                            {
                                "Term_ID", "Doc_ID", "Occurrence_ID"
                            };
                            String[] doc_table_value1 =
                            {
                                term, Integer.toString(doc_id), occurrence_id_string
                            };
                            sqlHandler.insert_table("Term_Mapping", doc_table1, doc_table_value1);
                            count2 ++;
                        }
                    }
                }
            }
        }
        System.out.println("Unsuccessful Mappings  : " + count1 + " Successful Mappings : " + count2);
    }

    /**
     * Reads seed terms (one per CRLF-separated line) from SEED_FILE_PATH and
     * marks the matching term rows as "task" terms, both via their occurrence
     * ids and directly by value.
     * NOTE(review): this method uses sqlHandler, which is assigned only inside
     * terms_mapping(); calling loadSeedTerms() first throws a
     * NullPointerException — confirm the intended call order.
     */
    public void loadSeedTerms()
    {
        Vector seed_terms = new Vector();
        Vector occurrence_ids = new Vector();

        StringBuffer contents;

        contents = fileManager.fileReader(SEED_FILE_PATH);
        seed_terms = vectorBuilder.string_to_vector(contents.toString(), "\r\n");

        // Pass 1: for every occurrence of a seed term, flag the mapped term
        // id as a "task" term.
        for (int i = 0; i < seed_terms.size();i++)
        {
            String seed_term = seed_terms.elementAt(i).toString();
            occurrence_ids = sqlHandler.get_occurrence_ids_per_occurrence_values(seed_term);

            for (int j = 0; j < occurrence_ids.size();j++)
            {
                String occurrence_id = occurrence_ids.elementAt(j).toString();
                int term_id = sqlHandler.get_term_id_from_map(occurrence_id);
                sqlHandler.update_term_table(term_id, "task");
            }
        }
        // Pass 2: flag the term rows directly by value (presumably a String
        // overload of update_term_table — TODO confirm), then quote each term
        // in the Vector; the quoting only affects the println below.
        for(int i = 0; i < seed_terms.size(); i ++)
        {
            String seed_term = seed_terms.elementAt(i).toString();
            sqlHandler.update_term_table(seed_term, "task");
            seed_term = "'" + seed_term + "'";
            seed_terms.setElementAt(seed_term, i);
        }
        System.out.println(seed_terms.toString());
    }
}