package ac.manchester.cs.afzal.tcm_processing.sql_handler;

import java.io.FileInputStream;
import java.io.IOException;
import java.sql.*;
import java.util.*;
import java.text.DecimalFormat;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * Title: TCM - Processing
 *
 * @author Hammad Afzal
 * @version 1.0
 * Last Modification Date: 16th Jan, 2013
 */
public class SQL_Handler_Comparisons_Impl extends SQL_Handler_Impl
{

    // Term_IDs of corpus terms, parallel to whole_corpus_terms.
    private Vector term_ids = new Vector();
    // Term_IDs of training (seed) terms, parallel to whole_training_terms.
    private Vector training_term_ids = new Vector();
    // NOTE(review): populated nowhere in this chunk — possibly filled elsewhere; confirm.
    private Vector test_term_ids = new Vector();
    // Surface strings of corpus terms loaded by get_all_corpus_terms()/get_selective_corpus_terms().
    private Vector whole_corpus_terms = new Vector();
    // Surface strings of training terms loaded by get_all_training_terms().
    private Vector whole_training_terms = new Vector();
    // Pooled context profile of ALL seed terms (nouns / verbs / left patterns).
    private Vector whole_seed_context_nouns = new Vector();
    private Vector whole_seed_context_verbs = new Vector();
    private Vector whole_seed_context_patterns = new Vector();
    // Per-term context profiles: index i corresponds to the i-th corpus/training term.
    private Vector[] context_nouns, context_verbs, context_patterns,
            seed_context_nouns, seed_context_verbs, seed_context_patterns;

    /**
     * Loads the application properties file on construction.
     * NOTE(review): the loaded Properties object is local and immediately
     * discarded — presumably the superclass was meant to consume it; confirm
     * whether this load has any effect beyond validating that the file exists.
     */
    public SQL_Handler_Comparisons_Impl()
    {
        FileInputStream in = null;
        try
        {
            Properties props = new Properties();
            in = new FileInputStream("Term_Classification.properties");
            props.load(in);
        }
        catch (IOException ex)
        {
            Logger.getLogger(SQL_Handler_Comparisons_Impl.class.getName()).log(Level.SEVERE, null, ex);
        }
        finally
        {
            // The original leaked the stream; always release the file handle.
            if (in != null)
            {
                try
                {
                    in.close();
                }
                catch (IOException ignored)
                {
                    // best-effort close; nothing useful to do here
                }
            }
        }
    }

    /**
     * Returns stem value for the respective noun/verb
     * @param noun_verb
     * @param mode
     * @return
     */
    /**
     * Returns the stem mapped to the given noun or verb.
     *
     * @param noun_verb the surface form to look up
     * @param mode      "noun" selects table Stem_Map_Ns; any other value selects Stem_Map_Vs
     * @return the mapped stem, or the input string unchanged when no mapping
     *         exists or the query fails
     */
    public String get_stem(String noun_verb, String mode)
    {
        String stem = "";
        // Bind the word as a parameter; the original concatenated it into the
        // SQL string, which breaks on apostrophes and allows SQL injection.
        String query = mode.equals("noun")
                ? "SELECT Stem FROM Stem_Map_Ns where Noun = ?"
                : "SELECT Stem FROM Stem_Map_Vs where Verb = ?";
        try
        {
            PreparedStatement ps = connection.prepareStatement(query);
            ps.setString(1, noun_verb);
            ResultSet rs = ps.executeQuery();
            while (rs.next())
            {
                stem = rs.getString("Stem");
            }
            rs.close();
            ps.close();
        }
        catch (SQLException sqlex)
        {
            System.out.println("Value of SQL Exception in getting Stem : " + sqlex.getMessage());
        }

        // Fall back to the original token when no stem mapping was found.
        if (stem.equals(""))
        {
            return noun_verb;
        }
        return stem;
    }

    /**
     * Returns Doc_ID when document name is given
     * @param document_name
     * @return
     */
    /**
     * Returns the Doc_ID for the given document name.
     *
     * @param document_name value of the Name column in the documents table
     * @return the matching Doc_ID, or 0 when no row matches or the query fails
     */
    public int get_document_id(String document_name)
    {
        int doc_id = 0;
        try
        {
            // Parameterized lookup; the original concatenated the name into
            // the SQL string (injection-prone, breaks on quotes).
            PreparedStatement ps = connection.prepareStatement(
                    "SELECT Doc_ID FROM documents WHERE Name = ?");
            ps.setString(1, document_name);
            ResultSet rs = ps.executeQuery();
            while (rs.next())
            {
                doc_id = rs.getInt("Doc_ID");
            }
            rs.close();
            ps.close();
        }
        catch (SQLException sqlex)
        {
            System.out.println("Value of SQL Exception in getting Document ID from PMCID : " + sqlex.getMessage());
        }
        return doc_id;
    }

    /**
     * Returns Sen_ID when Doc_ID and Local_Sen_ID are given
     * @param doc_id
     * @param local_sen_id
     * @return
     */
    /**
     * Returns the global Sen_ID for a sentence identified by its document and
     * document-local sentence number.
     *
     * @param doc_id       Doc_ID of the containing document
     * @param local_sen_id sentence number within that document
     * @return the matching Sen_ID, or 0 when no row matches or the query fails
     */
    public int get_sentence_id(int doc_id, int local_sen_id)
    {
        int sen_id = 0;
        try
        {
            // Bind both ids as parameters instead of concatenating them.
            PreparedStatement ps = connection.prepareStatement(
                    "SELECT Sen_ID FROM Sentences WHERE Doc_ID = ? and Local_Sen_ID = ?");
            ps.setInt(1, doc_id);
            ps.setInt(2, local_sen_id);
            ResultSet rs = ps.executeQuery();
            while (rs.next())
            {
                sen_id = rs.getInt("Sen_ID");
            }
            rs.close();
            ps.close();
        }
        catch (SQLException sqlex)
        {
            // Original message had a typo ("Sentenec"); corrected.
            System.out.println("SQL Exception: Sentence ID from Document ID and Local Sentence ID : " + sqlex.getMessage());
        }

        return sen_id;
    }

    /**
     * Returns Sen_ID when Doc_ID and Local_Sen_ID are given
     * @param sen_id
     * @return
     */
    /**
     * Returns the Occurrence_ID associated with the given sentence.
     *
     * @param sen_id global sentence id (Sen_ID in the Occurrences table)
     * @return the matching Occurrence_ID (the last one, if several rows match),
     *         or 0 when no row matches or the query fails
     */
    public int get_occurrence_id(int sen_id)
    {
        int occurrence_id = 0;
        try
        {
            // Parameterized instead of string-concatenated SQL.
            PreparedStatement ps = connection.prepareStatement(
                    "SELECT Occurrence_ID FROM Occurrences WHERE Sen_ID= ?");
            ps.setInt(1, sen_id);
            ResultSet rs = ps.executeQuery();
            while (rs.next())
            {
                occurrence_id = rs.getInt("Occurrence_ID");
            }
            rs.close();
            ps.close();
        }
        catch (SQLException sqlex)
        {
            // Original message was copy-pasted from get_sentence_id; corrected.
            System.out.println("SQL Exception: Occurrence ID from Sentence ID : " + sqlex.getMessage());
        }

        return occurrence_id;
    }

    /**
     * Returns Noun_ID when noun string is given
     * @param noun
     * @return
     */
    /**
     * Returns the Noun_ID for the given noun string.
     *
     * @param noun value of the Value column in the Nouns table
     * @return the matching Noun_ID, or 0 when no row matches or the query fails
     */
    public int get_noun_id(String noun)
    {
        int noun_id = 0;
        try
        {
            // Bind the noun as a parameter; concatenation was injection-prone
            // and broke on words containing apostrophes.
            PreparedStatement ps = connection.prepareStatement(
                    "SELECT Noun_ID FROM Nouns WHERE Value = ?");
            ps.setString(1, noun);
            ResultSet rs = ps.executeQuery();
            while (rs.next())
            {
                noun_id = rs.getInt("Noun_ID");
            }
            rs.close();
            ps.close();
        }
        catch (SQLException sqlex)
        {
            System.out.println("SQL Exception: Getting Noun ID from value of noun : " + sqlex.getMessage());
        }
        return noun_id;
    }

    /**
     * Returns Verb_ID when verb string is given
     * @param verb_string
     * @return
     */
    /**
     * Returns the Verb_ID for the given verb string.
     *
     * @param verb_string value of the Value column in the Verbs table
     * @return the matching Verb_ID, or 0 when no row matches or the query fails
     */
    public int get_verb_id(String verb_string)
    {
        int verb_id = 0;
        try
        {
            // Bind the verb as a parameter; concatenation was injection-prone
            // and broke on words containing apostrophes.
            PreparedStatement ps = connection.prepareStatement(
                    "SELECT Verb_ID FROM Verbs WHERE Value = ?");
            ps.setString(1, verb_string);
            ResultSet rs = ps.executeQuery();
            while (rs.next())
            {
                verb_id = rs.getInt("Verb_ID");
            }
            rs.close();
            ps.close();
        }
        catch (SQLException sqlex)
        {
            System.out.println("SQL Exception: Getting Verb ID from value of verb : " + sqlex.getMessage());
        }
        return verb_id;
    }

    /**
     * Returns all Documents names which have Type = Corpus
     * @return
     */
    /**
     * Fetches the distinct names of every document whose Type is 'Corpus'.
     *
     * @return vector of document names; empty when none exist or the query fails
     */
    public Vector<String> get_all_corpus_documents()
    {
        Vector<String> documentNames = new Vector();
        try
        {
            statement = connection.createStatement();
            ResultSet resultSet = statement.executeQuery(
                    "SELECT distinct Name FROM Documents where Type = 'Corpus'");
            while (resultSet.next())
            {
                documentNames.add(resultSet.getString("Name"));
            }
            statement.close();
        }
        catch (SQLException sqlex)
        {
            System.out.println("SQL Exception: Get list of all Corpus Documents: " + sqlex.getMessage());
        }
        return documentNames;
    }

    /**
     * Retrieves all corpus terms from Terms table
     * @return
     */
    /**
     * Loads every corpus term that is not also a training term and contains
     * no '.' character. Term strings are appended to whole_corpus_terms and
     * their ids to term_ids (which is cleared first, keeping both parallel).
     */
    public void get_all_corpus_terms()
    {
        term_ids.clear();
        final String query =
                "SELECT Term_ID, Value FROM Terms where Type = 'Corpus' and Value NOT LIKE '%.%' and "
                + "Value NOT IN (SELECT distinct Value FROM Terms where Type = 'Training')";
        try
        {
            statement = connection.createStatement();
            ResultSet resultSet = statement.executeQuery(query);
            while (resultSet.next())
            {
                whole_corpus_terms.add(resultSet.getString("Value"));
                term_ids.add(resultSet.getInt("Term_ID"));
            }
            statement.close();
        }
        catch (SQLException sqlex)
        {
            System.out.println("SQL Exception: Get Real Term Per Document: " + sqlex.getMessage());
        }
    }

    /**
     * Returns all Documents names which have Type = Training
     * @return
     */
    /**
     * Loads every training (seed) term whose value contains no '.' character.
     * Term strings are appended to whole_training_terms and their ids to
     * training_term_ids (cleared first, keeping both vectors parallel).
     */
    public void get_all_training_terms()
    {
        training_term_ids.clear();
        final String query =
                "SELECT Term_ID, Value FROM Terms where Type = 'Training' and Value NOT LIKE '%.%' ";
        try
        {
            statement = connection.createStatement();
            ResultSet resultSet = statement.executeQuery(query);
            while (resultSet.next())
            {
                // Column label lookup is case-insensitive in JDBC, so the
                // lowercase "value" resolves to the selected Value column.
                whole_training_terms.add(resultSet.getString("value"));
                training_term_ids.add(resultSet.getInt("Term_ID"));
            }
            statement.close();
        }
        catch (SQLException sqlex)
        {
            System.out.println("SQL Exception: Get Training Terms: " + sqlex.getMessage());
        }
    }

    /**
     * Retrieves the corpus terms which appear in the documents whose values are given in comma-separated-file
     * @param csv_document_names
     */
    /**
     * Retrieves the corpus terms which appear in the documents whose values are given in comma-separated-file
     * @param csv_document_names comma-separated, pre-quoted document values
     *        spliced directly into the IN (...) clause.
     *        WARNING(review): this is string-built SQL — the caller must supply
     *        trusted, correctly quoted values or the query breaks / is injectable.
     */
    public void get_selective_corpus_terms(String csv_document_names)
    {
        // Reset the id list; whole_corpus_terms is appended to, not cleared,
        // matching the behavior of get_all_corpus_terms().
        term_ids.clear();
        try
        {
            // Select terms from the named documents, excluding any term that
            // also occurs in a Training document.
            String query = "SELECT t.Value,t.Term_ID FROM Terms t,Documents d where t.Doc_ID = d.Doc_ID and d.Value IN (" + csv_document_names + ")"
                    + " and t.Value NOT IN (SELECT distinct t.value FROM terms t,documents d where t.Doc_ID = d.Doc_ID and d.Type = 'Training')";
            statement = connection.createStatement();
            ResultSet rs = statement.executeQuery(query);
            while (rs.next())
            {
                whole_corpus_terms.add(rs.getString("Value"));
                term_ids.add(rs.getInt("Term_ID"));
            }
            statement.close();
        }
        catch (SQLException sqlex)
        {
            System.out.println("SQL Exception: Get Real Term Per Document: " + sqlex.getMessage());
        }
    }

    /**
     * Builds all noun context profiles and then writes noun-relevance scores
     * for every corpus term to the Result_Noun table.
     * Requires get_all_training_terms() and a corpus-term loader to have run first.
     */
    public void perform_comparison_nouns()
    {
        prepare_noun_profiles_for_comparison();
        measure_noun_relevance();
    }

    /**
     * Builds all verb context profiles and then writes verb-relevance scores
     * for every corpus term to the Result_Verb table.
     * Requires get_all_training_terms() and a corpus-term loader to have run first.
     */
    public void perform_comparison_verbs()
    {
        prepare_verb_profiles_for_comparison();
        measure_verb_relevance();
    }

    /**
     * Builds all left-pattern context profiles and then writes pattern-relevance
     * scores for every corpus term to the Result_Pattern table.
     * Requires get_all_training_terms() and a corpus-term loader to have run first.
     */
    public void perform_comparison_patterns()
    {
        prepare_pattern_profiles_for_comparison();
        measure_pattern_relevance();
    }

    /**
     * Builds the noun context profiles consumed by measure_noun_relevance():
     * whole_seed_context_nouns (nouns co-occurring with any Training document),
     * context_nouns[i] (nouns in the context of the i-th corpus term), and
     * seed_context_nouns[i] (nouns in the context of the i-th training term).
     */
    private void prepare_noun_profiles_for_comparison()
    {
        seed_context_nouns = new Vector[whole_training_terms.size()];
        context_nouns = new Vector[get_whole_corpus_terms().size()];

        /************ Full context noun profile of the whole seed term set **********************/
        try
        {
            statement = connection.createStatement();
            String query = "SELECT n.Value FROM occurrences o, documents d,Nouns n, Noun_Profiles np "
                    + " where o.Doc_ID = d.Doc_ID and d.Type = 'Training' and o.Occurrence_ID "
                    + "= np.Occurrence_ID and np.Noun_ID = n.Noun_ID";

            ResultSet rs = statement.executeQuery(query);
            while (rs.next())
            {
                whole_seed_context_nouns.add(rs.getString("Value"));
            }
            rs.close();
            statement.close();
        }
        catch (SQLException sqlex)
        {
            System.out.println("SQL Exception RP_ID : " + sqlex.getMessage());
        }

        /************ Noun context of each corpus term, one vector per term **********************/
        try
        {
            String query = "SELECT n.Value FROM Terms t, Term_Mapping tm, Nouns n, Noun_Profiles np "
                    + "where t.Term_ID = ? and t.Term_ID = tm.Term_ID and tm.Occurrence_ID = "
                    + "np.Occurrence_ID and np.Noun_ID = n.Noun_ID";

            PreparedStatement ps = connection.prepareStatement(query);
            for (int i = 0; i < get_whole_corpus_terms().size(); i++)
            {
                context_nouns[i] = new Vector();
                ps.setString(1, get_term_ids().elementAt(i).toString());
                ResultSet rs = ps.executeQuery();
                while (rs.next())
                {
                    context_nouns[i].add(rs.getString("Value"));
                }
                rs.close();
            }
            // Close after reuse across the whole loop; the original leaked it.
            ps.close();
        }
        catch (SQLException sqlex)
        {
            System.out.println("SQL Exception in Getting Noun Profiles of each corpus term : " + sqlex.getMessage());
        }

        /************ Noun context of each training term, one vector per term **********************/
        try
        {
            String query = "SELECT n.Value FROM Terms t, Term_Mapping tm, Nouns n, Noun_Profiles np where "
                    + "t.Term_ID = ? and t.Term_ID = tm.Term_ID and tm.Occurrence_ID = np.Occurrence_ID "
                    + "and np.Noun_ID = n.Noun_ID";

            PreparedStatement ps = connection.prepareStatement(query);
            for (int i = 0; i < whole_training_terms.size(); i++)
            {
                seed_context_nouns[i] = new Vector();
                ps.setString(1, training_term_ids.elementAt(i).toString());
                ResultSet rs = ps.executeQuery();
                while (rs.next())
                {
                    seed_context_nouns[i].add(rs.getString("Value"));
                }
                rs.close();
            }
            // Close after reuse across the whole loop; the original leaked it.
            ps.close();
        }
        catch (SQLException sqlex)
        {
            System.out.println("SQL Exception in Getting Noun Profiles of each training term : " + sqlex.getMessage());
        }
    }

    /**
     * Computes three Dice-style noun-relevance scores for every corpus term
     * against the seed (training) terms, and inserts one Result_Noun row per
     * corpus term:
     *  - Relevance_CNR_1: Dice overlap with the pooled seed noun profile,
     *  - Relevance_CNR_2: mean Dice overlap over the per-seed-term profiles,
     *  - Relevance_CNR_3: maximum Dice overlap over the per-seed-term profiles.
     * Must run after prepare_noun_profiles_for_comparison().
     */
    private void measure_noun_relevance()
    {
        Vector intersection_1 = new Vector();
        Vector normalized_relevance_1 = new Vector();
        Double[] normalized_relevance_2 = new Double[get_whole_corpus_terms().size()];
        Double[] normalized_relevance_3 = new Double[get_whole_corpus_terms().size()];

        Vector overlapped_nouns;

        int size_seed_nouns = 0;
        size_seed_nouns = whole_seed_context_nouns.size();


        /***************************** Calculation of Intersection with Whole Seed Terms ****************/
        // Multiset intersection: each pooled seed noun counts once per occurrence
        // in whole_seed_context_nouns if it appears in the term's context at all.
        for (int j = 0; j < context_nouns.length; j++)
        {
            overlapped_nouns = new Vector();
            Iterator vectorIterator = whole_seed_context_nouns.iterator();
            while (vectorIterator.hasNext())
            {
                String vector_noun = vectorIterator.next().toString();
                if (context_nouns[j].contains(vector_noun))
                {
                    overlapped_nouns.add(vector_noun);
                }
            }
            intersection_1.add(overlapped_nouns.size());
        }

        /***************************** Calculation of Intersection with Separate Seed Terms ****************/
        for (int j = 0; j < context_nouns.length; j++)
        {
            normalized_relevance_2[j] = 0.0;
            normalized_relevance_3[j] = 0.0;
            for (int l = 0; l < seed_context_nouns.length; l++)
            {
                overlapped_nouns = new Vector();
                Iterator vectorIterator = seed_context_nouns[l].iterator();
                while (vectorIterator.hasNext())
                {
                    String seed_context_noun = vectorIterator.next().toString();
                    if (context_nouns[j].contains(seed_context_noun))
                    {
                        overlapped_nouns.add(seed_context_noun);
                    }
                }
                int inter = overlapped_nouns.size();
                // Dice coefficient: 2*|A∩B| / (|A| + |B|), guarded against 0/0.
                double relevance_double = 2 * inter;
                double normalizer_double = context_nouns[j].size() + seed_context_nouns[l].size();

                relevance_double = (normalizer_double == 0) ? 0 : relevance_double / normalizer_double;
                // _2 accumulates the sum (averaged below); _3 keeps the running maximum.
                normalized_relevance_2[j] = normalized_relevance_2[j] + relevance_double;
                normalized_relevance_3[j] = (relevance_double < normalized_relevance_3[j]) ? normalized_relevance_3[j] : relevance_double;
            }
            normalized_relevance_2[j] = normalized_relevance_2[j] / whole_training_terms.size();
        }

        /************* Calculation of relevance values for Whole Seed Terms ******************/
        for (int k = 0; k < context_nouns.length; k++)
        {
            double relevance_double = 2 * Double.parseDouble(intersection_1.elementAt(k).toString());
            double normalizer_double = context_nouns[k].size() + size_seed_nouns;
            // NOTE(review): unlike the inner loop above, this division is not
            // guarded — if both profiles are empty it yields NaN, stored as "NaN".
            relevance_double = relevance_double / normalizer_double;
            normalized_relevance_1.add(Double.toString(relevance_double));
        }

        // Persist one row per corpus term with all three scores.
        for (int i = 0; i < get_whole_corpus_terms().size(); i++)
        {
            String relevance_str_1 = normalized_relevance_1.elementAt(i).toString();
            String relevance_str_2 = normalized_relevance_2[i].toString();
            String relevance_str_3 = normalized_relevance_3[i].toString();
            String term_id = get_term_ids().elementAt(i).toString();

            String[] doc_table1 =
            {
                "Term_ID", "Relevance_CNR_1", "Relevance_CNR_2", "Relevance_CNR_3"
            };
            String[] doc_table_value1 =
            {
                term_id, relevance_str_1, relevance_str_2, relevance_str_3
            };

            insert_table("Result_Noun", doc_table1, doc_table_value1);
        }
    }

    /**
     * Measures the Verb Relevances
     */
    /**
     * Builds the verb context profiles consumed by measure_verb_relevance():
     * whole_seed_context_verbs (verbs co-occurring with any Training document),
     * context_verbs[i] (verbs in the context of the i-th corpus term), and
     * seed_context_verbs[i] (verbs in the context of the i-th training term).
     */
    private void prepare_verb_profiles_for_comparison()
    {
        seed_context_verbs = new Vector[whole_training_terms.size()];
        context_verbs = new Vector[get_whole_corpus_terms().size()];

        /************ Full context verb profile of the whole seed term set **********************/
        try
        {
            statement = connection.createStatement();
            String query = "SELECT v.Value FROM Occurrences o, Documents d,Verbs v, Verb_Profiles vp "
                    + " where o.Doc_ID = d.Doc_ID and d.Type = 'Training' and o.Occurrence_ID = "
                    + "vp.Occurrence_ID and vp.Verb_ID = v.Verb_ID";

            ResultSet rs = statement.executeQuery(query);
            while (rs.next())
            {
                whole_seed_context_verbs.add(rs.getString("Value"));
            }
            rs.close();
            statement.close();
        }
        catch (SQLException sqlex)
        {
            System.out.println("SQL Exception RP_ID : " + sqlex.getMessage());
        }

        /************ Verb context of each corpus term, one vector per term **********************/
        try
        {
            String query = "SELECT v.Value FROM Terms t, Term_Mapping tm, Verbs v, Verb_Profiles vp"
                    + " where t.Term_ID = ? and t.Term_ID = tm.Term_ID and tm.Occurrence_ID = "
                    + "vp.Occurrence_ID and vp.Verb_ID = v.Verb_ID";

            PreparedStatement ps = connection.prepareStatement(query);
            for (int i = 0; i < get_whole_corpus_terms().size(); i++)
            {
                context_verbs[i] = new Vector();
                ps.setString(1, get_term_ids().elementAt(i).toString());
                ResultSet rs = ps.executeQuery();
                while (rs.next())
                {
                    context_verbs[i].add(rs.getString("Value"));
                }
                rs.close();
            }
            // Close after reuse across the whole loop; the original leaked it.
            ps.close();
        }
        catch (SQLException sqlex)
        {
            // Original message said "Noun Profiles" (copy-paste); corrected.
            System.out.println("SQL Exception in Getting Verb Profiles of each corpus term : " + sqlex.getMessage());
        }

        /************ Verb context of each training term, one vector per term **********************/
        try
        {
            String query = "SELECT v.Value FROM Terms t, Term_Mapping tm, Verbs v, Verb_Profiles vp where "
                    + "t.Term_ID = ? and t.Term_ID = tm.Term_ID and tm.Occurrence_ID = vp.Occurrence_ID"
                    + " and vp.Verb_ID = v.Verb_ID";

            PreparedStatement ps = connection.prepareStatement(query);
            for (int i = 0; i < whole_training_terms.size(); i++)
            {
                seed_context_verbs[i] = new Vector();
                ps.setString(1, training_term_ids.elementAt(i).toString());
                ResultSet rs = ps.executeQuery();
                while (rs.next())
                {
                    seed_context_verbs[i].add(rs.getString("Value"));
                }
                rs.close();
            }
            // Close after reuse across the whole loop; the original leaked it.
            ps.close();
        }
        catch (SQLException sqlex)
        {
            System.out.println("SQL Exception in Getting Verb Profiles of each training term : " + sqlex.getMessage());
        }
    }

    /**
     * Computes three Dice-style verb-relevance scores for every corpus term
     * against the seed (training) terms, and inserts one Result_Verb row per
     * corpus term:
     *  - Relevance_CVR_1: Dice overlap with the pooled seed verb profile,
     *  - Relevance_CVR_2: mean Dice overlap over the per-seed-term profiles,
     *  - Relevance_CVR_3: maximum Dice overlap over the per-seed-term profiles.
     * Must run after prepare_verb_profiles_for_comparison().
     */
    private void measure_verb_relevance()
    {
        Vector intersection_1 = new Vector();
        Vector normalized_relevance_1 = new Vector();
        Double[] normalized_relevance_2 = new Double[get_whole_corpus_terms().size()];
        Double[] normalized_relevance_3 = new Double[get_whole_corpus_terms().size()];

        Vector overlapped_verbs;

        int size_cpv_seeds = 0;
        size_cpv_seeds = whole_seed_context_verbs.size();

        /***************************** Calculation of Intersection with Whole Seed Terms ****************/
        // Multiset intersection: each pooled seed verb counts once per occurrence
        // in whole_seed_context_verbs if it appears in the term's context at all.
        for (int j = 0; j < context_verbs.length; j++)
        {
            overlapped_verbs = new Vector();
            Iterator vectorIterator = whole_seed_context_verbs.iterator();
            while (vectorIterator.hasNext())
            {
                String vector_verb = vectorIterator.next().toString();
                if (context_verbs[j].contains(vector_verb))
                {
                    overlapped_verbs.add(vector_verb);
                }
            }
            intersection_1.add(overlapped_verbs.size());
        }

        /***************************** Calculation of Intersection with Separate Seed Terms ****************/
        for (int j = 0; j < context_verbs.length; j++)
        {
            normalized_relevance_2[j] = 0.0;
            normalized_relevance_3[j] = 0.0;
            for (int l = 0; l < seed_context_verbs.length; l++)
            {
                overlapped_verbs = new Vector();
                Iterator vectorIterator = seed_context_verbs[l].iterator();
                while (vectorIterator.hasNext())
                {
                    String vector_verb = vectorIterator.next().toString();
                    if (context_verbs[j].contains(vector_verb))
                    {
                        overlapped_verbs.add(vector_verb);
                    }
                }
                int inter = overlapped_verbs.size();
                // Dice coefficient: 2*|A∩B| / (|A| + |B|), guarded against 0/0.
                double relevance_double = 2 * inter;
                double normalizer_double = context_verbs[j].size() + seed_context_verbs[l].size();

                relevance_double = (normalizer_double == 0) ? 0 : relevance_double / normalizer_double;
                // _2 accumulates the sum (averaged below); _3 keeps the running maximum.
                normalized_relevance_2[j] = normalized_relevance_2[j] + relevance_double;
                normalized_relevance_3[j] = (relevance_double < normalized_relevance_3[j]) ? normalized_relevance_3[j] : relevance_double;

            }
            normalized_relevance_2[j] = normalized_relevance_2[j] / whole_training_terms.size();
        }

        /************* Calculation of relevance values for Whole Seed Terms ******************/
        for (int k = 0; k < context_verbs.length; k++)
        {
            double relevance_double = 2 * Double.parseDouble(intersection_1.elementAt(k).toString());
            double normalizer_double = context_verbs[k].size() + size_cpv_seeds;
            // NOTE(review): this division is not guarded — if both profiles are
            // empty it yields NaN, stored as "NaN".
            relevance_double = relevance_double / normalizer_double;
            normalized_relevance_1.add(Double.toString(relevance_double));
        }

        //     Putting Values in Database
        for (int i = 0; i < get_whole_corpus_terms().size(); i++)
        {
            String relevance_str_1 = normalized_relevance_1.elementAt(i).toString();
            String relevance_str_2 = normalized_relevance_2[i].toString();
            String relevance_str_3 = normalized_relevance_3[i].toString();
            String term_id = get_term_ids().elementAt(i).toString();

            String[] doc_table1 =
            {
                "Term_ID", "Relevance_CVR_1", "Relevance_CVR_2", "Relevance_CVR_3"
            };
            String[] doc_table_value1 =
            {
                term_id, relevance_str_1, relevance_str_2, relevance_str_3
            };

            insert_table("Result_Verb", doc_table1, doc_table_value1);
        }


    }

    /**
     * Measures the Patterns Relevance
     */
    /**
     * Builds the left-pattern context profiles consumed by
     * measure_pattern_relevance(): whole_seed_context_patterns (distinct
     * Instance_1 patterns from Training documents), context_patterns[i]
     * (patterns for the i-th corpus term), and seed_context_patterns[i]
     * (patterns for the i-th training term). 'EMPTY' placeholders are excluded.
     */
    private void prepare_pattern_profiles_for_comparison()
    {
        seed_context_patterns = new Vector[whole_training_terms.size()];
        context_patterns = new Vector[get_whole_corpus_terms().size()];

        String query;
        ResultSet rs = null;

        /************ Full context pattern profile of the whole seed term set **********************/
        try
        {
            statement = connection.createStatement();
            // BUGFIX: the original had a dangling comma before "where"
            // ("...Left_Patterns p, where..."), which is invalid SQL and made
            // this query fail on every run.
            query = "SELECT distinct p.Instance_1 FROM occurrences o, documents d,Left_Patterns p"
                    + " where o.Doc_ID = d.Doc_ID and d.Type = 'Training' and o.Occurrence_ID = "
                    + "p.Occurrence_ID and p.Instance_1 != 'EMPTY'";

            rs = statement.executeQuery(query);
            while (rs.next())
            {
                whole_seed_context_patterns.add(rs.getString("Instance_1"));
            }
            rs.close();
            statement.close();
        }
        catch (SQLException sqlex)
        {
            System.out.println("SQL Exception in selecting Instance_1 : " + sqlex.getMessage());
        }

        /************ Pattern context of each corpus term, one vector per term **********************/
        try
        {
            query = "SELECT distinct p.Instance_1 FROM Terms t, Term_Mapping tm, Left_Patterns p where t.Term_ID = ? "
                    + "and t.Term_ID= tm.Term_ID and tm.Occurrence_ID = p.Occurrence_ID and p.Instance_1 != 'EMPTY'";

            PreparedStatement ps = connection.prepareStatement(query);
            for (int i = 0; i < get_whole_corpus_terms().size(); i++)
            {
                context_patterns[i] = new Vector();
                ps.setString(1, get_term_ids().elementAt(i).toString());
                rs = ps.executeQuery();
                while (rs.next())
                {
                    context_patterns[i].add(rs.getString("Instance_1"));
                }
                rs.close();
            }
            // Close after reuse across the whole loop; the original leaked it.
            ps.close();
        }
        catch (SQLException sqlex)
        {
            // Original message said "Noun Profiles" (copy-paste); corrected.
            System.out.println("SQL Exception in Getting Pattern Profiles of each corpus term : " + sqlex.getMessage());
        }

        /************ Pattern context of each training term, one vector per term **********************/
        try
        {
            query = "SELECT distinct p.Instance_1 FROM Terms t, Term_Mapping tm, Left_Patterns "
                    + "p where t.Term_ID = ? and t.Term_ID = tm.Term_ID and tm.Occurrence_ID = "
                    + "p.Occurrence_ID and p.Instance_1 != 'EMPTY'";

            PreparedStatement ps = connection.prepareStatement(query);
            for (int i = 0; i < whole_training_terms.size(); i++)
            {
                seed_context_patterns[i] = new Vector();
                ps.setString(1, training_term_ids.elementAt(i).toString());
                rs = ps.executeQuery();
                while (rs.next())
                {
                    seed_context_patterns[i].add(rs.getString("Instance_1"));
                }
                rs.close();
            }
            // Close after reuse across the whole loop; the original leaked it.
            ps.close();
        }
        catch (SQLException sqlex)
        {
            System.out.println("SQL Exception in Getting Pattern Profiles of each training term : " + sqlex.getMessage());
        }
    }

    /**
     * Computes three Dice-style pattern-relevance scores for every corpus term
     * against the seed (training) terms, and inserts one Result_Pattern row per
     * corpus term:
     *  - Relevance_CPR_1: Dice overlap with the pooled seed pattern profile
     *    (truncated via truncate(), defined elsewhere in the hierarchy),
     *  - Relevance_CPR_2: mean Dice overlap over the per-seed-term profiles,
     *  - Relevance_CPR_3: maximum Dice overlap over the per-seed-term profiles.
     * Must run after prepare_pattern_profiles_for_comparison().
     */
    private void measure_pattern_relevance()
    {
        Vector intersection_1 = new Vector();

        Vector normalized_relevance_1 = new Vector();
        Double[] normalized_relevance_2 = new Double[get_whole_corpus_terms().size()];
        Double[] normalized_relevance_3 = new Double[get_whole_corpus_terms().size()];

        Vector overlapped_patterns;
        int size_cpp_seeds = whole_seed_context_patterns.size();


        /***************************** Calculation of Intersection with Whole Seed Terms ****************/
        // Intersection of each term's pattern profile with the pooled seed profile.
        for (int j = 0; j < context_patterns.length; j++)
        {
            overlapped_patterns = new Vector();
            Iterator vectorIterator = whole_seed_context_patterns.iterator();
            while (vectorIterator.hasNext())
            {
                String vector_Pattern = vectorIterator.next().toString();
                if (context_patterns[j].contains(vector_Pattern))
                {
                    overlapped_patterns.add(vector_Pattern);
                }
            }
            intersection_1.add(overlapped_patterns.size());
        }

        /***************************** Calculation of Intersection with Seperate Seed Terms ****************/
        for (int j = 0; j < context_patterns.length; j++)
        {
            normalized_relevance_2[j] = 0.0;
            normalized_relevance_3[j] = 0.0;
            for (int l = 0; l < seed_context_patterns.length; l++)
            {
                overlapped_patterns = new Vector();
                Iterator vectorIterator = seed_context_patterns[l].iterator();
                while (vectorIterator.hasNext())
                {
                    String vector_Pattern = vectorIterator.next().toString();
                    if (context_patterns[j].contains(vector_Pattern))
                    {
                        overlapped_patterns.add(vector_Pattern);
                    }
                }
                int inter = overlapped_patterns.size();
                try
                {
                    // Dice coefficient: 2*|A∩B| / (|A| + |B|), guarded against 0/0.
                    double relevance_double = 2 * inter;
                    double normalizer_double = context_patterns[j].size() + seed_context_patterns[l].size();
                    relevance_double = (normalizer_double == 0) ? 0 : relevance_double / normalizer_double;
                    // _2 accumulates the sum (averaged below); _3 keeps the running maximum.
                    normalized_relevance_2[j] = normalized_relevance_2[j] + relevance_double;

                    normalized_relevance_3[j] = (relevance_double < normalized_relevance_3[j]) ? normalized_relevance_3[j] : relevance_double;
                }
                catch (Exception ex)
                {
                    System.out.println("Exception in calculating Relevance value in Pattern");
                }
            }
            normalized_relevance_2[j] = normalized_relevance_2[j] / whole_training_terms.size();
        }

        /************* Calculation of relevance values for Whole Seed Terms ******************/
        for (int k = 0; k < context_patterns.length; k++)
        {
            double relevance_double = 2 * Double.parseDouble(intersection_1.elementAt(k).toString());
            double normalizer_double = context_patterns[k].size() + size_cpp_seeds;
            // NOTE(review): this division is not guarded — if both profiles are
            // empty it yields NaN before truncation.
            relevance_double = relevance_double / normalizer_double;
            relevance_double = truncate(relevance_double);
            normalized_relevance_1.add(Double.toString(relevance_double));
        }

        // Persist one row per corpus term with all three scores.
        for (int i = 0; i < get_whole_corpus_terms().size(); i++)
        {
            String relevance_str_1 = normalized_relevance_1.elementAt(i).toString();
            String relevance_str_2 = normalized_relevance_2[i].toString();
            String relevance_str_3 = normalized_relevance_3[i].toString();
            String term_id = get_term_ids().elementAt(i).toString();

            String[] doc_table1 =
            {
                "Term_ID", "Relevance_CPR_1", "Relevance_CPR_2", "Relevance_CPR_3"
            };
            String[] doc_table_value1 =
            {
                term_id, relevance_str_1, relevance_str_2, relevance_str_3
            };

            insert_table("Result_Pattern", doc_table1, doc_table_value1);
        }


    }

    /**
     * Returns the Freq of Noun/Verb from Noun/Verb_Profiles table
     * @param table_name
     * @param noun_verb
     * @param occurrence_id
     * @return
     */
    /**
     * Returns the Freq of a noun/verb occurrence from the given profiles table.
     *
     * @param table_name    "Noun_Profiles" (uses Noun_ID) or a verb-profiles
     *                      table (uses Verb_ID); identifiers cannot be bound as
     *                      parameters, so the id column is chosen here and the
     *                      table name is concatenated as before
     * @param noun_verb     Noun_ID or Verb_ID depending on table_name
     * @param occurrence_id Occurrence_ID the profile row belongs to
     * @return the Freq value, or 0 when no row matches or the query fails
     */
    public int get_count_from_Profiles(String table_name, int noun_verb, int occurrence_id)
    {
        int freq = 0;
        String id_column = table_name.equals("Noun_Profiles") ? "Noun_ID" : "Verb_ID";
        String query = "SELECT Freq FROM " + table_name
                + " WHERE Occurrence_ID = ? and " + id_column + " = ?";
        try
        {
            // Integer values are bound instead of concatenated/quoted.
            PreparedStatement ps = connection.prepareStatement(query);
            ps.setInt(1, occurrence_id);
            ps.setInt(2, noun_verb);
            ResultSet rs = ps.executeQuery();
            while (rs.next())
            {
                freq = rs.getInt("Freq");
            }
            rs.close();
            ps.close();
        }
        catch (SQLException sqlex)
        {
            System.out.println("SQL Exception Getting Frequence of Nouns and Verbs from Profiles is : " + sqlex.getMessage());
        }
        return freq;
    }

     /**
     * Returns Noun/Verb_Profile_ID when the Noun/Verb_ID and Occurrence_ID is given
     * @param table_name
     * @param noun_verb
     * @param occurrence_id
     * @return
     */
    /**
     * Returns the profile-row id (N_Prof_ID / V_Prof_ID) for the given
     * noun/verb id and occurrence.
     *
     * @param table_name    "Noun_Profiles" (N_Prof_ID via Noun_ID) or a
     *                      verb-profiles table (V_Prof_ID via Verb_ID)
     * @param noun_verb     Noun_ID or Verb_ID depending on table_name
     * @param occurrence_id Occurrence_ID the profile row belongs to
     * @return the profile id, or 0 when no row matches or the query fails
     */
    public int get_id_from_Profiles(String table_name, int noun_verb, int occurrence_id)
    {
        int profile_id = 0;
        boolean isNoun = table_name.equals("Noun_Profiles");
        // Identifiers cannot be bound as parameters; only the values are bound.
        String id_column = isNoun ? "N_Prof_ID" : "V_Prof_ID";
        String fk_column = isNoun ? "Noun_ID" : "Verb_ID";
        String query = "SELECT " + id_column + " FROM " + table_name
                + " WHERE Occurrence_ID = ? AND " + fk_column + " = ?";
        try
        {
            PreparedStatement ps = connection.prepareStatement(query);
            ps.setInt(1, occurrence_id);
            ps.setInt(2, noun_verb);
            ResultSet rs = ps.executeQuery();
            // Guard the cursor move: the original read the column even when no
            // row matched, raising an SQLException just to fall through to 0.
            if (rs.next())
            {
                profile_id = rs.getInt(id_column);
            }
            rs.close();
            ps.close();
        }
        catch (SQLException sqlex)
        {
            System.out.println("SQL Exception: Noun or Verb Profile ID when Sen_ID and Noun_ID is given : " + sqlex.getMessage());
        }
        return profile_id;
    }

    /**
     * Combines the per-metric relevance scores (lexical, noun-context,
     * verb-context and pattern-context) into one weighted score per term and
     * stores each (Term_ID, Relevance) pair in the Results_Final table.
     *
     * Overall = delta * lexical + alpha * noun + beta * verb + gamma * pattern.
     *
     * Side effects: clears and repopulates term_ids (via get_term_ids()).
     *
     * @param alpha weight for the noun-context relevance
     * @param beta weight for the verb-context relevance
     * @param gamma weight for the pattern-context relevance
     * @param delta weight for the lexical relevance
     * @param noun_table numeric suffix of the Relevance_CNR_&lt;n&gt; column
     * @param verb_table numeric suffix of the Relevance_CVR_&lt;n&gt; column
     * @param pattern_table numeric suffix of the Relevance_CPR_&lt;n&gt; column
     * @param lexical_table numeric suffix of the Relevance_LR_&lt;n&gt; column
     */
    public void get_combined_results(float alpha, float beta, float gamma, float delta,
            int noun_table, int verb_table,
            int pattern_table, int lexical_table)
    {
        Vector total_relevance = new Vector();
        get_term_ids().clear();
        // All concatenated values are primitive numbers (weights and column
        // suffixes), so the query text cannot be injected with arbitrary SQL.
        String query = "SELECT t.Term_ID," + delta + " * lr.Relevance_LR_" + lexical_table
                + "+" + alpha + "* rn.Relevance_CNR_" + noun_table + " + " + beta + "* rv.Relevance_CVR_"
                + verb_table + "+" + gamma + "* rp.Relevance_CPR_" + pattern_table + " As Overall FROM "
                + "result_lexical lr, result_noun rn,result_verb rv,result_pattern rp, Terms t where "
                + "rn.Term_ID = rv.Term_ID and rv.Term_id = rp.Term_ID and t.Value = lr.Value and "
                + "rn.Term_ID = t.Term_ID";
        try
        {
            statement = connection.createStatement();
            ResultSet rs = statement.executeQuery(query);
            while (rs.next())
            {
                total_relevance.add(rs.getString("Overall"));
                get_term_ids().add(rs.getString("Term_ID"));
            }
            rs.close();
        }
        catch (SQLException sqlex)
        {
            System.out.println("SQL Exception: Calculating Combined Similarity : " + sqlex.getMessage());
        }
        finally
        {
            // Close even on failure; the original leaked the statement when
            // the query threw.
            if (statement != null)
            {
                try
                {
                    statement.close();
                }
                catch (SQLException ignored)
                {
                    // best-effort close
                }
            }
        }
        // Persist one (Term_ID, Relevance) row per collected term. The column
        // name array is loop-invariant, so it is built once.
        String[] result_columns =
        {
            "Term_ID", "Relevance"
        };
        for (int i = 0; i < get_term_ids().size(); i++)
        {
            String[] result_values =
            {
                get_term_ids().elementAt(i).toString(),
                total_relevance.elementAt(i).toString()
            };
            insert_table("Results_Final", result_columns, result_values);
        }
    }

    /**
     * Counts how many of the top-ranked result terms also occur in the test
     * term set (used for parameter optimisation).
     *
     * Assumes get_test_terms() has been called first to populate
     * test_term_ids; otherwise the result is 0.
     *
     * @return the number of top-10000 terms found in test_term_ids, counted
     *         once per occurrence in test_term_ids
     */
    public int get_matches()
    {
        int matches = 0;
        Vector top_terms = new Vector();
        String query = "SELECT Term_ID FROM results_final order by Relevance desc LIMIT 10000";
        try
        {
            statement = connection.createStatement();
            ResultSet rs = statement.executeQuery(query);
            while (rs.next())
            {
                top_terms.add(rs.getString("Term_ID"));
            }
            rs.close();
        }
        catch (SQLException sqlex)
        {
            System.out.println("SQL Exception: Get Real Term Per Document: " + sqlex.getMessage());
        }
        finally
        {
            // Close even on failure; the original leaked the statement when
            // the query threw.
            if (statement != null)
            {
                try
                {
                    statement.close();
                }
                catch (SQLException ignored)
                {
                    // best-effort close
                }
            }
        }

        // Build a frequency map of the test ids once, replacing the original
        // O(top * test) nested scan with an O(top + test) lookup. Counting by
        // frequency keeps behaviour identical even if an id repeats.
        HashMap test_id_counts = new HashMap();
        for (int k = 0; k < test_term_ids.size(); k++)
        {
            String key = test_term_ids.elementAt(k).toString();
            Integer count = (Integer) test_id_counts.get(key);
            test_id_counts.put(key, Integer.valueOf(count == null ? 1 : count.intValue() + 1));
        }
        Iterator iter = top_terms.iterator();
        while (iter.hasNext())
        {
            Integer count = (Integer) test_id_counts.get(iter.next().toString());
            if (count != null)
            {
                matches += count.intValue();
            }
        }
        return matches;
    }

    /**
     * Loads the held-out test term ids (used for parameter optimisation).
     *
     * Side effects: replaces test_term_ids with the term ids of the
     * hard-coded test documents. The document ids are a fixed test split;
     * an earlier split ('1012','139') was used previously.
     *
     * On SQLException the error is logged to stdout and test_term_ids is
     * left as a fresh (possibly partially filled) Vector.
     */
    public void get_test_terms()
    {
        test_term_ids = new Vector();
        String query = "SELECT ID FROM Terms where Doc_ID IN ('145','160','2694','2697')";
        try
        {
            statement = connection.createStatement();
            ResultSet rs = statement.executeQuery(query);
            while (rs.next())
            {
                // Stored as Integer; consumers compare via toString().
                test_term_ids.add(rs.getInt("ID"));
            }
            rs.close();
        }
        catch (SQLException sqlex)
        {
            System.out.println("SQL Exception: Get Test Terms: " + sqlex.getMessage());
        }
        finally
        {
            // Close even on failure; the original leaked the statement when
            // the query threw.
            if (statement != null)
            {
                try
                {
                    statement.close();
                }
                catch (SQLException ignored)
                {
                    // best-effort close
                }
            }
        }
    }
    
    /**
     * Rounds x to at most two decimal places.
     *
     * Uses DecimalFormat("0.##") — HALF_EVEN rounding in the default locale —
     * then normalises a locale-specific comma decimal separator to '.' before
     * parsing the value back.
     *
     * @param x the value to round
     * @return x rounded to at most two decimal places
     */
    private double truncate(double x)
    {
        DecimalFormat df = new DecimalFormat("0.##");
        // Normalise e.g. "3,14" (comma-separator locales) to "3.14" so it parses.
        String formatted = df.format(x).replaceAll(",", ".");
        // Double.parseDouble replaces the deprecated new Double(String) boxing.
        // The stray debug println of the formatted value has been removed.
        return Double.parseDouble(formatted);
    }

    /**
     * Accessor for the collected term identifiers.
     *
     * @return the term_ids vector held by this handler (live reference, not a copy)
     */
    public Vector<String> get_term_ids()
    {
        return this.term_ids;
    }

    /**
     * Replaces the handler's term identifier vector.
     *
     * @param value the new term_ids vector to use
     */
    public void set_term_ids(Vector value)
    {
        this.term_ids = value;
    }

    /**
     * Accessor for the whole-corpus term list.
     *
     * @return the whole_corpus_terms vector held by this handler (live reference)
     */
    public Vector<String> get_whole_corpus_terms()
    {
        return this.whole_corpus_terms;
    }

    /**
     * Replaces the handler's whole-corpus term vector.
     *
     * @param value the new whole_corpus_terms vector to use
     */
    public void set_whole_corpus_terms(Vector value)
    {
        this.whole_corpus_terms = value;
    }
}
