/*
 * SimilarDocSelector.java
 *
 * Copyright (c) 2006-2007 Computer-Aided Integrated Systems Laboratory, 
 * St. Petersburg Institute for Informatics and Automation RAS http://cais.lisa.iias.spb.su
 */

package guislicer;

import dbindex.DocTable;
import dbindex.ComparativeClass;
//import dbindex.ComparativeAttr;
import dbindex.Language;
import topindex.HashUtils;
import topindex.Index;
import Ontology.WebDESOOntology;
import Ontology.WDSliceModeller;
import Ontology.Classes;
import Ontology.Attributes;

import wikipedia.sql.Connect;
import wikipedia.util.StringUtil;

import java.sql.Connection;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Selects Wikipedia documents similar to a given document, using the
 * document index and an ontology slice built in the WebDESO ontology.
 */
public class SimilarDocSelector {

    /**
     * Selects similar documents (sorted by relevance) for the document
     * {@code doc_id} using the index. The index must be built beforehand.
     *
     * Precision &amp; Recall algorithm:
     * <ol>
     *  <li>Take the indexed document by its title.</li>
     *  <li>Take the list L of classes, attributes and relations from the
     *      index for the document.</li>
     *  <li>Build slice S in the WebDESO ontology from L (the ontology slice
     *      for the indexed document). In reality this should be a "context
     *      slice", but none is available; so, in order to test the algorithm
     *      "Relevance estimation and selection of relevant documents from
     *      index", the document slice is substituted for the context slice.</li>
     *  <li>Search the most relevant documents by S.</li>
     *  <li>Filter: return only Wikipedia documents (local documents belong
     *      to another experiment).</li>
     * </ol>
     *
     * @param wiki_conn         connection to the Wikipedia database
     * @param ksnetcontext_conn connection to the context (index) database
     * @param lang              language of the indexed documents
     * @param wdo               loaded WebDESO ontology
     * @param onto_id           ontology identifier
     * @param wiki_docs         all existing indexed Wikipedia documents
     * @param doc_id            the slice will be created for the document with this ID
     * @param b_debug           enables debug printing
     * @param dump_filepath     where to store the file with the graph in Pajek
     *                          format; skipped if null
     * @return relevant Wikipedia documents sorted by relevance, or null when
     *         no slice could be built or nothing relevant was found
     */
    public static List<DocTable> selectFromWiki(wikipedia.sql.Connect  wiki_conn,
                                java.sql.Connection     ksnetcontext_conn, Language lang,
                                WebDESOOntology         wdo,
                                int                     onto_id,
                                DocTable[]              wiki_docs,
                                int                     doc_id,
                                boolean                 b_debug,
                                String                  dump_filepath)
    {
        // Steps 1-3: build the ontology slice.
        // NOTE(review): the real implementation is
        // DocSelector.createSliceFromIndexByDocID(ksnetcontext_conn, onto_id, doc_id, b_debug);
        // createTestSlice() is a stand-in, so doc_id and b_debug are currently
        // unused in this method.
        WDSliceModeller slice_context = DocSelector.createTestSlice(lang.toInt());
        if (null == slice_context) {
            //System.out.println("Error: Null slice created for the document with doc_id '"+doc_id+"'. SimilarDocSelector.selectFromWiki().");
            return null;
        }

        // Step 4: search the most relevant documents by the slice.
        DocSelector ds = new DocSelector();
        List<DocTable> docs = ds.select(
                ksnetcontext_conn, lang,
                wdo, onto_id,
                slice_context.vResClassAttr, slice_context.vResClasses, slice_context.vResAttr, slice_context.vResHier,
                slice_context.vResAssoc,     slice_context.vResFunc,    null,  // null, since there are no Taxonomical relations in the slice now.
                dump_filepath);

        if (null == docs || 0 == docs.size())
            return null;

        // Step 5: keep only documents that exist in Wikipedia.
        // Only membership is tested, so a Set of IDs is sufficient.
        Set<Integer> wiki_doc_ids = new HashSet<Integer>();
        for (DocTable d : wiki_docs) {
            wiki_doc_ids.add(d.doc_id);
        }
        List<DocTable> docs_filtered = new ArrayList<DocTable>();
        for (DocTable d : docs) {
            if (wiki_doc_ids.contains(d.doc_id)) {
                docs_filtered.add(d);
            }
        }
        return docs_filtered;
    }


    /**
     * Simplified version of {@link #selectFromWiki}: connects to the
     * Wikipedia and context databases, loads the ontology, resolves the
     * document by its title and delegates to {@code selectFromWiki()}.
     * Prints timing statistics to stdout.
     *
     * @param onto_id          ontology identifier
     * @param ksnetcontext_db  context database connection string
     * @param ksnetcontext_user context database user
     * @param ksnetcontext_pass context database password
     * @param lang             language of the indexed documents
     * @param wiki_host        Wikipedia database host
     * @param wiki_db          Wikipedia database name
     * @param wiki_user        Wikipedia database user
     * @param wiki_pass        Wikipedia database password
     * @param title            the slice will be created for the document with this title
     * @return relevant Wikipedia documents, or null on connection failure or
     *         when the title is not among the indexed documents
     */
    public static List<DocTable> selectFromWikiByDocName(
            int     onto_id,
            String ksnetcontext_db, String ksnetcontext_user,String ksnetcontext_pass, Language lang,
            String  wiki_host,String wiki_db,String wiki_user,String wiki_pass, 
            String  title)
    {
        long    t_start_wdo, t_end_wdo, t_start_wiki, t_end_wiki, t1, t2;
        float   t_wdo, t_wiki, t_total, t_GetAllExistedInWikipedia, t_selectFromWiki;

        System.out.println("Started selectFromWikiByDocName()...");

        t_start_wdo = System.currentTimeMillis();
        WebDESOOntology WDO = new WebDESOOntology(onto_id, lang.toInt());
        t_end_wdo  = System.currentTimeMillis();
        t_wdo = (t_end_wdo - t_start_wdo)/1000f; // in sec

        t_start_wiki = System.currentTimeMillis();
        Index index_docs = new Index();
        if(!index_docs.ConnectWikipedia(wiki_host, wiki_db, wiki_user, wiki_pass)) {
            System.out.println("Can't connect to Wikipedia");
            // Never print the password itself: credentials must not be logged.
            System.out.println(   "wiki_host="  + wiki_host +
                                "; wiki_db="    + wiki_db   +
                                "; wiki_user="  + wiki_user +
                                "; wiki_pass=***");
            return null;
        }

        java.sql.Connection ksnetcontext_conn = index_docs.getDBConnection(ksnetcontext_db, ksnetcontext_user, ksnetcontext_pass);

        t1 = System.currentTimeMillis();
        DocTable[] wiki_docs = DocTable.GetAllExistedInWikipedia (ksnetcontext_conn, lang, index_docs.wp_conn);
        t2  = System.currentTimeMillis();
        t_GetAllExistedInWikipedia = (t2 - t1)/1000f; // in sec

        // the map for all existing and indexed wikipedia documents
        Map<String, DocTable> title_to_doc = DocTable.createMapTitleToDoc(wiki_docs);

        if(title_to_doc.containsKey(title)) {
            DocTable source_doc = title_to_doc.get(title);

            t1 = System.currentTimeMillis();
            String dump_graph_pajec_filepath = Index.B_DEBUG ? "./data/ent_arch_pajek/" : null;

            List<DocTable> docs = selectFromWiki(
                    index_docs.wp_conn,
                    ksnetcontext_conn, lang,
                    WDO, onto_id,
                    wiki_docs, source_doc.doc_id, false, // false:debug print
                    dump_graph_pajec_filepath);
            t2  = System.currentTimeMillis();
            t_selectFromWiki = (t2 - t1)/1000f; // in sec

            t_end_wiki  = System.currentTimeMillis();
            t_wiki = (t_end_wiki - t_start_wiki)/1000f; // in sec
            t_total = (t_end_wiki - t_start_wdo)/1000f; // in sec

            System.out.println("time WDO   sec:" + t_wdo);
            System.out.println("time Wiki  sec:" + t_wiki);
            System.out.println("    time DocTable[] wiki_docs = DocTable.GetAllExistedInWikipedia sec:" + t_GetAllExistedInWikipedia);
            System.out.println("    time selectFromWiki sec:" + t_selectFromWiki);
            System.out.println("time total sec:" + t_total);
            return docs;
        }
        return null;
    }

    /**
     * Builds an XML description of the given documents: title, relevance,
     * similarity and URI of each, wrapped in a {@code <Sorted>} root element.
     *
     * @param docs documents to describe; may be null or empty
     * @return XML string (UTF-8 declaration included)
     */
    public static String getXML(List<DocTable> docs)
    {
        // StringBuilder instead of repeated String concatenation in the loop
        // (avoids O(n^2) copying).
        StringBuilder s = new StringBuilder("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n");

        s.append("<Sorted>");
        for(int i=0; null != docs && i<docs.size(); i++) {
            s.append("<Document>");
            DocTable d = docs.get(i);

            s.append("<Title>");
            s.append(StringUtil.underscoreToSpace(d.title));
            s.append("</Title>");

            s.append("<Rel>");
            // see http://java.sun.com/developer/technicalArticles/Programming/sprintf/
            s.append(new PrintfFormat("%.2lg").sprintf(d.relevance));
            s.append("</Rel>");

            s.append("<Sim>");
            s.append(new PrintfFormat("%.2lg").sprintf(d.sim));
            s.append("</Sim>");

            s.append("<URI>");
            if (d.url==null || d.url.length()==0)
                s.append(d.filepath);
            else
                s.append(d.url);
            s.append("</URI>");

            s.append("</Document>");
        }
        s.append("</Sorted>");

        return s.toString();
    }


}
