/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package gblend.exactquery;

import gblend.adjlistgraph.ALGraph;
import gblend.adjlistgraph.FsgIdEntry;
import java.sql.*;
import java.util.HashSet;
import java.util.Iterator;
import java.util.ArrayList;
import gblend.db.DatabaseInfo;
import gblend.db.Parameters;
import gblend.frequentindex.BuildFreqIndex;
import gblend.frequentindex.FGraph;
//import gblend.infrequentindex.BuildCam;
import gblend.infrequentindex.BuildInfreqIndex;
import gblend.largegraph.Conversion;
import java.util.HashMap;
import java.util.Vector;


/*
 * Copyright 2009, Center for Advanced Information Systems, Nanyang Technological University
 *
 * File name: QueryEngine.java
 *
 * Abstract: The main query engine including the SPIG construction
 *
 * Current Version:      0.1
 * Author:               Jin Changjiu
 * Modified Date:        Jun 20,2010
 *
 */
/**
 *
 * @author cjjin
 */
/**
 * The main exact-query engine, including SPIG (spindle-shaped graph)
 * construction, candidate-id retrieval from the MF-/DF-/A2I-indexes, and
 * feature-table maintenance for the similarity fallback.
 */
public class QueryEngine {

    private static BuildInfreqIndex infindex = new BuildInfreqIndex();
    private static BuildFreqIndex findex = new BuildFreqIndex();
    private DatabaseInfo dbinfo;
    private Parameters parameters = null;
    private ReadCluster cluster = null;
    // similarity relaxation threshold, computed in conFeatureTable
    private int sigma = 0;
    // query type: 0 = frequent; 1 = dif; 2 = common infrequent; 3 = similarity
    private int flag = 0;
    // candidate graph ids of the current query; for large graphs each entry is an FsgIdEntry
    private HashSet<FsgIdEntry> idlist = new HashSet<FsgIdEntry>();
    private String dir = null;
    // candidate graphs fetched for the current query (lazily filled)
    private ArrayList<ALGraph> graphSet = new ArrayList<ALGraph>();
    // edge-count level -> SPIG nodes of that size, used by the similarity query
    private HashMap<Integer, ArrayList<SNode>> featureTable = null;
    private ArrayList<SpindleGraph> spindleGraphSet = new ArrayList<SpindleGraph>();
    // DF-index clusters fetched from disk so far, keyed by cluster id
    private HashMap<Integer, FGraph> fetchedClusters = new HashMap<Integer, FGraph>();
    private BuildSpig buildspig = new BuildSpig();

    /**
     * Looks up the fragment represented by {@code nodei} in the A2F-index and,
     * when a match is found, records the matching frequent-fragment id on the
     * node (and, for DF-index hits, the cluster id as well).
     *
     * @param nodei the SPIG node whose CAM code is searched for
     */
    public void SearchA2Findex(SNode nodei) {
        FGraph mfindex = findex.getFreqGraph();

        if (nodei.getEnum() < (parameters.getb() + 1)) {
            // Small fragment: search the in-memory MF-index.
            HashSet<Integer> freqSet = new HashSet<Integer>();
            // Union of the children of every frequent fragment contained in nodei.
            for (Object o : nodei.getFreIds()) {
                int fid = (Integer) o; // a contained frequent-fragment id
                freqSet.addAll(mfindex.getNode(fid).getChildren());
            }

            // If nodei is frequent it must be among those children: match by CAM code.
            for (Object o : freqSet) {
                int cid = (Integer) o;
                if (nodei.getCam().equals(mfindex.getNode(cid).getCam())) {
                    nodei.setItsFreqId(cid);
                    break;
                }
            }

        } else if (nodei.getEnum() == (parameters.getb() + 1)) {
            // Boundary size: nodei can only be the root of a DF-index cluster.
            // Intersect the cluster lists of all of nodei's frequent parents.
            // NOTE: a first-iteration flag is used (not isEmpty()) so that an
            // intersection that becomes empty stays empty.
            ArrayList<Integer> commonClusters = new ArrayList<Integer>();
            boolean first = true;
            for (Object o : nodei.getFreIds()) {
                int fid = (Integer) o;
                if (first) {
                    commonClusters.addAll(mfindex.getNode(fid).getClusters());
                    first = false;
                } else {
                    commonClusters.retainAll(mfindex.getNode(fid).getClusters());
                }
            }

            for (int m = 0; m < commonClusters.size(); m++) {
                int cid = commonClusters.get(m); // a cluster id, not a position
                String clusterTop = mfindex.getClusterHeads().get(cid); // CAM of the cluster's root

                if (clusterTop.equals(nodei.getCam())) {
                    nodei.setItsFreqId(0); // the root always has node id 0 inside its cluster
                    nodei.setClusterId(cid); // associate the cluster with nodei
                    if (!fetchedClusters.containsKey(cid)) {
                        // first hit on this cluster: load it from disk into memory
                        fetchedClusters.put(cid, cluster.Clusterreading(cid));
                    }
                    break;
                }
            }

        } else { // nodei.getEnum() > b + 1: search inside the already-fetched cluster
            FGraph fgCluster = fetchedClusters.get(nodei.getClusterId());
            // Intersect the children lists of all of nodei's parents (same
            // first-flag discipline as above).
            ArrayList<Integer> commonChildren = new ArrayList<Integer>();
            boolean first = true;
            for (Object o : nodei.getFreIds()) {
                int fid = (Integer) o; // a parent of nodei
                if (first) {
                    commonChildren.addAll(fgCluster.getNode(fid).getChildren());
                    first = false;
                } else {
                    commonChildren.retainAll(fgCluster.getNode(fid).getChildren());
                }
            }

            for (int n = 0; n < commonChildren.size(); n++) {
                int nid = commonChildren.get(n); // position id inside nodei's cluster
                if (nodei.getCam().equals(fgCluster.getNode(nid).getCam())) {
                    nodei.setItsFreqId(nid);
                    break;
                }
            }
        }
    }

    /**
     * The main exact-query algorithm.
     *
     * @param bq        the query builder holding the query and its seed edge
     * @param edgelabel 0 for a normal exact query (a new SPIG is built);
     *                  otherwise the query was modified and the last SPIG is reused
     * @param status    when true, the candidate id set is (re)retrieved
     * @return the number of candidate graph ids
     */
    public int executeQuery(BuildQuery bq, int edgelabel, boolean status) throws SQLException, ClassNotFoundException {
        System.out.println(">>> QueryEngine.executeQuery");
        SpindleGraph exgraph;
        if (edgelabel == 0) {
            // Normal exact query: build a new spindle-shaped graph (SPIG).
            SNode wholeQuery = bq.getWholeQuery(); // the original query structure
            exgraph = buildspig.buildSpiGraph(wholeQuery, bq.getSeed(), this, spindleGraphSet);
            conFeatureTable(exgraph, wholeQuery);

            exgraph.setId(bq.getSeed().getEdge(0, 1)); // label the SPIG by its seed edge
            spindleGraphSet.add(exgraph); // register the current SPIG
        } else {
            // After modifying the query, the last SPIG is the current one.
            exgraph = spindleGraphSet.get(spindleGraphSet.size() - 1);
        }

        if (status) {
            // The last node in the SPIG represents the whole current query.
            SNode query = exgraph.getNode(exgraph.getSize() - 1);
            retrieveIdset(query);
            System.out.println("Exact query candidate number: " + idlist.size());

            if (idlist.isEmpty()) {
                flag = 3; // no exact candidates: fall back to similarity query
            }
        }
        for (FsgIdEntry id : idlist) {
            System.out.print(id + ",");
        }
        System.out.println();
        return idlist.size();
    }

    /**
     * Retrieves into {@link #idlist} the candidate graph ids of {@code query}
     * and sets {@link #flag} to the detected query type
     * (0 = frequent, 1 = dif, 2 = common infrequent).
     */
    public void retrieveIdset(SNode query) {
        System.out.println(">>> QueryEngine.retrieveIdset");
        idlist = new HashSet<FsgIdEntry>(); // reset the candidate set

        if (query.getItsFreqId() != -1) {
            // The current query matches a frequent fragment.
            flag = 0;
            if (query.getEnum() <= parameters.getb()) {
                // small fragment: ids come from the MF-index
                fetchIdsetMFindex(idlist, query.getItsFreqId());
            } else {
                // larger fragment: ids come from its DF-index cluster
                fetchIdsetDFindex(idlist, query.getItsFreqId(), fetchedClusters.get(query.getClusterId()));
            }
        } else if (query.getItsDifId() != -1) {
            // The current query matches a dif (indexed infrequent) fragment.
            flag = 1;
            idlist.addAll(infindex.getInfGraph().get(query.getItsDifId()).getidlist());
        } else {
            // Common infrequent query: candidates were accumulated on the node itself.
            flag = 2;
            idlist = new HashSet<FsgIdEntry>(query.getCandParentList().getSet());
        }
    }

    /**
     * Fills {@code idset} with the graph-id lists of fragment {@code id} and
     * of all its successors in the MF-index. Any previous content of
     * {@code idset} is discarded.
     */
    public void fetchIdsetMFindex(HashSet<FsgIdEntry> idset, int id) {
        System.out.println(">>> QueryEngine.fetchIdsetMFindex");
        idset.clear();
        FGraph freqgraph = findex.getFreqGraph();
        ALGraph fg = freqgraph.getNode(id);
        idset.addAll(fg.getidlist());
        // dsf() appears to populate fg's successor list via depth-first search
        // — TODO confirm against FGraph
        freqgraph.dsf(id);

        for (int i = 0; i < fg.getSucc().size(); i++) {
            int gid = fg.getSucc().get(i);
            idset.addAll(freqgraph.getNode(gid).getidlist());
        }
    }

    /** Convenience overload that resolves {@code clusterId} to its fetched cluster. */
    public void fetchIdsetDFindex(HashSet<FsgIdEntry> idset, int id, int clusterId) {
        fetchIdsetDFindex(idset, id, fetchedClusters.get(clusterId));
    }

    /**
     * Fetches the id set of fragment {@code id} from the DF-index: collects
     * the on-disk ("old") ids of the fragment and its successors inside
     * {@code fgCluster}, then reads their graph-id lists from storage.
     */
    public void fetchIdsetDFindex(HashSet<FsgIdEntry> idset, int id, FGraph fgCluster) {
        System.out.println(">>> QueryEngine.fetchIdsetDFindex");
        Vector<Integer> oldlist = new Vector<Integer>(); // old ids of node(id) and its successors
        ALGraph fg = fgCluster.getNode(id);
        oldlist.add(fg.getGraphid()[0]); // the old id of node(id)
        fgCluster.dsf(id); // populate fg's successor list — TODO confirm dsf semantics

        for (Object o : fg.getSucc()) {
            int gid = (Integer) o;
            oldlist.add(fgCluster.getNode(gid).getGraphid()[0]);
        }
        cluster.Idsreading(oldlist, idset);
    }

    /** Returns the id list of infrequent-index entry {@code idx} (A2I-index). */
    public HashSet<FsgIdEntry> fetchIdsetA2I(int idx) {
        return infindex.getInfGraph().get(idx).getidlist();
    }

    /**
     * Fetches the candidate graphs for {@code list} into {@code candidateSet}.
     * Single-id entries are served from the in-memory structures; combined
     * entries are converted from their file on disk.
     */
    public void fetchFromMem(HashSet<FsgIdEntry> list, ArrayList<ALGraph> candidateSet) {
        System.out.println(">>> QueryEngine.fetchFromMem");
        for (FsgIdEntry gidlist : list) {
            ALGraph graph;
            if (gidlist.getIdSize() == 1) {
                graph = infindex.getALGraphs().get(gidlist);
            } else {
                graph = Conversion.combinedToAL(dir + "data/" + gidlist.toString());
            }
            candidateSet.add(graph);
        }
    }

    /**
     * Fetches a single graph: from memory when available, otherwise converted
     * from its combined file on disk.
     */
    public ALGraph fetchFromMem(FsgIdEntry id) {
        System.out.println(">>> QueryEngine.fetchFromMem");
        ALGraph graph = infindex.getALGraphs().get(id);
        if (graph == null) {
            graph = Conversion.combinedToAL(dir + "data/" + id.toString());
        }
        return graph;
    }

    /**
     * (Re)builds the feature table: every node of every SPIG whose edge count
     * is at least |Q| - sigma is recorded under its edge count.
     * {@code spigraph} is scanned separately because it is not yet registered
     * in {@link #spindleGraphSet} when this is called from executeQuery.
     */
    public void conFeatureTable(SpindleGraph spigraph, SNode query) {
        System.out.println(">>> QueryEngine.conFeatureTable");
        // sigma is the similarity relaxation: capped by delta, else half of |Q|
        if (query.getEnum() - 3 >= parameters.getDelta()) {
            sigma = parameters.getDelta();
        } else {
            sigma = query.getEnum() / 2;
        }

        featureTable = new HashMap<Integer, ArrayList<SNode>>();
        for (SpindleGraph sg : spindleGraphSet) {
            collectFeatures(sg, query);
        }
        collectFeatures(spigraph, query);
    }

    // Scans one SPIG and records every node with >= |Q|-sigma edges in featureTable.
    private void collectFeatures(SpindleGraph sg, SNode query) {
        int sgsize = sg.getSize();
        for (int i = 0; i < sgsize; i++) {
            System.out.print("spigraph i=" + i);
            SNode node = sg.getNode(i);
            int levelSize = node.getEnum(); // edge count of this node
            System.out.print(" , levelSize=" + levelSize);
            if (levelSize >= query.getEnum() - sigma) {
                System.out.print(", >=query.getEnum()-sigma");
                if (!featureTable.containsKey(levelSize)) {
                    // a new level is created
                    ArrayList<SNode> featureVector = new ArrayList<SNode>();
                    featureVector.add(node);
                    System.out.println(",put key=" + levelSize);
                    featureTable.put(levelSize, featureVector);
                } else {
                    // only nodes whose CAM is not already recorded are added
                    nodeInsert(node, levelSize);
                }
            }
        }
    }

    /**
     * Inserts {@code nodei} into feature-table level {@code levelSize} unless
     * a node with the same CAM code already exists there.
     *
     * @return true if inserted, false if a duplicate was found
     */
    public boolean nodeInsert(SNode nodei, int levelSize) {
        System.out.println(">>> QueryEngine.nodeInsert");
        for (SNode existingNode : featureTable.get(levelSize)) {
            if (existingNode.getCam().equals(nodei.getCam())) {
                return false;
            }
        }
        featureTable.get(levelSize).add(nodei);
        return true;
    }

    /** Wires in the database connection info and parameters; derives the data directory. */
    public void setDBcon(DatabaseInfo db, Parameters para) {
        dbinfo = db;
        parameters = para;
        dir = "data/" + para.getName() + "/" + para.getdatasize() + "k/";
        cluster = new ReadCluster(parameters);
    }

    public DatabaseInfo getDBcon() {
        return dbinfo;
    }

    public Parameters getPara() {
        return parameters;
    }

    public HashMap<Integer, ArrayList<SNode>> getFeatureTable() {
        return featureTable;
    }

    public int getSigma() {
        return sigma;
    }

    public ArrayList<SpindleGraph> getSpindleGraphSet() {
        return spindleGraphSet;
    }

    /** Returns the candidate graphs, fetching them on first access. */
    public ArrayList<ALGraph> getCandidateGraph() {
        if (graphSet.isEmpty()) {
            fetchFromMem(idlist, graphSet);
        }
        return graphSet;
    }

    public HashSet<FsgIdEntry> getCandidateIds() {
        return idlist;
    }

    public BuildFreqIndex getfindex() {
        return findex;
    }

    public BuildInfreqIndex getinfindex() {
        return infindex;
    }

    /** Query type: 0 frequent, 1 dif, 2 common infrequent, 3 similarity. */
    public int getQuerytype() {
        return flag;
    }
}
