package de.dopichaj.labrador.search.own;


import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
import java.util.Set;
import java.util.TreeMap;
import java.util.Map.Entry;

import org.apache.log4j.Logger;

import de.dopichaj.labrador.index.Tokenizer;
import de.dopichaj.labrador.index.IntervalSequence.Interval;
import de.dopichaj.labrador.index.backend.MultiTermSearcher;
import de.dopichaj.labrador.index.backend.ReadOnlyDictionary;
import de.dopichaj.labrador.index.backend.Term;
import de.dopichaj.labrador.index.backend.TermFrequency;
import de.dopichaj.labrador.index.meta.HitFactory;
import de.dopichaj.labrador.search.NEXISearcher;
import de.dopichaj.labrador.search.QueryParseException;
import de.dopichaj.labrador.search.SearchException;
import de.dopichaj.labrador.search.hit.ChainedHit;
import de.dopichaj.labrador.search.hit.DocHitMap;
import de.dopichaj.labrador.search.hit.Hit;
import de.dopichaj.labrador.util.Configuration;


/**
 * NEXI query searcher that reads merged inverted-list entries from a
 * {@link MultiTermSearcher}, groups them document by document, materializes
 * {@link Hit} objects via the {@link HitFactory}, and scores them with a
 * {@link Similarity} obtained from the configured {@link SimilarityFactory}.
 *
 * <p>Results are pruned while searching: once at least {@code wantedHits}
 * documents have matched a certain number of query terms, documents matching
 * fewer terms are no longer scored (see the {@code minHitCount} logic in
 * {@link #nexiSearch(String)}).
 */
public final class OwnNEXISearcher implements NEXISearcher {
    
    private static final Logger log = Logger.getLogger(OwnNEXISearcher.class);

    /** Number of result hits the caller wants; drives the pruning threshold. */
    private final int wantedHits;
    
    /** Backend that merges the inverted lists of all query terms. */
    private final MultiTermSearcher searcher;
    /** Maps query tokens to index terms. */
    private final ReadOnlyDictionary dictionary;
    /** Builds Hit metadata objects for matching fragment IDs. */
    private final HitFactory hitFactory;
    /** Tokenizer used to parse the NEXI query text. */
    private final Tokenizer tokenizer;

    /**
     * If {@code true}, term-frequency lists are summed up from child hits to
     * their ancestors before scoring (difference-based scoring); if
     * {@code false}, each hit is scored from its own list only.
     */
    private final boolean differenceBased;

    /** Creates the similarity function for each parsed query. */
    private final SimilarityFactory similarityFactory;
    
    /**
     * Creates a searcher with explicit settings.
     *
     * @param searcher          backend for reading merged inverted lists
     * @param dictionary        term dictionary for query parsing
     * @param documentMap       factory producing Hit metadata (stored as
     *                          {@code hitFactory})
     * @param similarityFactory creates the per-query scoring function
     * @param tokenizer         tokenizer for the query text
     * @param differenceBased   whether to propagate tf lists up the hit tree
     * @param wantedHits        desired result count, used for pruning
     */
    public OwnNEXISearcher(final MultiTermSearcher searcher,
        final ReadOnlyDictionary dictionary, final HitFactory documentMap,
        final SimilarityFactory similarityFactory,
        final Tokenizer tokenizer,
        final boolean differenceBased, int wantedHits) {
        
        this.searcher = searcher;
        this.dictionary = dictionary;
        this.hitFactory = documentMap;
        this.similarityFactory = similarityFactory;
        this.tokenizer = tokenizer;
        this.differenceBased = differenceBased;
        this.wantedHits = wantedHits;
        log.info("differenceBased: " + differenceBased);
        log.info("wantedHits: " + wantedHits);
    }

    /**
     * Creates a searcher whose tuning settings come from a
     * {@link Configuration}: {@code difference-based} (default {@code false})
     * and {@code wanted-hits} (default unlimited).
     */
    public OwnNEXISearcher(final MultiTermSearcher searcher,
        final ReadOnlyDictionary dictionary, final HitFactory documentMap,
        final SimilarityFactory similarityFactory,
        final Tokenizer tokenizer,
        final Configuration configuration) {
        
        this(searcher, dictionary, documentMap, similarityFactory, tokenizer,
            configuration.getBool("difference-based", false),
            configuration.getInt("wanted-hits", Integer.MAX_VALUE));
    }

    /**
     * Parses and executes a NEXI query.
     *
     * <p>The inverted-list iterator delivers one {@code List<TermFrequency>}
     * per matching fragment. Entries are consumed in a single pass with a
     * one-element lookahead and grouped into documents using the limits from
     * {@link HitFactory#limitIterator()}; each group is then turned into hits
     * and scored.
     *
     * @param nexi the NEXI query string
     * @return a map from documents to their scored hits
     * @throws SearchException     if reading the index fails
     * @throws QueryParseException if the query cannot be parsed
     */
    public DocHitMap nexiSearch(String nexi)
        throws SearchException, QueryParseException {

        log.debug("Searching for " + nexi);
        final long startTime = System.currentTimeMillis();
        
        // de.dopichaj.labrador.search
        final OwnQuery query = new OwnQuery(nexi, tokenizer, dictionary);
        log.debug("Query terms: " + query.getQueryTerms());
        final Iterator<List<TermFrequency>> result;
        final Collection<Term> queryTerms = query.getQueryTerms();
        final int queryTermCount = queryTerms.size();
        try {
            result = searcher.search(queryTerms);
        } catch (IOException e) {
            throw new SearchException("Error searching", e);
        }
        final DocHitMap docHitMap = new DocHitMap();
        
        // create the similarity function
        final Similarity sim = similarityFactory.getSimilarity(query);
        
        // create the hits
        // NOTE(review): assumes the index is non-empty so that at least one
        // limit exists; an empty limitIterator would throw here -- confirm.
        final Iterator<Interval<File>> limitIterator = hitFactory.limitIterator();
        Interval<File> nextLimit = limitIterator.next();
        
        // while there are results
        // Timing accumulators use the subtract-before/add-after idiom: each
        // accumulator ends up holding the total elapsed time of the bracketed
        // calls.
        long timeForMeta = 0;
        long timeForPropagate = 0;
        // hitCounts[i] counts documents whose best fragment matched more than
        // i query terms (incremented for all i < hitsInDocument below).
        final int[] hitCounts = new int[queryTermCount];
        long totalSearchTime = -System.currentTimeMillis();
        // one-element lookahead into the merged inverted list; null == exhausted
        List<TermFrequency> lookahead = result.hasNext() ? result.next() : null;
        totalSearchTime += System.currentTimeMillis();
        int minHitCount = 1;
        int totalHitCount = 0;
        int totalDocuments = 0;
        int invEntryCount = 0;  // number of inverted-list entries read, for logging
        while (lookahead != null) {
            
            // while there is a next fragment and it belongs to the current document
            // NOTE(review): this assumes all fragment IDs of one document form
            // a contiguous range bounded by fragmentIdLimit -- confirm against
            // HitFactory.limitIterator's contract.
            int hitsInDocument = 0;
            final Map<Integer, List<TermFrequency>> listMap = new TreeMap<Integer, List<TermFrequency>>();
            final List<Integer> wantedIDs = new ArrayList<Integer>();
            while (lookahead != null && lookahead.get(0).getDocumentID() <= nextLimit.fragmentIdLimit) {

                // determine the new number of hits in the document
                // (maximum list size over the document's fragments; presumably
                // the number of distinct query terms matched -- confirm)
                if (lookahead.size() > hitsInDocument) {
                    hitsInDocument = lookahead.size();
                }
                invEntryCount += lookahead.size();
                totalHitCount++;
                
                // add its ID to the wanted IDs
                final int documentID = lookahead.get(0).getDocumentID();
                wantedIDs.add(documentID);
                
                // add its list to the list map
                listMap.put(documentID, lookahead);
                
                // advance to the next fragment
                totalSearchTime -= System.currentTimeMillis();
                lookahead = result.hasNext() ? result.next() : null;
                totalSearchTime += System.currentTimeMillis();
            }
            
            // calculate the similarity for the current document's hits
            if (hitsInDocument > 0) {
                totalDocuments++;
            }

            // skip (prune) documents that match fewer than minHitCount terms
            if (hitsInDocument >= minHitCount) {
                for (int i = 0; i < hitsInDocument; i++) {
                    hitCounts[i]++;
                }
                
                // create the map from hit to tf list
                final Map<Hit, List<TermFrequency>> hitFrequencies =
                    new HashMap<Hit, List<TermFrequency>>();
                timeForMeta -= System.currentTimeMillis();
                // Iterate hits in descending fragment-ID order.
                // NOTE(review): the descending order presumably guarantees that
                // child fragments are visited before their ancestors, which the
                // bottom-up propagation below relies on -- confirm.
                final Set<Entry<Integer, Hit>> entrySet = ((NavigableMap<Integer, Hit>)hitFactory.getDocumentHits(nextLimit.data, wantedIDs)).descendingMap().entrySet();
                timeForMeta += System.currentTimeMillis();
                for (final Entry<Integer, Hit> entry :
                    entrySet) {
                
                    final ChainedHit hit = (ChainedHit)entry.getValue();
                    docHitMap.addHit(hit);
                    
                    final List<TermFrequency> frequencyList;
                    timeForPropagate -= System.currentTimeMillis();
                    if (differenceBased) {
                        // merge the fragment's own tf list with what its
                        // children already propagated up, then pass the merged
                        // list on to this hit's parent
                        frequencyList = addFrequencyLists(
                        listMap.get(entry.getKey()), hitFrequencies.get(hit));
                        propagateFrequenciesUp(hit, hitFrequencies, frequencyList);
                    } else {
                        frequencyList = listMap.get(entry.getKey());
                    }
                    timeForPropagate += System.currentTimeMillis();
                    double similarity = sim.similarity(hit, frequencyList);
                    hit.setScore((float)similarity);
                }
            }
            
            // determine the new minimum number of hits for inclusion in the results
            // (largest i for which at least wantedHits documents already
            // matched more than i query terms; documents matching fewer than i
            // terms are skipped from now on)
            for (int i = queryTermCount - 1; i > minHitCount; i--) {
                if (hitCounts[i] >= wantedHits) {
                    minHitCount = i;
                    break;
                }
            }
            
            // advance to the next document
            if (lookahead != null) {
                nextLimit = limitIterator.next();
            }
        }
        
        final long endTime = System.currentTimeMillis();
        log.info("Finished searching: " + totalHitCount + " hits in " + totalDocuments +
            " documents, took " + (endTime - startTime) + " ms, " + timeForMeta +
            " of that for metadata construction, " + timeForPropagate + " ms for tf list propagation, " +
            totalSearchTime + " ms for inverted list reading, " + invEntryCount + " inverted list entries read.");
        return docHitMap;
    }

    /**
     * Adds this hit's (merged) frequency list to the list accumulated for its
     * parent in {@code hitFrequencies}, so ancestors see the summed
     * frequencies of their descendants. Root hits (no parent) are left as-is.
     *
     * @param hit            the hit whose frequencies are pushed upward
     * @param hitFrequencies accumulator mapping hits to propagated tf lists;
     *                       the parent's entry is replaced by the merged list
     * @param frequencyList  the list to propagate (already includes this
     *                       hit's own frequencies)
     */
    private void propagateFrequenciesUp(ChainedHit hit,
        Map<Hit, List<TermFrequency>> hitFrequencies,
        List<TermFrequency> frequencyList) {
        
        final ChainedHit parent = hit.getParent();
        if (parent != null) {
            List<TermFrequency> newList =
                addFrequencyLists(frequencyList, hitFrequencies.get(parent));
            hitFrequencies.put(parent, newList);
        }
    }

    /**
     * Merges two term-frequency lists, summing the frequencies of matching
     * terms. Entries in the returned list always carry 0 as their second
     * (position/document) component, so lookups via
     * {@link Collections#binarySearch} compare on the remaining fields
     * (presumably the term ID -- depends on {@code TermFrequency.compareTo},
     * confirm).
     *
     * @param listToAdd
     *          The list whose frequencies are merged in, or <code>null</code>
     *          to leave <code>resultList</code> unchanged.
     * @param resultList
     *          The list to return, or <code>null</code> to create a new list.
     *          If this is non-<code>null</code>, it must be sorted; it is
     *          modified in place.
     * @return the merged list; <code>null</code> only if both arguments are
     *         <code>null</code>
     */
    private List<TermFrequency> addFrequencyLists(final List<TermFrequency> listToAdd,
        final List<TermFrequency> resultList) {

        if (listToAdd == null) {
            return resultList;
        } else if (resultList == null) {
            // no accumulator yet: copy listToAdd, zeroing the second component
            final List<TermFrequency> result = new ArrayList<TermFrequency>(listToAdd.size());
            for (final TermFrequency input : listToAdd) {
                result.add(new TermFrequency(input.getTermID(), 0,
                    input.getFrequency()));
            }
            return result;
        } else {
                
            for (final TermFrequency freq : listToAdd) {
                
                // probe with a zeroed copy so the comparison matches the
                // normalized entries stored in resultList
                final TermFrequency zeroFreq = new TermFrequency(freq.getTermID(), 0, freq.getFrequency());
                final int pos = Collections.binarySearch(resultList, zeroFreq);
                if (pos >= 0) {
                    // term already present: replace with the summed frequency
                    resultList.set(pos, new TermFrequency(freq.getTermID(),
                        0, freq.getFrequency() + resultList.get(pos).getFrequency())); 
                } else {
                    // not present: insert at the position binarySearch
                    // reported (-(insertion point) - 1), keeping the list sorted
                    resultList.add(-pos - 1,
                        new TermFrequency(freq.getTermID(), 0,
                            freq.getFrequency()));
                }
                
            }
            return resultList;
        }
    }

    /** No resources to release; the backends are closed by their owners. */
    public void close() {
    }

    /** Returns a human-readable description of the scoring configuration. */
    public String getDescription() {
        return similarityFactory.getDescription() + " kept: " + wantedHits;
    }
}
/*
Copyright (c) 2005-2007 Philipp Dopichaj

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/