package index;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.StringTokenizer;

import net.barashev.seclub.corpus.Corpus;
import net.barashev.seclub.corpus.Document;
import net.barashev.seclub.io.BufferManager;
import net.barashev.seclub.io.DataMarshaller;
import net.barashev.seclub.io.OutputSlot;
import net.barashev.seclub.io.file.FileManager;
import net.barashev.seclub.io.file.VFile;
import net.barashev.seclub.io.marshaller.IntegerMarshaller;

/**
 * Base class for corpus indexers. Drives the indexing pipeline ({@link #index()}),
 * measures wall-clock duration and disk traffic, and provides shared helpers for
 * building per-document positional/metadata posting lists.
 *
 * Subclasses implement {@link #abstractIndex()} with the concrete indexing strategy.
 */
public abstract class Indexator {
    /** Name of the virtual file that stores per-document link-based weights. */
    public static final String FILENAME_WEIGHT_LINKS = "linkRank";

    /**
     * A stop list of 25 semantically non-selective words which are common in
     * Reuters-RCV1, plus the web-specific noise tokens "http" and "www".
     *
     * Source: "Introduction to Information Retrieval" (Manning, Raghavan, Schuetze).
     *
     * NOTE(review): exposed as a mutable set for backward compatibility with
     * existing callers; treat it as read-only.
     */
    public static final Set<String> BAN_LIST_TOKENs = new HashSet<String>(
            Arrays.asList(
                    "a", "an", "and", "are", "as", "at", "be", "by", "for",
                    "from", "has", "he", "in", "is", "it", "its", "of", "on",
                    "that", "the", "to", "was", "were", "will", "with",
                    // web-specific additions, not part of the original 25-word list
                    "http", "www"));

    final protected BufferManager bufferMgr;
    final protected FileManager fileMgr;
    final protected Corpus corpus;
    // Tracks the in-memory budget; decremented as postings are accumulated.
    protected MemoryManager memory;
    private Timer timer = new Timer();

    // Number of write operations issued to disk by the subclass implementation.
    protected int amountWriteToDisk = 0;

    // Total disk traffic of the last index() run, as reported by
    // CalculatorOfWorkingWithDisk.
    private int sizeOfWorkingWithDisk = 0;

    /**
     * @param corpus     document collection to index
     * @param fileMgr    factory for virtual files holding index data
     * @param bufferMgr  buffered I/O slots over those files
     * @param sizeMemory in-memory budget handed to the {@link MemoryManager}
     */
    public Indexator(Corpus corpus, FileManager fileMgr,
            BufferManager bufferMgr, int sizeMemory) {
        this.corpus = corpus;
        this.fileMgr = fileMgr;
        this.bufferMgr = bufferMgr;
        this.memory = new MemoryManager(sizeMemory);
    }

    /**
     * Runs the full indexing pass, timing it and recording total disk traffic.
     *
     * @throws IOException if the subclass indexing strategy fails on I/O
     */
    public void index() throws IOException {
        timer.run();
        CalculatorOfWorkingWithDisk.reset();
        // NOTE(review): link-weight computation is currently disabled;
        // re-enable by uncommenting the next line.
        // createWeightsOfDocumentsUsingOutgoingLinks();
        abstractIndex();
        timer.stop();
        sizeOfWorkingWithDisk = CalculatorOfWorkingWithDisk.compute();
    }

    /** @return disk traffic accumulated during the last {@link #index()} call */
    public int sizeOfWorkingWithDisk(){
        return sizeOfWorkingWithDisk;
    }

    /**
     * Concrete indexing strategy supplied by the subclass.
     *
     * @throws IOException on any I/O failure while building the index
     */
    protected abstract void abstractIndex() throws IOException;

    /** @return duration of the last {@link #index()} run, in milliseconds */
    public long getDurationInMiliSec() {
        return timer.getDurationInMiliSec();
    }

    /** @return the configured in-memory budget size */
    public int getMemorySize() {
        return memory.size();
    }

    /**
     * Computes a link-based weight per document and persists the result.
     * Currently unused (the call in {@link #index()} is commented out).
     */
    private void createWeightsOfDocumentsUsingOutgoingLinks()
            throws IOException {
        Balance balance = new Balance(corpus, memory.size());
        Map<Integer, Integer> weightLinks = balance.weighLinks();
        writeWeightLinks(weightLinks);
    }

    /**
     * Writes (docId, weight) pairs to {@link #FILENAME_WEIGHT_LINKS},
     * ordered by ascending document id.
     *
     * @param weightLinks document id -> link weight
     * @throws IOException on write failure
     */
    private void writeWeightLinks(Map<Integer, Integer> weightLinks)
            throws IOException {
        Integer[] sortedIdDoc = weightLinks.keySet().toArray(new Integer[0]);
        Arrays.sort(sortedIdDoc);
        VFile<Integer> file = this.fileMgr.createFile(FILENAME_WEIGHT_LINKS,
                new IntegerMarshaller());
        OutputSlot<Integer, DataMarshaller<Integer>> slot = this.bufferMgr
                .allocateOutputSlot(file, false);
        // Release the slot even if an append/flush fails, so the buffer
        // is never leaked (the original code leaked it on exception).
        try {
            for (Integer idDoc : sortedIdDoc) {
                slot.append(idDoc);
                slot.append(weightLinks.get(idDoc));
            }
            slot.flush();
        } finally {
            slot.release();
        }
    }

    /**
     * Builds the in-memory index for a single document: positional postings
     * from the body, then metadata postings from the title.
     *
     * @param doc document whose content is XML understood by XML_Informational
     * @return term -> posting list for this document
     */
    protected Map<String, PostingList> index(Document doc) {
        XML_Informational xml_info = new XML_Informational(doc.getContent());

        Map<String, PostingList> indexOfDocument = recordPositions(doc.getID(),
                xml_info.getTermsInBody());
        indexOfDocument = recordMetaDatas(doc.getID(), xml_info
                .getTermsInTitle(), indexOfDocument);

        return indexOfDocument;
        //return indexForTestCorpus(doc);
    }

    /**
     * Fetches the posting list for {@code term}, creating (and registering)
     * an empty one on first sight and charging the memory budget for the
     * new list's idDoc and N headers.
     */
    private PostingList getOrCreatePostingList(
            Map<String, PostingList> indexOfDocument, String term) {
        PostingList pList = indexOfDocument.get(term);
        if (pList == null) {
            pList = new PostingList();
            indexOfDocument.put(term, pList);
            memory.subtractIntegers(2);// idDoc and N
        }
        return pList;
    }

    /**
     * Adds title-term metadata postings for {@code docID} into an existing
     * per-document index. Tolerates a {@code null} term list (document
     * without a title).
     */
    private Map<String, PostingList> recordMetaDatas(int docID,
            List<String> terms, Map<String, PostingList> indexOfDocument) {
        if (terms != null) {
            for (String term : terms) {
                PostingList pList = getOrCreatePostingList(indexOfDocument, term);
                pList.addMetaData(docID, 1);// 1 - <title> content Term
                memory.subtractInteger();
            }
        }
        return indexOfDocument;
    }

    /**
     * Records 1-based term positions of the document body into a fresh
     * per-document index.
     */
    private Map<String, PostingList> recordPositions(int docID,
            List<String> terms) {
        Map<String, PostingList> indexOfDocument = new HashMap<String, PostingList>();
        int positionTerm = 1;
        for (String term : terms) {
            PostingList pList = getOrCreatePostingList(indexOfDocument, term);
            pList.addPositional(docID, positionTerm);
            memory.subtractInteger();
            positionTerm++;
        }
        return indexOfDocument;
    }

    /*
     * Test-only variant of {@link #index(Document)}: treats the raw content
     * as whitespace-separated tokens instead of parsing it as XML.
     */
    private Map<String, PostingList> indexForTestCorpus(Document doc) {
        Map<String, PostingList> indexOfDocument = new HashMap<String, PostingList>();
        int positionTerm = 1;
        for (StringTokenizer stTokenizer = new StringTokenizer(doc.getContent());stTokenizer.hasMoreTokens();){
            String term = stTokenizer.nextToken();
            PostingList pList = getOrCreatePostingList(indexOfDocument, term);
            pList.addPositional(doc.getID(), positionTerm);
            memory.subtractInteger();
            positionTerm++;
        }
        return indexOfDocument ;
    }

    /*
     * Test-only helper: splits content on whitespace into a token list.
     * NOTE(review): despite the name, no Porter stemming is applied here.
     */
    private List<String> streamOfTermsPorter(String content) {
        List<String> rez = new ArrayList<String>();
        for (StringTokenizer tkzr = new StringTokenizer(content); tkzr
                .hasMoreElements();) {
            rez.add(tkzr.nextToken());
        }
        return rez;
    }

    /** @return number of disk write operations performed so far */
    public int amountWriteToDisk() {
        return amountWriteToDisk ;
    }

    /** @return the memory-budget manager used by this indexer */
    public MemoryManager getMemoryManager() {
        return memory;
    }
}
