package de.uniwue.cs.ir.vsr;

import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;

/**
 * Inverted index over a corpus with pre-computed tf-idf posting weights.
 *
 * <p>For every term the index stores an {@link ITokenInfo} holding the term's
 * idf and one posting ({@link ITokenOccurrence}) per document containing it.
 * Posting weights are {@code (tf / maxTf) * idf}, where {@code maxTf} is the
 * largest raw term frequency observed anywhere in the corpus.</p>
 */
public class InvertedIndexImpl implements IInvertedIndex {
	
	// Maps each term to its idf and postings list.
	private HashMap<String, ITokenInfo> termsHash = new HashMap<String, ITokenInfo>();
	// Maximal raw term frequency over the entire corpus (tf normalizer).
	private int maxTf = 0;

	/**
	 * Builds the index for the given corpus and pre-computes the tf-idf
	 * weight of every posting.
	 *
	 * @param corpus the document collection to index; its {@code size()} is
	 *               used as N in {@code idf = ln(N / df)}
	 */
	public InvertedIndexImpl(ICorpus corpus) {
		
		// First pass: build the postings lists and find maxTf.
		Iterator<IDocument> corpusItr = corpus.iterator();
		while (corpusItr.hasNext()) {
			IDocument doc = corpusItr.next();

			// Terms already indexed for this document. Without this guard a
			// term occurring k times in the document would receive k identical
			// postings, inflating the document frequency (and thus corrupting
			// the idf computed below). If doc.iterator() already yields unique
			// terms, the guard is a harmless no-op.
			HashSet<String> seenInDoc = new HashSet<String>();

			Iterator<String> docItr = doc.iterator();
			while (docItr.hasNext()) {
				String term = docItr.next();
				if (!seenInDoc.add(term)) {
					continue; // posting for (term, doc) already created
				}

				// Lazily create the term's ITokenInfo on first sighting.
				if (!termsHash.containsKey(term)) {
					termsHash.put(term, new TokenInfoImpl());
				}

				// One posting per (term, document), carrying all positions.
				List<Integer> positions = doc.getTermPositions(term);
				termsHash.get(term).addPosting(new TokenOccurrenceImpl(doc, positions));

				// Track the largest raw term frequency seen in the corpus.
				if (positions.size() > maxTf) {
					maxTf = positions.size();
				}
			}
		}
		
		double N = corpus.size();
		// Second pass: compute idf per term and the tf-idf weight per posting.
		for (ITokenInfo tokenInfo : termsHash.values()) {
			// Document frequency: number of documents containing the term
			// (exactly one posting per document thanks to the guard above).
			int df = tokenInfo.getTok().size();
			tokenInfo.setIdf(Math.log(N / df));

			for (ITokenOccurrence tokenOcc : tokenInfo.getTok()) {
				// tf normalized by the corpus-wide maximum term frequency.
				double tf = tokenOcc.getPositions().size() / (double) getMaxTf();
				tokenOcc.setWeight(tf * tokenInfo.getIdf());
			}
		}
		
	}

	/**
	 * Returns an iterator over the weighted postings of {@code term}, or an
	 * empty iterator when the term is not in the index (the original threw a
	 * NullPointerException in that case).
	 */
	@Override
	public Iterator<? extends ITokenOccurrence> getWeightsForTerm(String term) {
		ITokenInfo tokInf = termsHash.get(term);
		if (tokInf == null) {
			return Collections.<ITokenOccurrence>emptyIterator();
		}
		return tokInf.getTok().iterator();
	}

	/**
	 * Returns the {@link ITokenInfo} for {@code term}, or {@code null} when
	 * the term is not in the index.
	 */
	@Override
	public ITokenInfo getTokenInfo(String term) {
		return termsHash.get(term);
	}

	/**
	 * Returns the inverse document frequency of {@code term}.
	 *
	 * @throws NullPointerException if the term is not in the index
	 */
	@Override
	public double getIdf(String term) {
		return termsHash.get(term).getIdf();
	}

	/** Returns the maximal raw term frequency observed over the whole corpus. */
	@Override
	public int getMaxTf() {
		return maxTf;
	}

	/**
	 * Document norm — not implemented yet.
	 * (Original comment, translated from Hungarian: "no idea yet what a
	 * document's norm is; need to look it up.")
	 *
	 * @return always 0 until implemented
	 */
	@Override
	public double getNormDoc(IDocument doc) {
		// TODO implement, e.g. sqrt of the sum of squared posting weights of doc
		return 0;
	}

}
