package data;

import java.util.Map;
import java.util.TreeMap;
import java.util.Vector;

/**
 * A tf-idf weighted term vector for a single document, normalized to unit
 * Euclidean length so that cosine similarity reduces to a plain dot product.
 *
 * <p>Not thread-safe: {@link #normalize()} mutates the weight map in place.
 */
public class Document implements Data<Document>
{
	/**
	 * Builds the tf-idf vector for a document and normalizes it.
	 *
	 * @param terms raw token stream of the document; duplicates count
	 *              towards term frequency
	 * @param corp  corpus used to look up inverse document frequencies
	 */
	public Document(Vector<String> terms, Corpus corp)
	{
		isNormCalculated = false;
		isNormalized = false;
		norm = 0;
		document = new TreeMap<String,Double>();
		
		// Count raw term frequencies.
		TermSet termFrequency = new TermSet();
		for(String s : terms)
		{
			termFrequency.addTerm(s);
		}
		
		// Weight each term: w = (1 + log10(tf)) * log10(idf).
		// NOTE(review): this assumes getInverseDocumentFrequency returns the
		// raw ratio N/df, so the log belongs here. If Corpus already returns
		// a log-scaled idf, this double-logs — confirm against Corpus.
		for(Map.Entry<String, Double> e : termFrequency.getTermFrequencies().entrySet())
		{
			double tf  = e.getValue();
			double idf = corp.getInverseDocumentFrequency(e.getKey());
			double w = (1+Math.log10(tf))*(Math.log10(idf));
			document.put(e.getKey(), w);
		}
		normalize();
	}
	
	/**
	 * Wraps an already-built weight vector and normalizes it. Used by
	 * {@link #add} and {@link #getZero}. The map is adopted, not copied.
	 */
	private Document(TreeMap<String,Double> doc)
	{
		isNormCalculated = false;
		isNormalized = false;
		norm = 0;
		
		document = doc;
		
		normalize();
	}
	
	/**
	 * Cosine distance: {@code 1 - |cos(angle)|} between the two vectors.
	 *
	 * @param d the other document
	 * @return a value in [0, 1]; if either vector is empty/zero the cosine
	 *         is undefined and the maximum distance 1.0 is returned
	 */
	@Override
	public double getDistance(Document d)
	{
		double scalarProduct = 0;
		for(Map.Entry<String, Double> e : document.entrySet())
		{
			// Single lookup instead of containsKey + get.
			Double otherWeight = d.document.get(e.getKey());
			if(otherWeight != null)
			{
				scalarProduct += e.getValue()*otherWeight;
			}
		}
		
		double denominator = getNorm()*d.getNorm();
		if(denominator == 0)
		{
			// At least one vector is zero: cosine is undefined, so report
			// maximum distance instead of producing NaN.
			return 1.0;
		}
		return 1.0-Math.abs(scalarProduct/denominator);
	}
	
	/**
	 * Component-wise sum of the two (normalized) vectors; the result is
	 * itself re-normalized by the private constructor. Suitable for
	 * centroid accumulation in spherical k-means.
	 *
	 * @param other the document to add to this one
	 * @return a new normalized Document; neither operand's entries change
	 *         (the normalize() calls below are no-ops on already-normalized
	 *         documents and exist only as a safety net)
	 */
	@Override
	public Document add(Document other) 
	{
		normalize();
		other.normalize();
		TreeMap<String,Double> result = new TreeMap<String,Double>();
		result.putAll(document);
		
		for(Map.Entry<String, Double> e : other.document.entrySet())
		{
			// merge: sum weights for shared terms, copy the rest.
			Double existing = result.get(e.getKey());
			if(existing != null)
			{
				result.put(e.getKey(), existing + e.getValue());
			}
			else
			{
				result.put(e.getKey(), e.getValue());
			}
		}
		
		return new Document(result);
	}
	
	/** Additive identity for {@link #add}: the empty (zero) vector. */
	@Override
	public Document getZero() 
	{
		return new Document(new TreeMap<String,Double>());
	}
	
	/**
	 * Intentional no-op: documents are kept unit-length, so scaling would
	 * be undone by the next normalization anyway.
	 */
	@Override
	public Document scale(double factor) 
	{
		return this;
	}
	
	/**
	 * Euclidean (L2) norm of the weight vector, computed lazily and cached.
	 *
	 * @return the norm; 0 for an empty or all-zero vector
	 */
	public double getNorm()
	{
		if(!isNormCalculated)
		{
			norm = 0;
			for(Double d : document.values())
			{
				norm += d*d;
			}
			norm = Math.sqrt(norm);
			isNormCalculated = true;
		}
		return norm;
	}
	
	/** @return the term set in descending (reverse alphabetical) order */
	@Override
	public String toString()
	{
		return document.descendingKeySet().toString();
	}
	
	/**
	 * Scales the weight vector to unit length in place. Idempotent.
	 * An empty or all-zero vector is left untouched (its norm stays 0);
	 * previously this divided by zero, producing NaN weights and a falsely
	 * cached norm of 1.
	 */
	public void normalize()
	{
		if(!isNormalized)
		{
			double n = getNorm();
			if(n > 0)
			{
				for(Map.Entry<String,Double> e : document.entrySet())
				{
					e.setValue(e.getValue()/n);
				}
				norm = 1;
			}
			isNormalized = true;
		}
	}
	
	// term -> tf-idf weight; protected because subclasses/siblings in this
	// package may read it directly (see getDistance/add).
	protected TreeMap<String,Double> document;
	
	private boolean isNormCalculated; // lazy-init guard for norm
	private boolean isNormalized;     // set once the vector is unit-length
	private double norm;              // cached L2 norm (1 after normalize)

}
