/**
 *
 */
package qy.course.cse494.deprecated;

import java.io.IOException;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.Vector;

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermDocs;
import org.apache.lucene.index.TermEnum;

/**
 * @author yqi
 */
/**
 * Computes TF-IDF document vectors from a Lucene index on disk.
 *
 * <p>Scans every term in the index once, recording per-document raw term
 * frequencies, each document's maximum term frequency, and each term's
 * inverse document frequency, then converts the raw frequencies in place
 * into TF-IDF weights and records each document vector's Euclidean norm.
 *
 * <p>NOTE(review): {@code String.hashCode()} of the term text is used as the
 * term id throughout; two distinct terms can collide and would then share a
 * single idf/frequency slot. The {@code _term_string_set} check only detects
 * duplicate term strings, not hash collisions — consider a proper
 * term-to-id dictionary.
 *
 * @author yqi
 */
public class DocumentVectorCalculator
{

	// Augmented-TF smoothing: weight = TF_CONSTANT_1 + TF_CONSTANT_2 * (tf / maxTf)
	static Float TF_CONSTANT_1 = 0.5f;
	static Float TF_CONSTANT_2 = 0.5f;
	// Added to the document frequency in the IDF denominator (0 => plain idf).
	static Float IDF_CONSTANT = 0f;
	// When true, raw term frequencies are normalized by the document's max frequency.
	static boolean IS_TF_NORMALIZED = true;
	static boolean IS_DEBUGGING = false;
	// Upper bound on the number of terms processed when IS_DEBUGGING is set.
	static int MAX_TEST_NUM = 10000;

	// input: directory that holds the Lucene index
	String _data_source_dir = null;

	// output
	// NOTE(review): _doc_vt is never populated anywhere in this class.
	Vector<Map<Integer, Float>> _doc_vt = new Vector<Map<Integer,Float>>();
	// One Euclidean norm per document, in the (ascending doc-id) iteration
	// order of _doc_map_of_term_freq_map.
	Vector<Float> _doc_vector_norm_vt = new Vector<Float>();


	// Intermediate state; keyed by the hash-code of the term text (see class note).
	Map<Integer, Float> _term_doc_idf_map = new TreeMap<Integer, Float>();
	// doc id -> (term id -> raw frequency, later overwritten with TF-IDF weight)
	Map<Integer, Map<Integer, Float>> _doc_map_of_term_freq_map = new TreeMap<Integer, Map<Integer,Float>>(); // intermediate value
	// doc id -> highest raw term frequency seen in that document
	Map<Integer, Float> _doc_max_term_freq_map = new TreeMap<Integer, Float>();

	// All term strings seen so far; used only to report duplicate term text.
	Set<String> _term_string_set = new TreeSet<String>();

	/**
	 * Builds the calculator and immediately scans the index.
	 *
	 * @param data_source_dir directory of the Lucene index to read
	 */
	public DocumentVectorCalculator(final String data_source_dir)
	{
		_data_source_dir = data_source_dir;
		_init();
	}

	private void _init()
	{
		Update();
	}

	/**
	 * Once the index files of the documents/corpus are changed,
	 * the method below should be called to recalculate the document
	 * vectors.
	 *
	 * <p>Collects raw term frequencies, per-document maxima, and idf values,
	 * then delegates to {@link #calculate(boolean)} to turn them into
	 * TF-IDF weights. I/O failures are reported to stderr and swallowed,
	 * matching the original best-effort behavior.
	 */
	public void Update()
	{
		IndexReader reader = null;
		TermEnum termenum = null;
		try
		{
			reader = IndexReader.open(_data_source_dir);
			int num_of_documents = reader.numDocs();
			System.out.println(" Number of Docs in Index :" + reader.numDocs());
			termenum = reader.terms();
			int count_test = MAX_TEST_NUM;
			int term_num = 0;
			while(termenum.next())
			{
				++term_num;
				if(IS_DEBUGGING)
				{
					// Cap the scan at MAX_TEST_NUM terms while debugging.
					if(count_test-- == 0)
						break;
				}
				Term termval = termenum.term();

				String term_str = termval.text();
				int term_id = term_str.hashCode();

				if(_term_string_set.contains(term_str))
				{
					// A TermEnum should never repeat a term; flag it loudly.
					System.out.println("!!!!!!!!Catch you! "+term_str);
				}else
				{
					_term_string_set.add(term_str);
				}

				// first calculate the values about idf:
				// idf = log(N / (df + IDF_CONSTANT)) — float division because
				// IDF_CONSTANT promotes the denominator to float.
				int doc_freq = termenum.docFreq();
				_term_doc_idf_map.put(term_id, (float)Math.log(num_of_documents/(doc_freq+IDF_CONSTANT)));

				// next calculate the values about tf
				TermDocs termdocs = reader.termDocs(termval);
				try
				{
					while(termdocs.next())
					{
						Map<Integer, Float> doc_term_freq_map = new TreeMap<Integer, Float>();
						int doc_id = termdocs.doc();
						if(_doc_map_of_term_freq_map.containsKey(doc_id))
						{
							doc_term_freq_map = _doc_map_of_term_freq_map.get(doc_id);
						}else
						{
							// First time we see this document: seed its max frequency.
							_doc_max_term_freq_map.put(doc_id, 0.0f);
						}
						// Accumulate (hash collisions would merge counts here).
						float term_freq = 0f;
						if(doc_term_freq_map.containsKey(term_id))
						{
							term_freq = doc_term_freq_map.get(term_id);
						}
						term_freq += termdocs.freq();
						doc_term_freq_map.put(term_id, term_freq);

						_doc_map_of_term_freq_map.put(doc_id, doc_term_freq_map);

						// Track the document's maximum raw term frequency.
						if(term_freq > _doc_max_term_freq_map.get(doc_id))
						{
							_doc_max_term_freq_map.put(doc_id, term_freq);
						}
					}
				} finally
				{
					// BUGFIX: each per-term TermDocs was previously leaked.
					termdocs.close();
				}

			}
			System.out.println("Term number is:" + term_num);
			long mem = Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory();
			System.out.println("****"+mem);
			calculate(true);

		} catch(IOException e)
		{
			e.printStackTrace();
		} finally
		{
			// BUGFIX: the TermEnum and IndexReader were never closed,
			// leaking file handles on every Update() call.
			try
			{
				if(termenum != null)
					termenum.close();
			} catch(IOException e)
			{
				e.printStackTrace();
			}
			try
			{
				if(reader != null)
					reader.close();
			} catch(IOException e)
			{
				e.printStackTrace();
			}
		}
	}

	/**
	 * Dispatches to one of the two weighting implementations and prints the
	 * elapsed wall-clock time.
	 *
	 * @param which {@code true} selects the in-memory
	 *              {@link #calculate_document_vectors()} (also fills the norm
	 *              vector); {@code false} selects the index-rescanning
	 *              {@link #compute_document_vectors()}.
	 */
	private void calculate(boolean which)
	{
		long startTime = System.currentTimeMillis();
		if(which)
		{
			calculate_document_vectors();
		}else
		{
			compute_document_vectors();
		}
		long stopTime = System.currentTimeMillis();
		long runTime = stopTime - startTime;
		System.out.println("Run time: " + runTime);
	}


	/**
	 * Alternative TF-IDF weighting pass that re-reads the index instead of
	 * iterating the in-memory maps.
	 *
	 * <p>Precondition: {@link #Update()} must already have filled
	 * {@code _doc_max_term_freq_map}, {@code _doc_map_of_term_freq_map} and
	 * {@code _term_doc_idf_map}, otherwise the unchecked map lookups below
	 * throw {@code NullPointerException}. Unlike
	 * {@link #calculate_document_vectors()}, this path does NOT fill
	 * {@code _doc_vector_norm_vt}.
	 */
	protected void compute_document_vectors()
	{
		IndexReader reader = null;
		TermEnum termenum = null;
		try
		{
			reader = IndexReader.open(_data_source_dir);
			int num_of_documents = reader.numDocs();
			termenum = reader.terms();

			int count_test = MAX_TEST_NUM;
			while(termenum.next())
			{
				if(IS_DEBUGGING)
				{
					if(count_test-- == 0)
						break;
				}
				Term termval = termenum.term();
				String term_str = termval.text();
				int term_id = term_str.hashCode();

				// next calculate the values about tf
				TermDocs termdocs = reader.termDocs(termval);
				try
				{
					while(termdocs.next())
					{
						int doc_id = termdocs.doc();
						float max_freq = _doc_max_term_freq_map.get(doc_id);
						Map<Integer, Float> cur_term_freq_map = _doc_map_of_term_freq_map.get(doc_id);

						// Raw frequency -> (optionally smoothed) tf * idf, in place.
						float weight = cur_term_freq_map.get(term_id);
						if(IS_TF_NORMALIZED)
						{
							weight = TF_CONSTANT_1 + TF_CONSTANT_2 * (weight/max_freq);
						}
						weight *= _term_doc_idf_map.get(term_id);

						cur_term_freq_map.put(term_id, weight);
					}
				} finally
				{
					// BUGFIX: each per-term TermDocs was previously leaked.
					termdocs.close();
				}

			}
		} catch(IOException e)
		{
			e.printStackTrace();
		} finally
		{
			// BUGFIX: the TermEnum and IndexReader were never closed.
			try
			{
				if(termenum != null)
					termenum.close();
			} catch(IOException e)
			{
				e.printStackTrace();
			}
			try
			{
				if(reader != null)
					reader.close();
			} catch(IOException e)
			{
				e.printStackTrace();
			}
		}
	}

	/**
	 * The method should not be called unless the information about
	 * term-frequency and inverse-document-frequency is collected.
	 *
	 * <p>Converts each document's raw term frequencies into TF-IDF weights
	 * in place and appends the vector's Euclidean norm to
	 * {@code _doc_vector_norm_vt} (one entry per document, in ascending
	 * doc-id order).
	 */
	protected void calculate_document_vectors()
	{
		for(Integer cur_doc_id : _doc_map_of_term_freq_map.keySet())
		{
			float max_freq = _doc_max_term_freq_map.get(cur_doc_id);
			float norm_value = 0f;

			Map<Integer, Float> cur_term_freq_map = _doc_map_of_term_freq_map.get(cur_doc_id);
			for(Integer cur_term_id : cur_term_freq_map.keySet())
			{
				// deal with the tf: optional augmented-TF smoothing, then idf.
				float weight = cur_term_freq_map.get(cur_term_id);
				if(IS_TF_NORMALIZED)
				{
					weight = TF_CONSTANT_1 + TF_CONSTANT_2 * (weight/max_freq);
				}
				weight *= _term_doc_idf_map.get(cur_term_id);

				// Overwrite the raw frequency with the final weight.
				cur_term_freq_map.put(cur_term_id, weight);
				norm_value += weight * weight;
			}
			_doc_vector_norm_vt.add((float)Math.sqrt(norm_value));
		}
		long mem = Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory();
		System.out.println("****"+mem);
	}

	/**
	 * Prints basic size statistics of the collected maps for manual checking.
	 */
	public void test()
	{
		System.out.println("The number of documents is:"+_doc_map_of_term_freq_map.size());
		System.out.println("The number of terms is :"+_term_doc_idf_map.size());
		System.out.println("The size of the term set is:"+_term_string_set.size());

	}

	/**
	 * Smoke-test entry point: builds vectors from the "result3index"
	 * directory and prints summary statistics.
	 *
	 * @param args unused
	 */
	public static void main(String[] args)
	{
		System.out.println("Welcome to the class DataCorpus!");
		DocumentVectorCalculator dc = new DocumentVectorCalculator("result3index");
		dc.test();
	}

}
