/**
 * Reads term/document statistics from a Lucene index directory and
 * constructs tf-idf weighted document vectors from them.
 */
package qy.course.cse494.deprecated;

import java.io.IOException;
import java.util.Map;
import java.util.TreeMap;
import java.util.Vector;

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermDocs;
import org.apache.lucene.index.TermEnum;

/**
 * @author yqi
 *
 */
/**
 * Builds tf-idf weighted document vectors from a Lucene index.
 *
 * <p>Usage: construct with the index directory; the constructor scans the
 * index and populates {@code _doc_vt} (one sparse term-id -&gt; weight map per
 * document) and {@code _doc_vector_norm_vt} (the Euclidean norm of each vector).
 *
 * <p>NOTE(review): terms are keyed by {@code String.hashCode()} of their text;
 * hash collisions between distinct terms would merge their statistics.
 *
 * @author yqi
 */
public class DataCorpus
{
	// Normalized tf weighting: tf' = TF_CONSTANT_1 + TF_CONSTANT_2 * (tf / maxTfInDoc)
	static double TF_CONSTANT_1 = 0.5;
	static double TF_CONSTANT_2 = 0.5;
	// Additive smoothing in the idf denominator: idf = ln(N / (df + IDF_CONSTANT))
	static double IDF_CONSTANT = 0;
	static boolean IS_TF_NORMALIZED = true;
	static boolean IS_DEBUG = false;
	
	// input: directory holding the Lucene index
	String _data_source_dir = null;
	
	// output: per-document sparse vectors (term hash -> tf-idf weight) and their norms
	Vector<Map<Integer, Double>> _doc_vt = new Vector<Map<Integer,Double>>();
	Vector<Double> _doc_vector_norm_vt = new Vector<Double>();
	
	
	// Intermediate state, rebuilt on every Update():
	// term hash -> idf value
	Map<Integer, Double> _term_doc_idf_map = new TreeMap<Integer, Double>();
	// doc id -> (term hash -> raw term frequency)
	Map<Integer, Map<Integer, Integer>> _doc_map_of_term_freq_map = new TreeMap<Integer, Map<Integer,Integer>>();
	// doc id -> highest raw term frequency in that doc (for tf normalization)
	Map<Integer, Integer> _doc_max_term_freq_map = new TreeMap<Integer, Integer>();

	/**
	 * Default constructor. Immediately scans the index and builds the vectors.
	 * @param data_source_dir path of the Lucene index directory
	 */
	public DataCorpus(final String data_source_dir)
	{
		_data_source_dir = data_source_dir;
		_init();
	}
	
	private void _init()
	{
		Update();
	}
	
	/**
	 * Recomputes all document vectors from the index. Call this whenever the
	 * underlying index changes. Safe to call repeatedly: all intermediate and
	 * output state is cleared before the rescan.
	 */
	public void Update()
	{
		// Reset state so repeated calls do not accumulate stale/duplicate entries.
		_term_doc_idf_map.clear();
		_doc_map_of_term_freq_map.clear();
		_doc_max_term_freq_map.clear();
		_doc_vt.clear();
		_doc_vector_norm_vt.clear();
		try
		{
			IndexReader reader = IndexReader.open(_data_source_dir);
			try
			{
				int num_of_documents = reader.numDocs();
				TermEnum termenum = reader.terms();
				try
				{
					int test_limit = 10; // in debug mode, only look at the first few terms
					while(termenum.next())
					{
						if(IS_DEBUG && test_limit-- == 0)
						{
							break;
						}
						Term termval = termenum.term();
						int term_id = termval.text().hashCode();
						
						// idf: ln(N / (df + smoothing))
						int doc_freq = termenum.docFreq();
						_term_doc_idf_map.put(term_id,
								Math.log(num_of_documents / (doc_freq + IDF_CONSTANT)));
						
						// tf: count this term in every document that contains it
						TermDocs termdocs = reader.termDocs(termval);
						try
						{
							while(termdocs.next())
							{
								int doc_id = termdocs.doc();
								Map<Integer, Integer> doc_term_freq_map;
								if(_doc_map_of_term_freq_map.containsKey(doc_id))
								{
									doc_term_freq_map = _doc_map_of_term_freq_map.get(doc_id);
								}
								else
								{
									// BUG FIX: the original reused the map object from a
									// previously seen document here, so distinct documents
									// ended up sharing (and corrupting) one frequency map.
									// Each new document must get its own map.
									doc_term_freq_map = new TreeMap<Integer, Integer>();
									_doc_map_of_term_freq_map.put(doc_id, doc_term_freq_map);
									_doc_max_term_freq_map.put(doc_id, 0);
								}
								
								int term_freq = 1;
								if(doc_term_freq_map.containsKey(term_id))
								{
									term_freq = doc_term_freq_map.get(term_id) + 1;
								}
								doc_term_freq_map.put(term_id, term_freq);
								
								// track the document's maximum term frequency
								if(term_freq > _doc_max_term_freq_map.get(doc_id))
								{
									_doc_max_term_freq_map.put(doc_id, term_freq);
								}
							}
						}
						finally
						{
							termdocs.close(); // previously leaked, one per term
						}
					}
				}
				finally
				{
					termenum.close(); // previously leaked
				}
				// Previously commented out, which left the output vectors empty.
				calculate_document_vectors();
			}
			finally
			{
				reader.close(); // previously leaked
			}
		} catch(IOException e)
		{
			e.printStackTrace();
		}
	}
	
	/**
	 * Converts the collected tf and idf statistics into weighted document
	 * vectors and their Euclidean norms. Must only be called after the
	 * term-frequency / inverse-document-frequency maps are fully populated.
	 */
	protected void calculate_document_vectors()
	{
		for(Integer cur_doc_id : _doc_map_of_term_freq_map.keySet())
		{
			Map<Integer, Double> term_weight_map = new TreeMap<Integer, Double>();
			int max_freq = _doc_max_term_freq_map.get(cur_doc_id);
			double norm_value = 0;
				
			Map<Integer, Integer> cur_term_freq_map = _doc_map_of_term_freq_map.get(cur_doc_id);
			for(Integer cur_term_id : cur_term_freq_map.keySet())
			{
				// tf component (optionally max-normalized)
				double weight = cur_term_freq_map.get(cur_term_id);
				if(IS_TF_NORMALIZED)
				{
					weight = TF_CONSTANT_1 + TF_CONSTANT_2 * (weight / max_freq);
				}
				// combine with idf
				weight *= _term_doc_idf_map.get(cur_term_id);
				
				term_weight_map.put(cur_term_id, weight);
				norm_value += weight * weight;
			}
			_doc_vt.add(term_weight_map);
			_doc_vector_norm_vt.add(Math.sqrt(norm_value));
			
			if(IS_DEBUG)
			{
				// rough heap-usage trace; guarded so normal runs stay quiet
				long mem = Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory();
				System.out.println("****" + mem);
			}
		}
	}
	
	/**
	 * Prints every document vector plus corpus statistics to stdout.
	 */
	public void test()
	{
		for(Map<Integer, Double> cur_map : _doc_vt)
		{
			System.out.println(cur_map);
		}
		System.out.println("The number of documents is:"+_doc_vt.size());
		System.out.println("The number of terms is :"+_term_doc_idf_map.size());
		
	}
	/**
	 * Builds the corpus from the hard-coded index directory and dumps it.
	 * @param args unused
	 */
	public static void main(String[] args)
	{
		System.out.println("Welcome to the class DataCorpus!");
		DataCorpus dc = new DataCorpus("result3index");
		dc.test();
	}

}
