/**
 * Creates document vectors based on TF-IDF weights. The result is stored as
 * in-memory maps (term-id -> idf, and term-id -> (doc-id -> weight)), which can
 * be exported to and re-imported from a plain-text index file.
 */
package qy.course.cse494.ir;

import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.HashMap;
import java.util.Map;

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermDocs;
import org.apache.lucene.index.TermEnum;

import qy.course.cse494.config.ConfigureCenter;

/**
 * @author qyan
 */
/**
 * Builds per-document TF-IDF weight vectors from a Lucene index.
 *
 * Output maps:
 *   _term_doc_idf_map          term-id -> idf value
 *   _term_doc_weight_index_map term-id -> (doc-id -> tf-idf weight)
 *
 * The computed maps can be serialized with {@link #export_index(String)} and
 * restored with {@link #import_index(String)}; the two methods must mirror
 * each other's section order exactly.
 *
 * @author qyan
 */
public class DocumentIndex 
{
	// constants, loaded once from the configuration center
	static float TF_CONSTANT_1 = (float) ConfigureCenter.get_tfidf_constant_tf_1();
	static float TF_CONSTANT_2 = (float) ConfigureCenter.get_tfidf_constant_tf_2();
	static float IDF_CONSTANT = (float) ConfigureCenter.get_tfidf_constant_idf_1();
	// NOTE(review): the tf-normalization switch is wired to the *debug* flag — confirm this is intentional.
	static boolean IS_TF_NORMALIZED = ConfigureCenter.is_debugged();
	
	// input variables
	String _index_data_source_dir = null;
		
	// output variables
	Map<Integer, Float> _term_doc_idf_map = new HashMap<Integer, Float>();
	Map<Integer, Map<Integer, Float>> _term_doc_weight_index_map = new HashMap<Integer, Map<Integer,Float>>();  

	// Intermediate variables
	Map<Integer, Integer> _doc_max_term_freq_map = new HashMap<Integer, Integer>();
	Map<Integer, String> _term_dictionary_map = new HashMap<Integer, String>();
	Map<Integer, Float> _doc_norm_map = new HashMap<Integer, Float>();
	Map<Integer, String> _doc_name_dictionary_map = new HashMap<Integer, String>();
	Map<Integer, String> _doc_hashcode_dictionary_map = new HashMap<Integer, String>();
	

	/**
	 * Restores a previously exported index from 'index_file_name'.
	 * NOTE(review): 'import_only' is currently unused; it is kept only to
	 * distinguish this constructor from {@link #DocumentIndex(String)}.
	 *
	 * @param index_file_name path of a file written by export_index()
	 * @param import_only     unused flag (see note above)
	 */
	public DocumentIndex(final String index_file_name, boolean import_only)
	{
		import_index(index_file_name);
	}
	
	/**
	 * Reads the serialized maps back from a plain-text file. The section
	 * order must mirror export_index(). Parse failures from a truncated or
	 * malformed file surface as unchecked NumberFormatException.
	 *
	 * @param index_file_name path of a file written by export_index()
	 */
	public void import_index(final String index_file_name)
	{
		BufferedReader in = null;
		try
		{
			in = new BufferedReader(new FileReader(index_file_name));

			// 1. Map<Integer, Float> _term_doc_idf_map (not serialized)
			//_read_map(in, _term_doc_idf_map, 1);

			// 2. Map<Integer, Map<Integer, Float>> _term_doc_weight_index_map
			// Layout: outer size, then for each entry the term id followed by
			// a nested (doc-id -> weight) map section.
			String sin = in.readLine();
			int size_of_map = Integer.parseInt(sin);
			for(int i=0; i<size_of_map; ++i)
			{
				sin = in.readLine();
				int key = Integer.parseInt(sin);
				Map<Integer, Float> int_double_map = new HashMap<Integer, Float>();
				_read_map(in, int_double_map, 1);
				_term_doc_weight_index_map.put(key, int_double_map);
			}

			// 3. Map<Integer, Integer> _doc_max_term_freq_map (not serialized)
			// 4. Map<Integer, String> _term_dictionary_map (not serialized)

			// 5. Map<Integer, Float> _doc_norm_map;
			_read_map(in, _doc_norm_map, 1);

			// 6. Map<Integer, String> _doc_name_dictionary_map;
			_read_map(in, _doc_name_dictionary_map, 2);

			// 7. Map<Integer, String> _doc_hashcode_dictionary_map;
			_read_map(in, _doc_hashcode_dictionary_map, 2);

		}catch(FileNotFoundException e1)
		{
			e1.printStackTrace();
		}catch(IOException e2)
		{
			e2.printStackTrace();
		}finally
		{
			// BUGFIX: previously the reader leaked when an exception was
			// thrown part-way through; close it unconditionally. Closing the
			// BufferedReader also closes the underlying FileReader.
			if(in != null)
			{
				try
				{
					in.close();
				}catch(IOException ignored)
				{
					// best effort — nothing sensible to do on close failure
				}
			}
		}
	}
	
	/**
	 * Reads one "size, then key line / value line per entry" map section.
	 *
	 * @param in      reader positioned at the start of a map section
	 * @param obj_map destination map (raw, because the value type varies)
	 * @param type    value type: 0 = int, 1 = float, 2 = string
	 * @throws IOException if reading from 'in' fails
	 */
	@SuppressWarnings("unchecked")
	private void _read_map(BufferedReader in, Map obj_map, int type) throws IOException
	{
		String sin = in.readLine();
		int size_of_map = Integer.parseInt(sin);
		for(int i=0; i<size_of_map; ++i)
		{
			sin = in.readLine();
			int key = Integer.parseInt(sin);
			sin = in.readLine();
			switch(type)
			{
			case 0: // int
				obj_map.put(key, Integer.parseInt(sin));
				break;
			case 1: // float
				obj_map.put(key, Float.parseFloat(sin));
				break;
			case 2: // string 
				obj_map.put(key, sin);
				break; // BUGFIX: explicit break guards against future fall-through
			default:
				throw new IllegalArgumentException("unknown value type: " + type);
			}
		}
	}
	
	/**
	 * Serializes the index maps to a plain-text file. The section order must
	 * mirror import_index().
	 *
	 * @param index_file_name destination file path
	 */
	public void export_index(final String index_file_name)
	{
		FileWriter fpw = null;
		try 
		{
			fpw = new FileWriter(index_file_name);
			// 1. Map<Integer, Float> _term_doc_idf_map (not serialized)
			//_write_map(fpw, _term_doc_idf_map);
			
			// 2. Map<Integer, Map<Integer, Float>> _term_doc_weight_index_map
			fpw.write(_term_doc_weight_index_map.size()+"\n");
			for(Integer cur_term_id : _term_doc_weight_index_map.keySet())
			{
				fpw.write(cur_term_id+"\n");
				_write_map(fpw, _term_doc_weight_index_map.get(cur_term_id));
			}
			
			System.out.println("_term_doc_weight_index_map over!");
			
			// 3. Map<Integer, Integer> _doc_max_term_freq_map (not serialized)
			// 4. Map<Integer, String> _term_dictionary_map (not serialized)
			
			System.out.println("_term_dictionary_map over!");
			
			// 5. Map<Integer, Float> _doc_norm_map;
			_write_map(fpw, _doc_norm_map);
			
			System.out.println("_doc_norm_map over!");
			
			// 6. Map<Integer, String> _doc_name_dictionary_map;
			_write_map(fpw, _doc_name_dictionary_map);
			
			System.out.println("_doc_name_dictionary_map over!");
			
			// 7. Map<Integer, String> _doc_hashcode_dictionary_map;
			_write_map(fpw, _doc_hashcode_dictionary_map);
			
			System.out.println("_doc_hashcode_dictionary_map over!");
			
		}catch (Exception e) 
		{
			e.printStackTrace();
		}finally
		{
			// BUGFIX: previously the writer leaked on a mid-write exception.
			if(fpw != null)
			{
				try
				{
					fpw.close();
				}catch(IOException ignored)
				{
					// best effort
				}
			}
		}
	}
	
	/**
	 * Writes one map section: size, then for each entry the key on one line
	 * and the value (via toString) on the next.
	 *
	 * @param fpw     open destination writer
	 * @param obj_map map to serialize
	 * @throws IOException if writing fails
	 */
	private void _write_map(FileWriter fpw, Map<Integer,?> obj_map) throws IOException
	{
		fpw.write(obj_map.size()+"\n");
		for(Integer cur_page_id : obj_map.keySet())
		{
			fpw.write(cur_page_id+"\n");
			fpw.write(obj_map.get(cur_page_id)+"\n");
		}
		fpw.flush();
	}
	
	/**
	 * Builds the index from scratch by scanning the Lucene index in 'data_dir'.
	 *
	 * @param data_dir directory containing the Lucene index
	 */
	public DocumentIndex(final String data_dir)
	{
		_index_data_source_dir = data_dir;
		_init();
	}
	
	/** Kicks off the full scan + weight computation. */
	private void _init()
	{
		update();
	}
	
	/**
	 * Rebuilds all maps: scans the Lucene index for term/document statistics,
	 * then converts raw frequencies into tf-idf weights and document norms.
	 * Timing and memory figures are printed for diagnostics.
	 */
	public void update()
	{
		long startTime, stopTime, runTime;
		startTime = System.currentTimeMillis();
		
		collect_information();
		
		stopTime = System.currentTimeMillis();
		runTime = stopTime - startTime;
		System.out.println("Run time for the index scanning: " + runTime);
		long mem = Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory();
		System.out.println("****"+mem);
		
		calculate_weight();
		
		runTime = System.currentTimeMillis() - stopTime;
		System.out.println("Run time for the vector calculation: " + runTime);
		mem = Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory();
		System.out.println("****"+mem);
		
		System.out.println("The number of documents is:"+_doc_norm_map.size());
	}
	
	/**
	 * Scans the Lucene index and collects, per term of the "contents" field:
	 * the idf value, the raw term frequency per document, and per document the
	 * maximum term frequency (used later for tf normalization) plus its url.
	 */
	protected void collect_information()
	{
		IndexReader reader = null;
		TermEnum termenum = null;
		try
		{
			reader = IndexReader.open(_index_data_source_dir);
			int num_of_documents = reader.numDocs();
			termenum = reader.terms();
			
			while(termenum.next())
			{
				Term termval = termenum.term();
				// only terms of the "contents" field participate in the vectors
				if(!termval.field().equals("contents"))
				{
					continue;
				}
				
				String term_str = termval.text();
				// NOTE(review): String.hashCode() is used as the term id, so two
				// terms with colliding hashes would silently merge — confirm acceptable.
				int term_id = term_str.hashCode();
				
				_term_dictionary_map.put(term_id, term_str);
				
				// idf = log(N / (df + IDF_CONSTANT)); the float constant forces
				// floating-point division.
				int doc_freq = termenum.docFreq();
				_term_doc_idf_map.put(term_id, (float) Math.log(num_of_documents/(doc_freq+IDF_CONSTANT)));

				// collect the raw term frequency for each document containing the term
				Map<Integer, Float> doc_weight_list = new HashMap<Integer, Float>();
				TermDocs termdocs = reader.termDocs(termval);
				try
				{
					while(termdocs.next())
					{
						int doc_id = termdocs.doc();
						int term_freq = termdocs.freq();
						
						doc_weight_list.put(doc_id, (float)term_freq);
						
						// first time this document is seen: register its url
						if(!_doc_max_term_freq_map.containsKey(doc_id))
						{
							_doc_max_term_freq_map.put(doc_id, 0);
							
							// assumes every document has a "url" field whose first
							// 8 chars are a scheme prefix to strip — TODO confirm
							String doc_url = reader.document(doc_id).get("url").substring(8);
							_doc_name_dictionary_map.put(doc_id, doc_url);
							_doc_hashcode_dictionary_map.put(doc_url.hashCode(), doc_url);
						}
						// track the per-document maximum term frequency for normalization
						if(term_freq > _doc_max_term_freq_map.get(doc_id))
						{
							_doc_max_term_freq_map.put(doc_id, term_freq);
						}
					}
				}
				finally
				{
					// BUGFIX: release the posting enumerator for every term
					termdocs.close();
				}
				
				_term_doc_weight_index_map.put(term_id, doc_weight_list);
			}

		} catch(IOException e)
		{
			e.printStackTrace();
		}
		finally
		{
			// BUGFIX: previously neither the TermEnum nor the IndexReader was
			// ever closed; release them even on failure.
			try
			{
				if(termenum != null)
				{
					termenum.close();
				}
				if(reader != null)
				{
					reader.close();
				}
			} catch(IOException ignored)
			{
				// best effort
			}
		}
	}
	
	/**
	 * Converts the raw term frequencies collected by collect_information()
	 * into tf-idf weights (in place, inside _term_doc_weight_index_map) and
	 * accumulates each document's Euclidean norm into _doc_norm_map.
	 */
	protected void calculate_weight()
	{
		for(Integer cur_term_id : _term_doc_weight_index_map.keySet())
		{
			Map<Integer, Float> cur_doc_weight_list = _term_doc_weight_index_map.get(cur_term_id);
			for(Integer cur_doc_id : cur_doc_weight_list.keySet())
			{
				float norm_value = 0;
				if(!_doc_norm_map.containsKey(cur_doc_id))
				{
					_doc_norm_map.put(cur_doc_id, 0.0f);
				}else
				{
					norm_value = _doc_norm_map.get(cur_doc_id);
				}
				
				int max_freq = _doc_max_term_freq_map.get(cur_doc_id);
				float weight = cur_doc_weight_list.get(cur_doc_id);
				// NOTE(review): normalized tf is only correct if the query weight
				// is computed the same way (see original comment).
				if(IS_TF_NORMALIZED)
				{
					//weight = TF_CONSTANT_1 + TF_CONSTANT_2 * (weight/max_freq);
					weight = weight/max_freq;
				}
				weight *= _term_doc_idf_map.get(cur_term_id);
				cur_doc_weight_list.put(cur_doc_id, weight);
				
				// accumulate the squared weight; the square root is taken below
				norm_value += weight*weight;
				_doc_norm_map.put(cur_doc_id, norm_value);
			}
		}
		// finalize: norm = sqrt(sum of squared weights)
		for(Integer cur_doc_id : _doc_norm_map.keySet())
		{
			float norm_value = _doc_norm_map.get(cur_doc_id);
			_doc_norm_map.put(cur_doc_id, (float) Math.sqrt(norm_value));
		}
	}

	/**
	 * @return the _term_doc_weight_index_map
	 */
	public Map<Integer, Map<Integer, Float>> get_term_doc_weight_index_map()
	{
		return _term_doc_weight_index_map;
	}
	
	/**
	 * Returns the (doc-id -> weight) map for 'term', or null if the term is
	 * not indexed.
	 *
	 * @param term the term text (looked up by its hashCode)
	 * @return the weight map, or null when absent
	 */
	public Map<Integer, Float> get_doc_weight_indices_of_term(final String term)
	{
		// a single get() suffices: it already returns null for unknown terms
		return _term_doc_weight_index_map.get(term.hashCode());
	}

	/**
	 * Returns the norm value of the weight vector corresponding to the
	 * document with doc_id.
	 * NOTE(review): throws NullPointerException (via unboxing) for an unknown
	 * doc_id — callers must pass ids present in the index.
	 *
	 * @param doc_id document id
	 * @return the Euclidean norm of the document's weight vector
	 */
	public double get_norm_value_of_doc_weight(int doc_id)
	{
		return _doc_norm_map.get(doc_id);
	}
	
	/**
	 * Returns the url of the document with 'doc_id', or null if unknown.
	 *
	 * @param doc_id document id
	 * @return the stored url, or null
	 */
	public String get_doc_name(int doc_id)
	{
		return _doc_name_dictionary_map.get(doc_id);
	}
	
	/**
	 * Interactive query loop: reads queries from stdin, searches, and prints
	 * ranked results a page at a time.
	 *
	 * @param args unused
	 */
	public static void main(String[] args)
	{
		DocumentIndex di = new DocumentIndex("data/result3index");
		DocumentSearch searcher = new DocumentSearch(di);
		try
		{
			BufferedReader in = new BufferedReader(new InputStreamReader(System.in));
		    while (true) 
		    {
		    	System.out.print("Query: ");
		    	String line = in.readLine();

		    	// BUGFIX: readLine() returns null at end-of-stream; the old
		    	// check 'line.length() == -1' could never be true and the loop
		    	// crashed with an NPE on EOF instead of terminating.
		    	if (line == null)
		    		break;

		    	long startTime, stopTime;
				startTime = System.currentTimeMillis();
				
		    	DocumentQuery query = new DocumentQuery(line);
		    	
		    	DocumentHits hits = searcher.search(query, 100);
		    	System.out.println(hits.length() + " total matching documents");

		    	stopTime = System.currentTimeMillis();
		    	System.out.println("Run time for the query processing: " + (stopTime - startTime));
		    	
		    	final int HITS_PER_PAGE = ConfigureCenter.get_search_hit_number_per_page();
		    	for (int start = 0; start < hits.length(); start += HITS_PER_PAGE) 
		    	{
		    		int end = Math.min(hits.length(), start + HITS_PER_PAGE);
		    		for (int i = start; i < end; i++)
		    		{
		    			System.out.println(i + " : "+hits.get_weight_ranked_at(i)+" : " 
		    					+ hits.get_doc_id_ranked_at(i) + " :: "
		    					+ di.get_doc_name(hits.get_doc_id_ranked_at(i)));
		    		}
		    		if (hits.length() > end) 
		    		{
		    			System.out.print("more (y/n) ? ");
		    			line = in.readLine();
		    			// BUGFIX: guard against null (EOF) before dereferencing
		    			if (line == null || line.length() == 0 || line.charAt(0) == 'n')
		    				break;
		    		}
		    	}
		    }
		} catch(Exception e)
		{
			e.printStackTrace();
		}
	}

	/**
	 * Returns the url registered under 'code' (the url's hashCode), or null.
	 *
	 * @param code hash code of a document url
	 * @return the url, or null when absent
	 */
	public String get_doc_name_from_hashcode(int code)
	{
		return _doc_hashcode_dictionary_map.get(code);
	}
}
