import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Date;
import java.util.Iterator;
import java.util.Vector;

public class IndexGenerator {

	/** Separator between fields inside the index header. */
	public static final String DELIMITER = "==";

	/** Marks the end of the header section in the index file. */
	public static final String ENDING_DELIMITER = "##";

	/**
	 * Writes a search index for the given corpus to the file "AIM.out".
	 *
	 * The file layout is:
	 *   1. Static header: date/time, break characters, stop-word file,
	 *      total characters, total words, total lines, file count —
	 *      each separated by DELIMITER.
	 *   2. Per-file header entries (each prefixed by DELIMITER): file name,
	 *      character count, word count, line count, whole-file bloom filter
	 *      size, per-line bloom filter size.
	 *   3. ENDING_DELIMITER.
	 *   4. The bloom filters themselves, in order: for each file, the
	 *      whole-file filter followed by one filter per line.
	 *
	 * @param corpus  the corpus to index; its files supply the word/line data
	 * @param options indexing options (break characters, optional stop-word
	 *                analyzer)
	 * @throws IOException           if writing the index file fails
	 * @throws FileNotFoundException if "AIM.out" cannot be opened for writing
	 */
	public static void createIndex(Corpus corpus, Options options)
			throws IOException, FileNotFoundException {

		Vector corpus_files = corpus.getCorpusFiles();
		Vector bloom_filters = new Vector();

		// Empty when no word analyzer (and thus no stop-word file) was configured.
		String stop_word_file;
		if (options.getWordAnalyzer() == null) {
			stop_word_file = "";
		} else {
			stop_word_file = options.getWordAnalyzer().getStopFileLocation();
		}

		// Corpus-wide totals for the static header.
		int character_size = 0;
		int word_size = 0;
		int line_size = 0;
		int total_corpus_files = 0;
		for (Iterator i = corpus_files.iterator(); i.hasNext();) {
			CorpusFile file = (CorpusFile) i.next();
			total_corpus_files++;
			character_size += file.getNumberOfCharacters();
			word_size += file.getNumberOfWords();
			line_size += file.getNumberOfLines();
		}

		// FIX: build the header with a StringBuilder instead of repeated
		// String += in the per-file loop (the original was O(n^2) in header size).
		StringBuilder header = new StringBuilder();
		header.append(new Date().toString()).append(DELIMITER)
		      .append(options.getBreaks()).append(DELIMITER)
		      .append(stop_word_file).append(DELIMITER)
		      .append(character_size).append(DELIMITER)
		      .append(word_size).append(DELIMITER)
		      .append(line_size).append(DELIMITER)
		      .append(total_corpus_files);

		// Per-file header entries plus the bloom filters themselves.
		for (Iterator i = corpus_files.iterator(); i.hasNext();) {
			CorpusFile file = (CorpusFile) i.next();
			int num_characters = file.getNumberOfCharacters();
			int num_words = file.getNumberOfWords();
			int num_lines = file.getNumberOfLines();

			// Guard against division by zero for empty files.
			// NOTE(review): this is average *characters* per line, but it is fed
			// to a sizing function whose parameter is named "words" — looks like
			// a units mismatch; behavior preserved, verify against the reader.
			int avg_chars_per_line = (num_lines == 0) ? 0 : num_characters / num_lines;

			int complete_file_bloom_filter_size =
					calculateOptimalFileBloomFilterSize(num_words);
			int line_bloom_filter_size =
					calculateOptimalLineBloomFilterSize(avg_chars_per_line);

			header.append(DELIMITER).append(file.getFileName())
			      .append(DELIMITER).append(num_characters)
			      .append(DELIMITER).append(num_words)
			      .append(DELIMITER).append(num_lines)
			      .append(DELIMITER).append(complete_file_bloom_filter_size)
			      .append(DELIMITER).append(line_bloom_filter_size);

			// Whole-file filter: every word in the file.
			BloomFilter file_bloom_filter =
					new BloomFilter(complete_file_bloom_filter_size);
			Vector file_words = file.getCorpusWords();
			for (Iterator j = file_words.iterator(); j.hasNext();) {
				CorpusWord word = (CorpusWord) j.next();
				file_bloom_filter.addWord(word.getWord());
			}
			bloom_filters.add(file_bloom_filter);

			// Per-line filters, appended in line order after the file filter.
			BloomFilter[] line_bloom_filters =
					buildLineBloomFilters(file_words, num_lines, line_bloom_filter_size);
			for (int k = 0; k < line_bloom_filters.length; k++) {
				bloom_filters.add(line_bloom_filters[k]);
			}
		}

		header.append(ENDING_DELIMITER);

		// TODO  Need to be able to specify filename somehow.
		// FIX: close the stream in a finally block — the original leaked the
		// file handle if any write threw before close().
		FileOutputStream stream = new FileOutputStream("AIM.out");
		try {
			// NOTE(review): getBytes() uses the platform default charset; the
			// index reader must decode with the same charset — consider
			// pinning an explicit charset on both sides.
			stream.write(header.toString().getBytes());
			for (Iterator i = bloom_filters.iterator(); i.hasNext();) {
				BloomFilter filter = (BloomFilter) i.next();
				filter.writeBloomToFile(stream);
			}
		} finally {
			stream.close();
		}
	}

	/**
	 * Builds one bloom filter per line of a file.
	 *
	 * A word occurring more than once on the same line is stored with an
	 * occurrence suffix: the 2nd occurrence as word+"+2", the 3rd as
	 * word+"+3", and so on, so occurrence counts remain queryable.
	 *
	 * @param file_words  Vector of CorpusWord for the file
	 * @param num_lines   number of lines in the file (one filter each)
	 * @param filter_size bit size for each per-line filter
	 * @return array of filters indexed by line number - 1
	 */
	private static BloomFilter[] buildLineBloomFilters(Vector file_words,
			int num_lines, int filter_size) {
		BloomFilter[] line_bloom_filters = new BloomFilter[num_lines];
		for (int k = 0; k < line_bloom_filters.length; k++) {
			line_bloom_filters[k] = new BloomFilter(filter_size);
		}
		for (Iterator j = file_words.iterator(); j.hasNext();) {
			CorpusWord word = (CorpusWord) j.next();
			Vector line_numbers = word.getLineNumbers();
			// TODO  Assuming these are sorted.  If not, have to sort them first.
			int last_line_number = -1; // sentinel: no previous line yet
			int number_repeated = 0;

			for (Iterator l = line_numbers.iterator(); l.hasNext();) {
				int line = ((Integer) l.next()).intValue();
				if (line == last_line_number) {
					// Repeat on the same line: first repeat stores "+2", etc.
					number_repeated++;
					line_bloom_filters[line - 1]
							.addWord(word.getWord() + "+" + (number_repeated + 1));
				} else {
					line_bloom_filters[line - 1].addWord(word.getWord());
					last_line_number = line;
					number_repeated = 0;
				}
			}
		}
		return line_bloom_filters;
	}

	/**
	 * Bit size for a whole-file bloom filter holding {@code words} entries:
	 * floor(words * HASHES / 0.7).
	 * // NOTE(review): 0.7 presumably targets a load factor — confirm against
	 * // the BloomFilter false-positive math.
	 *
	 * @param words number of distinct words the filter must hold
	 * @return filter size in bits
	 */
	public static int calculateOptimalFileBloomFilterSize(int words) {
		return (int) Math.floor((words * BloomFilter.HASHES) / .7);
	}

	/**
	 * Bit size for a single-line bloom filter holding {@code words} entries:
	 * floor(words * HASHES / 1.1).
	 *
	 * @param words number of entries expected per line
	 * @return filter size in bits
	 */
	public static int calculateOptimalLineBloomFilterSize(int words) {
		return (int) Math.floor((words * BloomFilter.HASHES) / 1.1);
	}
}
