package telex;
/*
 * QuerySorter.java
 *
 * Copyright (c) 2007-2008 Colin Bayer, Douglas Downey, Oren Etzioni,
 *   University of Washington Computer Science and Engineering
 * 
 * See the file "COPYING" in the root directory of this distribution for
 * full licensing information.
 */

import java.io.File;
import java.util.*;

/**
 * Sorts IQF query files.  Each input file is first sorted in memory
 * ({@link #sortFile}), then the per-file results are combined with a multi-pass
 * k-way merge ({@link #doMerge}) whose fan-in per pass is bounded by a file
 * handle limit ({@link #generateMergeSchedule}).  Driven from {@link #main}.
 */
public class QuerySorter {
	/**
	 * Reads every query from the IQF file at aInputPath, sorts them in memory,
	 * and writes the sorted result to aOutputPath.
	 *
	 * NOTE(review): the TreeSet silently drops queries that compare equal,
	 * whereas doMerge() merges their "loc" user features -- confirm that this
	 * asymmetry is intended.
	 *
	 * @param aInputPath path of the unsorted IQF file to read.
	 * @param aOutputPath path of the sorted IQF file to create.
	 * @return true on success; false (with a message on stderr) on any failure.
	 */
	public static boolean sortFile(String aInputPath, String aOutputPath) {
		IQFReader ir;
		
		try {
			ir = new IQFReader(aInputPath);
		} catch (Exception e) {
			// FIX: was printed on System.out; every other error in this class
			// goes to System.err.
			System.err.println("Couldn't open IQFReader on " + aInputPath + " (" + e + ")");
			return false;
		}

		// TreeSet keeps the queries in their natural (Comparable) order as we insert.
		TreeSet<Query> qt = new TreeSet<Query>();
		
		try {
			Query q;
			
			while ((q = ir.nextQuery()) != null) {
				qt.add(q);
			}
			ir.close();
		} catch (Exception e) {
			System.err.println("Couldn't read query from " + aInputPath + " (" + e + ")");
			return false;
		}
		
		IQFWriter iw;
		
		try {
			iw = new IQFWriter(aOutputPath);
			
			// Iterate the set directly; the old toArray() made a needless copy.
			for (Query q : qt) {
				iw.writeQuery(q);
			}
			
			iw.finish();
		} catch (Exception e) {
			System.err.println("Couldn't write sorted query list to " + aOutputPath + " (" + e + ")");
			return false;
		}
		
		return true;
	}
	
	/**
	 * K-way merges several already-sorted IQF files into one sorted output file.
	 * Queries that compare equal across inputs are coalesced by concatenating
	 * their "loc" user features.
	 *
	 * @param aInputPaths paths of the sorted IQF files to merge.
	 * @param aOutputPath path of the merged IQF file to create.
	 * @return true on success; false (with a message on stderr) on any failure.
	 */
	public static boolean doMerge(String[] aInputPaths, String aOutputPath) {
		// Pairs a reader with the next not-yet-merged query read from it
		// (null means "need to fetch another from disk").
		class FilePointer {
			FilePointer(IQFReader aIR, Query aQ) { ir = aIR; next_q = aQ; }
			
			IQFReader ir;
			Query next_q;
		}
		
		LinkedList<FilePointer> filps = new LinkedList<FilePointer>();
		IQFWriter iw;
		
		try {
			iw = new IQFWriter(aOutputPath);
		} catch (Exception e) {
			System.err.println("Couldn't open output file " + aOutputPath + " for writing: " + e);
			return false;
		}
		
		// Open one reader per input; each starts with no query buffered.
		for (int i = 0; i < aInputPaths.length; i++) {
			try {
				IQFReader ir = new IQFReader(aInputPaths[i]);
				
				filps.add(new FilePointer(ir, null));
			} catch (Exception e) {
				System.err.println("Couldn't open input file " + aInputPaths[i] + " (idx=" + i + ") for reading: " + e);
				return false;
			}
		}
		
		// merge the next-ordered queries together, then write the merged query to the output file.
		while (filps.size() > 0) {
			Query min = null;
			FilePointer fp_min = null;
			
			// locate the minimum query among the set of next-queries-to-be-merged among all file pointers.
			for (ListIterator<FilePointer> i = filps.listIterator(); i.hasNext();) {
				FilePointer fp = i.next();
				
				if (fp.next_q == null) {
					// next query is null.  this file had the minimum when the last element was merged (or its first
					// query hasn't been read yet), so we need to read another from disk.
					Query q = null;
					
					try {
						q = fp.ir.nextQuery();
					} catch (Exception e) {
						System.err.println("Error reading query from " + fp.ir.getPath() + ": " + e);
						return false;
					}
					
					if (q == null) {
						// end of file.  drop this file pointer from the list and close the reader.
						fp.ir.close();
						i.remove();
						continue;
					} else {
						// put the new query in the file pointer.
						fp.next_q = q;
					}
				}
				
				if (min == null || min.compareTo(fp.next_q) > 0) {
					// this fp's next query compares before the current minimum.
					// put back the old minimum if min_fp != null, clear fp's next query, and set min and min_fp.
					if (fp_min != null) fp_min.next_q = min;
					
					min = fp.next_q;
					fp.next_q = null;
					fp_min = fp;
				} else if (min.compareTo(fp.next_q) == 0) {
					// next query compares equally to the current minimum.
					// merge it by merging its loc data with the current minimum's, 
					// clear fp's next query because it doesn't exist any more.
					// FIXME: if we're going to use the user data for things like feature vectors, we'll need
					// to provide some formal support for merging queries' user data.
					
					String loc = min.getUserFeature("loc"), fp_loc = fp.next_q.getUserFeature("loc"),
						new_loc;
					
					if (loc == null) new_loc = fp_loc;
					else if (fp_loc == null) new_loc = loc;
					else new_loc = loc + "," + fp_loc;
					
					min.setUserFeature("loc", new_loc);
					fp.next_q = null;
				}
				// otherwise: next query compares above the current minimum.  proceed.
			}
			
			if (min == null) {
				// if min is null, then we closed the last input file last time we merged.
				continue;
			}
			
			// finally, write min to the output file.
			try {
				iw.writeQuery(min);
			} catch (Exception e) {
				System.err.println("Error writing query to output file " + iw.getPath() + ": " + e);
				return false;
			}
		}
		
		// done merging.  finalize and close!
		try {
			if (!iw.finish()) {
				System.err.println("Error finalizing output file " + iw.getPath() + ".");
				return false;
			} else {
				//System.out.println("Wrote " + iw.getQueryCount() + " queries.");
			}
		} catch (Exception e) {
			System.err.println("Exception finalizing output file " + iw.getPath() + ": " + e);
			return false;
		}
		
		return true;
	}
	
	/**
	 * Computes the per-pass fan-in for merging aFilesToMerge sorted files when
	 * at most aHardLimit files may be open simultaneously.  Element p is the
	 * number of files merged into each output ("sheaf") during pass p.
	 *
	 * @param aFilesToMerge number of sorted files awaiting merging.
	 * @param aHardLimit open-file-handle budget per merge (values below 2 are
	 *        treated as 2; see the fix note below).
	 * @return fan-in per pass; empty array if no merging is required.
	 */
	public static int[] generateMergeSchedule(int aFilesToMerge, int aHardLimit) {
		if (aFilesToMerge <= 1) return new int[0]; // if one or fewer files, no merging required.
		
		ArrayList<Integer> sched = new ArrayList<Integer>();
		
		for (int i = aFilesToMerge; i > 1;) {
			// FIX: clamp the fan-in to at least 2.  With aHardLimit < 2 the old
			// code chose this_round == 1, which left i unchanged and looped forever
			// (reachable from main() via "-fh 1").
			int this_round = Math.min(i, Math.max(2, aHardLimit));
			sched.add(this_round);
			
			// after this pass, every this_round files collapse into one.
			i = (int)Math.ceil((double)i / this_round);
		}
		
		int[] rv = new int[sched.size()];
		
		for (int i = 0; i < sched.size(); i++) rv[i] = sched.get(i);
		
		return rv;
	}
	
	/** Prints the program banner to stdout. */
	static void banner() {
		System.out.print(
			"teLEX Query Sorter v. 0.1\n" +
			"Colin Bayer and Douglas Downey <{vogon,ddowney}@cs.washington.edu>\n" +
			"Based on an algorithm in 'Locating Named Entities in Web Text'\n" +
				"\t(Downey, Broadhead, Etzioni 2007, published in IJCAI 2007 Proceedings)\n" +
			"See README for a description of the Query Sorter and teLEX in general.\n" +
			"=====================================================================================\n"
		);
	}
	
	/** Prints the banner plus usage text, then exits with status 255. */
	static void usage() {
		banner();
		System.err.print(
			"Usage: java QuerySorter [-fh n] input [output]\n" +
			// FIX: this line was missing its trailing newline, gluing the "-fh"
			// description onto the "input:" line of the help text.
			"-fh n: limit number of files to open at once during merge pass to n\n" +
			"\tinput: path to unsorted IQF, or directory containing one or more\n" +
				"\t\tIQF files to sort\n" +
			"\toutput: path to output IQF file(s) in (default: input + \".sorted\" for a single file,\n" +
				"\tinput + \"-s\" for a directory)\n"
		);
		System.exit(255);
	}
	
	/**
	 * Entry point.  Parses arguments, sorts each input file individually, then
	 * runs the scheduled merge passes over the sorted outputs.
	 *
	 * @param aArgs command line: [-fh n] input [output]
	 */
	public static void main(String[] aArgs) {
		if (aArgs.length < 1) {
			usage();
		}
		
		String input_path = null, output_path = null;
		int handle_limit = 256;	// default open-file budget for merge passes.
		
		// FIX: hoisted out of the argument loop.  It was re-initialized to false
		// every iteration, so the "unused pathname" warning was never silenced
		// and fired once per extra pathname instead of once total.
		boolean silence_pathname_warning = false;
		
		for (int i = 0; i < aArgs.length; i++) {
			if (aArgs[i].equals("-fh")) {
				// -fh (file handle limit) option.
				try {
					handle_limit = Integer.parseInt(aArgs[i + 1]);
				} catch (Exception e) {
					// covers both a missing and a non-integer argument.
					System.err.println("Option '-fh' requires an integer argument.");
					usage();
				}
				
				i++; // consume the option's argument.
			} else if (aArgs[i].startsWith("-")) {
				// FIX: startsWith() instead of charAt(0), which threw
				// StringIndexOutOfBoundsException on an empty-string argument.
				System.err.println("Unrecognized option '" + aArgs[i] + "'.");
				usage();
			} else {
				// pathname in arguments list.  assign it to the first of input_path or output_path,
				// or warn (once) if both are already assigned.
				if (input_path == null) input_path = aArgs[i];
				else if (output_path == null) output_path = aArgs[i];
				else if (!silence_pathname_warning) {
					System.err.println("Warning: unused pathname argument ignored.");
					silence_pathname_warning = true;
				}
			}
		}
		
		// each element is a {input file, output file} pair.
		ArrayList<String[]> files_to_process = new ArrayList<String[]>();
		
		banner();
		
		// check that at least one of the two pathnames was supplied.
		if (input_path == null) {
			System.out.println("No input path specified.");
			usage();
		} else {
			File f = new File(input_path);
			boolean input_is_dir = false;
			
			// detect directory-ness of the input path.
			if (!f.exists()) {
				System.err.println("Input file does not exist.");
				System.exit(1);
			} else if (f.isDirectory()) {
				System.out.println("<== Input directory: " + f);
				input_is_dir = true;
			} else if (f.isFile()) {
				System.out.println("<== Input file: " + f);
				input_is_dir = false;
			} else {
				System.err.println("Input file exists, but is not a normal file or a directory.");
				System.exit(1);
			}
			
			// generate output path, if none provided.
			if (output_path == null) {
				if (input_is_dir) output_path = input_path + "-s";
				else output_path = input_path + ".sorted";
			}
			
			// check that the output path either doesn't exist, or is of the right type to overwrite.
			// create the output directory if we're writing a directory.
			f = new File(output_path);

			System.out.println("==> Output " + (input_is_dir ? "directory" : "file") + 
					(f.exists() ? ": " : " (creating): ") + output_path);
			
			if (!f.exists() && input_is_dir) {
				try {
					f.mkdirs();
				} catch (Exception e) {
					System.err.println("Error trying to create output directory: " + e);
					System.exit(1);
				}
			} else if (f.isDirectory() && !input_is_dir) {
				System.err.println("Output file already exists, but is a directory; refusing to overwrite it.");
				System.exit(1);
			} else if (f.isFile() && input_is_dir) {
				System.err.println("Output file already exists, but isn't a directory; refusing to overwrite it.");
				System.exit(1);
			}
			
			// generate list of input and output filenames to process.
			if (input_is_dir) {
				f = new File(input_path);
				
				String[] files_in_dir = f.list();
				
				if (files_in_dir == null) {
					System.err.println("Error getting list of files in input directory.  Halting.");
					System.exit(1);
				}
				
				for (String fname : files_in_dir) {
					files_to_process.add(new String[]
					    {input_path + "/" + fname,
					     output_path + "/" + fname});
				}
			} else {
				files_to_process.add(new String[]{input_path, output_path});
			}
		}

		// perform initial sort pass: sort all queries within each file.
		System.out.println("Beginning intrafile sort pass...");
		int n = 0;
		
		// prepare list of files to merge for the first pass at the same time.
		List<String> unmerged = new ArrayList<String>();
		
		for (String[] fnames : files_to_process) {
			unmerged.add(fnames[1]);
			
			if (sortFile(fnames[0], fnames[1])) {
				n++;
				
				// print a progress message roughly every 10%: fires when the
				// completed percentage crosses a multiple of ten between n-1 and n.
				if ((((double)n / files_to_process.size() * 100) % 10) < 
					(((double)(n - 1) / files_to_process.size() * 100) % 10)) {
					System.out.println(((double)n / files_to_process.size() * 100) + "% completed (" 
							+ n + " of " + files_to_process.size() + " files).");
				}
			} else {
				// FIX: the message had unbalanced quotes around the output path.
				System.err.println("Failed to sort file \"" + fnames[0] + "\" to \"" + fnames[1] + "\".");
				System.exit(1);
			}
		}
		
		System.out.println("Sort pass finished.  Beginning merge passes...");
		
		int[] merge_schedule = generateMergeSchedule(files_to_process.size(), handle_limit);
		
		System.out.println("Debug: merge schedule is " + java.util.Arrays.toString(merge_schedule));
		
		// FIX: hoisted out of the pass loop.  It used to be declared (and reset
		// to false) inside the loop, making "if (error) break;" dead code, so a
		// failed merge did not stop the remaining passes.
		boolean error = false;
		
		for (int pass = 0; pass < merge_schedule.length; pass++) {
			if (error) break;
			
			// the number of outputs this pass equals next pass's fan-in (1 on the last pass).
			int sheaves_this_pass = (pass == merge_schedule.length - 1) ? 1 : merge_schedule[pass + 1];
			ArrayList<String> unmerged_next_pass = new ArrayList<String>();
			
			System.out.println("Starting pass " + pass + ": merging " + unmerged.size() + " files into " + 
				sheaves_this_pass + " sheaves.");
			
			for (int sheaf = 0; sheaf < sheaves_this_pass; sheaf++) {
				// generate output file name, and add it to the list of unmerged files that need to be merged next pass.
				String output_fname = output_path + "/" + "merged-pass" + pass + "-sheaf" + sheaf + ".iqf";
				unmerged_next_pass.add(output_fname);
				
				int last_idx = Math.min(merge_schedule[pass], unmerged.size());
				
				// peel the next fan-in-sized batch of inputs off the unmerged list.
				String[] files_to_merge = unmerged.subList(0, last_idx).toArray(new String[0]);
				unmerged = unmerged.subList(last_idx, unmerged.size());
				
				if (!doMerge(files_to_merge, output_fname)) {
					System.err.println("Failed to merge sheaf " + sheaf + " of pass " + pass + ".");
					error = true;
					break;
				} else {
					System.out.println("Merged sheaf " + sheaf + " of pass " + pass + " (" + 
						files_to_merge.length + " files).");
				}
				
				// delete the original files.
				for (String path : files_to_merge) {
					File f = new File(path);
					
					f.delete();
				}
			}
			
			unmerged = unmerged_next_pass;
		}
		
		// FIX: report failure through the exit status, consistent with every
		// other fatal-error path in main() (previously we fell through and
		// exited 0 even after a failed merge).
		if (error) System.exit(1);
	}
}

