/* Copyright 2012 Tobias Marschall
 * 
 * This file is part of CLEVER.
 * 
 * CLEVER is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * CLEVER is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with CLEVER.  If not, see <http://www.gnu.org/licenses/>.
 */

// POSIX (isatty, fileno — used to detect an interactive stdin in main()).
#include <unistd.h>

// C++ standard library.
#include <cassert>
#include <cstdio>
#include <cstdlib>
#include <fstream>
#include <iostream>
#include <limits>
#include <memory>
#include <sstream>
#include <string>
#include <vector>

// Boost.
#include <boost/lexical_cast.hpp>
#include <boost/program_options.hpp>
#include <boost/tokenizer.hpp>

// Project headers.
#include "AlignmentPair.h"
#include "PackedAlignmentPair.h"
#include "EdgeCalculator.h"
#include "CliqueFinder.h"
#include "CliqueWriter.h"
#include "CoverageMonitor.h"
#include "CoverageWriter.h"

using namespace std;
using namespace boost;
namespace po = boost::program_options;

/**
 * Prints the usage message, including the full boost::program_options help
 * text, to stderr and terminates the program with exit status 1.
 *
 * @param name          Program name as invoked (argv[0]).
 * @param options_desc  Description of all supported command line options;
 *                      streamed verbatim at the end of the message.
 */
void usage(const char* name, const po::options_description& options_desc) {
	cerr << "Usage: " << name << " [options] <gaussian-file>" << endl;
	cerr << endl;
	cerr << "<gaussian-file> is a file containing one line with mean and standard deviation" << endl;
	cerr << "                of the normal distribution used as null model for internal segment length." << endl;
	cerr << "                Such a file can be generated using option -m of insert-length-histogram." << endl;
	cerr << "                Note that the \"internal segment\" does NOT include the read (ends), i.e." << endl;
	cerr << "                a fragment disjointly consists of two reads (read ends) and an internal segment." << endl;
	cerr << endl;
	// Typo fix: "from stdin in computes" --> "from stdin and computes".
	cerr << "Reads alignment pairs from stdin and computes all cliques. Expected format:" << endl;
	cerr << "<read-name> <pair-nr> <phred-sum1> <chrom1> <start1> <end1> <strand1> <phred-sum2> <chrom2> <start2> <end2> <strand2> <aln-pair-prob> <aln-pair-prob-inslength>" << endl;
	cerr << endl;
	cerr << "NOTE: Alignment pairs are assumed to be ordered by field 6 (end1)." << endl;
	cerr << endl;
	cerr << "Outputs all significant variant predictions (sorted by p-value) after controlling FDR." << endl;
	cerr << endl;
	cerr << options_desc << endl;
	exit(1);
}

/**
 * Reads mean and standard deviation of the insert size null distribution
 * from the first line of the given file.
 *
 * The line must contain exactly two whitespace-separated numbers,
 * "<mean> <stddev>", as written by option -m of insert-length-histogram.
 *
 * @param filename File to read.
 * @param mean     Output: parsed mean; written only on success.
 * @param sd       Output: parsed standard deviation; written only on success.
 * @return true on success, false if the file cannot be opened/read or the
 *         first line does not consist of exactly two parseable numbers.
 */
bool read_mean_and_sd(const std::string& filename, double* mean, double* sd) {
	std::ifstream in(filename.c_str());
	std::string line;
	if (in.fail() || (!getline(in,line))) {
		return false;
	}
	// Stream extraction replaces the former boost tokenizer + lexical_cast:
	// the standard library is sufficient for "two doubles on one line".
	std::istringstream tokens(line);
	double m = 0.0;
	double s = 0.0;
	if (!(tokens >> m >> s)) {
		return false;
	}
	// Enforce "exactly two fields": reject trailing garbage or extra columns.
	std::string extra;
	if (tokens >> extra) {
		return false;
	}
	// Publish results only after the whole line validated, so the outputs
	// are never left half-written on a parse failure.
	*mean = m;
	*sd = s;
	return true;
}

// Program entry point.
//
// Reads alignment pairs line by line from stdin (which must be sorted by
// field 6, end1), filters them (same chromosome, opposite strands,
// weight / insert-length / coverage thresholds), and hands the survivors to
// a CliqueFinder whose cliques are written by a CliqueWriter. The single
// positional argument names the file holding mean and standard deviation of
// the insert size null distribution. Returns 0 on success, 1 on any
// usage / input error.
int main(int argc, char* argv[]) {
	// PARAMETERS
	double edge_sig_level;           // significance level passed to EdgeCalculator
	double min_aln_weight;           // alignment pairs lighter than this are skipped
	double max_insert_length;        // 0 = unlimited
	int max_coverage;                // 0 = unlimited
	string edge_filename;            // if non-empty, edges are dumped to this file
	double fdr;                      // false discovery rate passed to CliqueWriter
	string reads_output_filename;    // if non-empty, reads of significant cliques go here
	string coverage_output_filename; // if non-empty, per-position coverage goes here
	bool verbose = false;

	po::options_description options_desc("Allowed options");
	options_desc.add_options()
	    ("verbose,v", po::value<bool>(&verbose)->zero_tokens(), "Be verbose: output additional statistics for each variation.")
		("edge_sig_level,p", po::value<double>(&edge_sig_level)->default_value(0.2), "Significance level for edges (the lower the level, the more edges will be present).")
		("min_aln_weight,w", po::value<double>(&min_aln_weight)->default_value(0.0016), "Minimum weight of alignment pairs to be considered.")
		("max_insert_length,l", po::value<double>(&max_insert_length)->default_value(50000), "Maximum insert length of alignments to be considered (0=unlimited).")
		("max_coverage,c", po::value<int>(&max_coverage)->default_value(500), "Maximum allowed coverage. If exceeded, violating reads are ignored. The number of such ignored reads is printed to stderr (0=unlimited).")
		("write_edges,e", po::value<string>(&edge_filename)->default_value(""), "Write edges to file of given name.")
		("fdr,f", po::value<double>(&fdr)->default_value(0.1), "False discovery rate (FDR).")
		("all,a", "Output all cliques instead of only the significant ones. Cliques are not sorted and last column (FDR) is not computed.")
		// NOTE(review): this help string is missing its closing parenthesis.
		("output_reads,r", po::value<string>(&reads_output_filename)->default_value(""), "Output reads belonging to at least one significant clique to the given filename (along with their assignment to significant cliques.")
		("output_coverage,C", po::value<string>(&coverage_output_filename)->default_value(""), "Output the coverage with considered insert segments along the chromosome (one line per position) to the given filename.")
	;
	
	// Refuse to run interactively: input must be piped to stdin, and at least
	// the <gaussian-file> positional argument must be present.
	if (isatty(fileno(stdin)) || (argc<2)) {
		usage(argv[0], options_desc);
	}
	// The last argument is the gaussian file; decrement argc so boost's
	// parser below only sees the option arguments.
	string mean_stddev_filename(argv[argc-1]);
	argc -= 1;

	po::variables_map options;
	try {
		po::store(po::parse_command_line(argc, argv, options_desc), options);
		po::notify(options);
	} catch(std::exception& e) {
		cerr << "error: " << e.what() << "\n";
		return 1;
	}
	bool output_all = options.count("all")>0;

	// -a suppresses significance filtering/sorting, -r needs the significant
	// set, so the two cannot be combined.
	if (output_all && (reads_output_filename.size()>0)) {
		cerr << "Error: options -a and -r are mutually exclusive." << endl;
		return 1;
	}

	// Null model parameters for the internal segment length distribution.
	double insert_mean = -1.0;
	double insert_stddev = -1.0;
	if (!read_mean_and_sd(mean_stddev_filename, &insert_mean, &insert_stddev)) {
		cerr << "Error reading \"" << mean_stddev_filename << "\"." << endl;
		return 1;
	}
	cerr << "Null distribution: mean " << insert_mean << ", sd " <<  insert_stddev << endl;

	// Wire up the processing pipeline: edges are decided by edge_calculator,
	// cliques found by clique_finder and emitted through clique_writer,
	// which uses variation_caller to assess candidate variations.
	EdgeCalculator edge_calculator(edge_sig_level,insert_mean,insert_stddev);
	VariationCaller variation_caller(insert_mean, insert_stddev);
	CliqueWriter clique_writer(cout, variation_caller, output_all, fdr, verbose);
	CliqueFinder clique_finder(edge_calculator, clique_writer);
	// Optional outputs are heap-allocated only when requested and freed at
	// the end of main. NOTE(review): the early "return 1" paths below leak
	// these objects; harmless for a short-lived process, but RAII would be
	// cleaner.
	EdgeWriter* edge_writer = 0;
	ofstream* edge_ofstream = 0;
	if (edge_filename.size()>0) {
		edge_ofstream = new ofstream(edge_filename.c_str());
		edge_writer = new EdgeWriter(*edge_ofstream);
		clique_finder.setEdgeWriter(*edge_writer);
	}
	ofstream* reads_ofstream = 0;
	if (reads_output_filename.size()>0) {
		reads_ofstream = new ofstream(reads_output_filename.c_str());
		clique_writer.enableReadListOutput(*reads_ofstream);
	}
	CoverageWriter* coverage_writer = 0;
	if (coverage_output_filename.size()>0) {
		coverage_writer = new CoverageWriter(coverage_output_filename);
	}

	// Main input loop state.
	size_t last_pos = 0;              // end1 of previous accepted pair (sortedness check)
	int n = 0;                        // current input line number (for error messages)
	string line;
	size_t skipped_by_weight = 0;
	size_t skipped_by_length = 0;
	size_t skipped_by_coverage = 0;
	size_t valid_alignments = 0;
	size_t total_alignments = 0;
	while (getline(cin, line)) {
		n += 1;
		total_alignments += 1;
		try {
			// Parses one whitespace-separated input line; assumed to throw
			// std::runtime_error on malformed input (caught below).
			AlignmentPair ap(line);
			// Skip pairs whose internal segment (between end1 and start2)
			// would be empty or negative — presumably overlapping read ends;
			// confirm against AlignmentPair's coordinate conventions.
			if (ap.getEnd1()+1 > ap.getStart2()-1) continue;
			// Keep only pairs mapped to the same chromosome ...
			if (ap.getChrom1().compare(ap.getChrom2()) != 0) continue;
			// ... with the two reads on opposite strands.
			if (ap.getStrand1().compare(ap.getStrand2()) == 0) continue;
			// Enforce the documented precondition: input sorted by end1.
			if (ap.getEnd1()<last_pos) {
				cerr << "Error: Input is not ordered by position (field 6)! Offending line: " << n << endl;
				return 1;
			}
			valid_alignments += 1;
			last_pos = ap.getEnd1();
			// Ownership is transferred to clique_finder.addAlignment() below.
			// (std::auto_ptr is deprecated since C++11; kept because the
			// project API takes it.)
			auto_ptr<PackedAlignmentPair> alignment_autoptr(new PackedAlignmentPair(ap));
			if (max_insert_length>0) {
				if (alignment_autoptr->getInsertLength() > max_insert_length) {
					skipped_by_length += 1;
					continue;
				}
			}
			if (alignment_autoptr->getWeight() < min_aln_weight) {
				// cout << "Skipping alignment (weight): "  << alignment_autoptr->getName() << " weight: " << alignment_autoptr->getWeight() << endl;
				skipped_by_weight += 1;
				continue;
			}
			if (max_coverage>0) {
				// probeAlignment() presumably reports the coverage that would
				// result from adding this alignment — TODO confirm against
				// CoverageMonitor; pairs pushing coverage past the cap are dropped.
				if (clique_finder.getCoverageMonitor().probeAlignment(*alignment_autoptr) > (size_t)max_coverage) {
					// cout << "Skipping alignment (coverage): "  << alignment_autoptr->getName()  << endl;
					skipped_by_coverage += 1;
					continue;
				}
			}
			if (coverage_writer != 0) {
				coverage_writer->addAlignment(*alignment_autoptr);
			}
			clique_finder.addAlignment(alignment_autoptr);
		} catch (std::runtime_error&) {
			cerr << "Error parsing input, offending line: " << n << endl;
			return 1;
		}
	}
	// Flush any pending cliques / output after the last input line.
	clique_finder.finish();
	clique_writer.finish();

	// Summary statistics go to stderr so they don't pollute the clique
	// output on stdout.
	cerr << "Total alignments: " << total_alignments << endl;
	cerr << "Valid alignments: " << valid_alignments << endl;
	cerr << "Alignments with too low weight (skipped): " << skipped_by_weight << endl;
	cerr << "Alignments with too large insert length (skipped): " << skipped_by_length << endl;
	cerr << "Alignments skipped due to coverage constraints: " << skipped_by_coverage << endl;
	cerr << "Total number of cliques: " << clique_writer.getTotalCount() << endl;

	// Tear down the optional writers (writer before its stream).
	if (edge_writer != 0) {
		delete edge_writer;
		delete edge_ofstream;
	}
	if (reads_ofstream!=0) {
		delete reads_ofstream;
	}
	if (coverage_writer != 0) {
		coverage_writer->finish();
		delete coverage_writer;
	}
	return 0;
}
