/* Copyright 2012 Tobias Marschall
 * 
 * This file is part of CLEVER.
 * 
 * CLEVER is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * CLEVER is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with CLEVER.  If not, see <http://www.gnu.org/licenses/>.
 */

#include <cassert>
#include <cstdio>
#include <fstream>
#include <iostream>
#include <map>
#include <vector>

#include <unistd.h>

#include <boost/math/distributions/normal.hpp>
#include <boost/program_options.hpp>
#include <bamtools/api/BamReader.h>

#include "InsertLengthDistribution.h"

using namespace std;
namespace po = boost::program_options;

typedef map<int,unsigned int> histogram_t;

/** Print usage banner and option help to stderr, then terminate with exit code 1. */
void usage(const char* name, const po::options_description& options_desc) {
	static const char* const banner[] = {
		"",
		"Reads BAM format from stdin and outputs histogram of insert lengths",
		"of UNIQUELY MAPPED reads. Here, insert length refers to the area between",
		"two alignments (excluding the alignments).",
		"",
		"IMPORTANT: Assumes that, in the BAM input, alignments belonging to the same read (pair)",
		"are grouped, i.e. are given in subsequent lines.",
		""
	};
	cerr << "Usage: " << name << " [options]" << endl;
	for (size_t i = 0; i < sizeof(banner)/sizeof(banner[0]); ++i) {
		cerr << banner[i] << endl;
	}
	cerr << options_desc << endl;
	exit(1);
}

/** Returns true iff the alignment's CIGAR string contains a soft-clip (S),
 *  hard-clip (H), or padding (P) operation. */
bool is_clipped(const BamTools::BamAlignment& alignment) {
	typedef vector<BamTools::CigarOp>::const_iterator cigar_iter_t;
	for (cigar_iter_t op = alignment.CigarData.begin(); op != alignment.CigarData.end(); ++op) {
		const char t = op->Type;
		if ((t == 'S') || (t == 'H') || (t == 'P')) {
			return true;
		}
	}
	return false;
}

/** If both mates of a pair map uniquely (exactly one alignment each) to the
 *  same chromosome on opposite strands, count the pair's internal segment
 *  length -- the gap between the two alignments, excluding the alignments
 *  themselves; may be negative if they overlap -- in the histogram.
 *  @return true iff the pair was counted. */
bool process_read(const vector<BamTools::BamAlignment>& alignments1, const vector<BamTools::BamAlignment>& alignments2, histogram_t& histogram) {
	if ((alignments1.size()==1) && (alignments2.size()==1) && (alignments1[0].IsReverseStrand()!=alignments2[0].IsReverseStrand()) && (alignments1[0].RefID==alignments2[0].RefID)) {
		int insert_size = 0;
		// The internal segment runs from the end of the leftmost alignment
		// to the start of the rightmost one.
		if (alignments1[0].Position <= alignments2[0].Position) {
			insert_size = alignments2[0].Position - alignments1[0].GetEndPosition();
		} else {
			insert_size = alignments1[0].Position - alignments2[0].GetEndPosition();
		}
		// map::operator[] value-initializes absent counts to zero, so one
		// lookup suffices (previously: find() followed by a second lookup).
		histogram[insert_size] += 1;
		return true;
	}
	return false;
}

/** Compute mean and stddev of the Gaussian making up the main peak,
 *  robustly estimated from the 0.25, 0.5, and 0.75 quantiles (the IQR-based
 *  estimate is insensitive to outliers outside the peak).
 *  @param histogram map from insert length to count (same type as histogram_t).
 *  @param total_count sum of all counts in the histogram.
 *  @param mean output: the interpolated median.
 *  @param sd output: (q75-q25) / (2*0.6744898), where 0.6744898 is the
 *         0.75 quantile of the standard normal distribution.
 *  Sets both outputs to 0 when the histogram is empty or total_count<=0. */
void compute_mean_and_stddev(const std::map<int,unsigned int>& histogram, long long total_count, double* mean, double* sd) {
	// Guard against degenerate input; the scan below would otherwise
	// divide by zero or produce meaningless quantiles.
	if ((total_count <= 0) || histogram.empty()) {
		*mean = 0.0;
		*sd = 0.0;
		return;
	}
	// Single pass: interpolate each quantile inside the bin where its
	// cumulative target is reached. Bin i is treated as the real interval
	// [i-0.5, i+0.5), hence the -0.5 offset.
	const double targets[3] = { 0.25*total_count, 0.50*total_count, 0.75*total_count };
	double quantiles[3] = { 0.0, 0.0, 0.0 };
	int next = 0;
	long long cumulated_count = 0;
	for (std::map<int,unsigned int>::const_iterator it = histogram.begin(); it != histogram.end(); ++it) {
		long long new_cumulated_count = cumulated_count + it->second;
		// Several quantiles may fall into the same bin.
		while ((next < 3) && (new_cumulated_count >= targets[next])) {
			quantiles[next] = ((double)it->first) - 0.5 + (targets[next] - cumulated_count) / it->second;
			next += 1;
		}
		if (next == 3) break;
		cumulated_count = new_cumulated_count;
	}
	*mean = quantiles[1];
	// 0.6744898 = Phi^{-1}(0.75): scales the interquartile range to a stddev.
	*sd = (quantiles[2] - quantiles[0]) / 2 / 0.6744898;
}

/** Probability mass the given normal distribution assigns to the integer
 *  interval [from, to], treating each integer i as covering the real
 *  interval [i-0.5, i+0.5). */
double normal_mass(int from, int to, const boost::math::normal& norm) {
	const double upper = boost::math::cdf(norm, ((double)to) + 0.5);
	const double lower = boost::math::cdf(norm, ((double)from) - 0.5);
	return upper - lower;
}

/**
 * Reads BAM from stdin, collects internal-segment lengths of uniquely
 * mapped, opposite-strand read pairs, robustly estimates mean/sd of the
 * main peak, and prints a (re-binned) histogram to stdout.
 * Output columns: bin_start bin_end relative_frequency [gaussian_probability]
 */
int main(int argc, char* argv[]) {
	// PARAMETERS
	int min_counts_per_bin = 20;
	string mean_sd_filename = "";
	bool gaussian_values = false;

	po::options_description options_desc("Allowed options");
	options_desc.add_options()
		("min_count_per_bin,c", po::value<int>(&min_counts_per_bin)->default_value(20), "Minimum number of counts per bin. If necessary, bins are joined to reach this number of counts.")
		("mean_and_sd,m", po::value<string>(&mean_sd_filename), "Write (robustly estimated) mean and standard deviation of main peak to given filename.")
		// bool_switch: a plain value<bool> with zero_tokens() cannot parse a
		// bare "-g" (no token to convert); bool_switch is the proper flag type.
		("gaussian_values,g", po::bool_switch(&gaussian_values), "Print fourth column: probability based on Gaussian estimated from main peak.")
	;

	// Refuse to run interactively: BAM input must be piped in.
	if (isatty(fileno(stdin))) {
		usage(argv[0], options_desc);
	}

	po::variables_map options;
	try {
		po::store(po::parse_command_line(argc, argv, options_desc), options);
		po::notify(options);
	} catch(exception& e) {
		cerr << "Error: " << e.what() << "\n";
		return 1;
	}

	if (min_counts_per_bin<1) {
		cerr << "Error: option -c: integer >0 expected, but got " << min_counts_per_bin << "." << endl;
		// Abort: previously execution continued with the invalid bin size.
		return 1;
	}

	BamTools::BamReader bam_reader;
	if (!bam_reader.Open("/dev/stdin")) {
		cerr << "Error opening BAM input from /dev/stdin" << endl;
		return 1;
	}

	string last_read_name = "";
	// alignments of the currently processed read pair
	vector<BamTools::BamAlignment> alignments1, alignments2;
	BamTools::BamAlignment read_aln;
	bool skip_this_read = false;
	// number of all alignments
	long long alignment_counter = 0;
	// number of alignments used in histogram
	long long total_count = 0;
	histogram_t histogram;
	// Relies on alignments of one read pair appearing on consecutive lines
	// (see usage text): a pair is finalized when the read name changes.
	while ( bam_reader.GetNextAlignment(read_aln) ) {
		alignment_counter += 1;
		if (alignment_counter % 1000000 == 0) {
			cerr << "Having processed " << alignment_counter << " read alignments" << endl;
		}
		assert(read_aln.IsFirstMate() != read_aln.IsSecondMate());
		if (alignment_counter==1) {
			last_read_name = read_aln.Name;
		}
		if (last_read_name.compare(read_aln.Name) != 0) {
			// Read name changed: finalize the previous pair.
			if (!skip_this_read) {
				if (process_read(alignments1,alignments2,histogram)) {
					total_count += 1;
				}
			}
			alignments1.clear();
			alignments2.clear();
			skip_this_read = false;
			last_read_name = read_aln.Name;
		}
		// Skip unmapped or clipped alignments.
		if (!read_aln.IsMapped()) skip_this_read = true;
		// Exclude reads with a mapping quality of 0
		if (read_aln.MapQuality == 0) skip_this_read = true;
		if (is_clipped(read_aln)) skip_this_read = true;
		// BWA-style tags: X0 = number of best hits, X1 = number of suboptimal
		// hits, XA = alternative hits. Any sign of a non-unique mapping
		// disqualifies the pair.
		uint32_t x0_tag = -1;
		uint32_t x1_tag = -1;
		string xa_tag = "";
		if (read_aln.GetTag("X0",x0_tag)) {
			if (x0_tag>1) skip_this_read = true;
		}
		if (read_aln.GetTag("X1",x1_tag)) {
			if (x1_tag>0) skip_this_read = true;
		}
		if (read_aln.GetTag("XA",xa_tag)) {
			skip_this_read = true;
		}
		// More than one alignment per mate also means "not unique".
		if (read_aln.IsFirstMate()) {
			if (alignments1.size()>0) {
				skip_this_read = true;
			} else {
				alignments1.push_back(read_aln);
			}
		}
		if (read_aln.IsSecondMate()) {
			if (alignments2.size()>0) {
				skip_this_read = true;
			} else {
				alignments2.push_back(read_aln);
			}
		}
	}
	// Finalize the last pair in the input.
	if (!skip_this_read && process_read(alignments1,alignments2,histogram)) {
		total_count += 1;
	}
	if (histogram.empty()) {
		// Previously undefined behavior: the output code dereferenced
		// histogram.begin() on an empty map.
		cerr << "Error: no usable (uniquely mapped, opposite-strand) read pairs found." << endl;
		return 1;
	}
	double mean;
	double sd;
	compute_mean_and_stddev(histogram, total_count, &mean, &sd);
	cerr << "Main peak of internal segment length distribution: mean " << mean << ", sd " << sd << endl;
	if (mean_sd_filename.compare("") != 0) {
		ofstream ofs(mean_sd_filename.c_str());
		ofs << mean << " " << sd << endl;
		ofs.close();
	}
	if (gaussian_values && !(sd > 0)) {
		cerr << "Error: standard deviation of main peak is not positive; cannot compute Gaussian probabilities." << endl;
		return 1;
	}
	// Placeholder sd=1.0 keeps construction valid when the Gaussian column
	// was not requested (boost::math::normal throws on sd<=0).
	boost::math::normal norm(mean, (sd > 0) ? sd : 1.0);
	// ------- OUTPUT -------
	// number of alignments in current bin
	int count = 0;
	// sum of counts until now
	int cumulated_count = 0;
	histogram_t::const_iterator it = histogram.begin();
	int interval_start = it->first;
	int last = -1;
	for (; it != histogram.end(); ++it) {
		count += it->second;
		cumulated_count += it->second;
		// Close a bin once it holds enough counts, but never leave fewer
		// than min_counts_per_bin counts for the remaining final bin.
		if ((count>min_counts_per_bin) && (total_count-cumulated_count>min_counts_per_bin)) {
			cout << interval_start << " " << it->first << " " << double(count)/total_count;
			if (gaussian_values) {
				cout << " " << normal_mass(interval_start,it->first,norm);
			}
			cout << endl;
			count = 0;
			interval_start = it->first + 1;
		}
		last = it->first;
	}
	// Flush the remaining counts as the final bin.
	if (count>0) {
		cout << interval_start << " " << last << " " << double(count)/total_count;
		if (gaussian_values) {
			cout << " " << normal_mass(interval_start,last,norm);
		}
		cout << endl;
	}
	return 0;
}
