'''
Compute alignment statistics (read counts, strand balance, mismatch
profiles) from a BAM file and plot per-cycle / per-read mismatch rates.

Created on Jan 19, 2011

@author: mkiyer
'''
import argparse
import collections
import logging
import operator
import os
import re
import sys

import pysam

import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
#from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
#from matplotlib.figure import Figure
import numpy as np

# Parses a SAM 'MD' tag: alternating run-length of matching bases and the
# mismatched reference base(s).
# NOTE(review): deletions ('^ACGT' runs) are not matched by this pattern --
# confirm upstream alignments contain no deletions, or extend the regex.
mismatch_re = re.compile(r"(\d+)([agtcAGTC]*)")

class AlignmentStats(object):
    """Aggregated alignment statistics computed from a SAM/BAM file.

    Populated attributes (after from_sam): hits, reads, duplicate_reads,
    unmapped_reads, mate_unmapped_reads, qcfail_reads, strand_reads,
    read_lengths, read_multihits, cycle_mismatches, read_mismatches.
    """

    @staticmethod
    def _histogram(counts, size):
        """Flatten a {index: count} dict into a dense list of `size` ints
        (missing indices become zero)."""
        # np.int was removed from numpy; plain int is the equivalent dtype
        arr = np.zeros(size, dtype=int)
        for i, count in counts.items():
            arr[i] = int(count)
        return list(arr)

    @staticmethod
    def from_sam(samfh):
        """Scan all alignments in `samfh` (iterable of pysam reads) and
        return a populated AlignmentStats instance.

        Multi-mapping reads are down-weighted by 1/NH so that each read
        contributes a total weight of one across all of its hits.
        Raises KeyError if a mapped read lacks the MD or NM tag
        (same behavior as read.opt() in the original code).
        """
        a = AlignmentStats()
        # initialize statistics variables
        a.hits = 0
        a.reads = 0.0
        a.duplicate_reads = 0.0
        a.unmapped_reads = 0.0
        a.mate_unmapped_reads = 0.0
        a.qcfail_reads = 0.0
        a.strand_reads = [0.0, 0.0]  # index 0 = forward, 1 = reverse
        a.read_lengths = collections.defaultdict(float)
        multihit_counts = collections.defaultdict(float)
        cycle_mismatch_counts = collections.defaultdict(float)
        read_mismatch_counts = collections.defaultdict(float)
        for read in samfh:
            a.hits += 1
            if read.is_unmapped:
                a.unmapped_reads += 1
            if read.is_qcfail:
                a.qcfail_reads += 1
            if read.mate_is_unmapped:
                a.mate_unmapped_reads += 1
            if read.is_unmapped or read.is_qcfail:
                continue
            tag_dict = dict(read.tags)
            num_read_hits = tag_dict.get('NH', 1)
            # count reads (where one read may have multiple alignment hits)
            # by dividing by the number of hits for this read
            weighted_cov = 1.0 / num_read_hits
            a.reads += weighted_cov
            # number of multihits per read
            multihit_counts[num_read_hits] += 1
            # duplicate reads
            if read.is_duplicate:
                a.duplicate_reads += weighted_cov
            # keep track of reads of various lengths
            a.read_lengths[read.rlen] += weighted_cov
            # keep track of strand balance
            strand = int(read.is_reverse)
            a.strand_reads[strand] += weighted_cov
            # walk the MD tag to locate each mismatch along the read
            offset = 0
            for mismatch in mismatch_re.finditer(read.opt('MD')):
                skip, mmbase = mismatch.groups()
                offset += int(skip)
                if mmbase:
                    # report the position in sequencing-cycle order:
                    # reverse-strand hits are stored reverse-complemented,
                    # so flip the coordinate back
                    pos = (read.rlen - offset - 1) if read.is_reverse else offset
                    cycle_mismatch_counts[pos] += weighted_cov
                    offset += 1
            # total mismatches per read
            read_mismatch_counts[read.opt('NM')] += weighted_cov

        # report statistics
        logging.info("Number of alignment hits=%d" % (a.hits))
        logging.info("Number of reads=%d" % (int(a.reads)))
        logging.info("Number of reads flagged as duplicates=%d" % (int(a.duplicate_reads)))
        # BUG FIX: message previously read "umber of qc fail reads"
        logging.info("Number of qc fail reads=%d" % (int(a.qcfail_reads)))
        # BUG FIX: a.reads already counts only mapped (non-qcfail) reads;
        # the original subtracted unmapped_reads a second time here
        logging.info("Number of mapped reads=%d" % (int(a.reads)))
        logging.info("Number of unmapped reads=%d" % (a.unmapped_reads))
        logging.info("Number of reads with unmapped mate=%d" % (a.mate_unmapped_reads))
        # most abundant read length (handles variable read-length runs);
        # zero when no mapped reads were seen (the original crashed here)
        if a.read_lengths:
            major_read_length = int(max(a.read_lengths.items(),
                                        key=operator.itemgetter(1))[0])
        else:
            major_read_length = 0
        logging.info("Most common read length=%d" % (major_read_length))
        logging.info("Number of different read lengths=%d" % (len(a.read_lengths)))

        # BUG FIX: these arrays were sized max(keys) and filled over
        # range(max), silently dropping the count stored at the maximum
        # key; size them max+1 so every observed value is kept
        max_multihits = max(multihit_counts) if multihit_counts else 0
        a.read_multihits = AlignmentStats._histogram(multihit_counts,
                                                     max_multihits + 1)
        logging.info("Multimapping read counts=%s" % (a.read_multihits))

        # per-cycle counts are indexed 0..rlen-1, so the maximum read
        # length itself is the correct array size here
        max_rlen = max(a.read_lengths) if a.read_lengths else 0
        a.cycle_mismatches = AlignmentStats._histogram(cycle_mismatch_counts,
                                                       max_rlen)
        logging.info("Mismatches per cycle=%s" % (a.cycle_mismatches))

        max_mismatches = max(read_mismatch_counts) if read_mismatch_counts else 0
        a.read_mismatches = AlignmentStats._histogram(read_mismatch_counts,
                                                      max_mismatches + 1)
        logging.info("Mismatches per read=%s" % (a.read_mismatches))
        return a

    def mismatch_report(self, cycle_output_file, read_output_file):
        """Write two plots: the fraction of reads with a mismatch at each
        cycle (cycle_output_file) and the distribution of mismatch counts
        per read (read_output_file)."""
        total_reads = float(self.reads)
        mm_per_cycle = np.array(self.cycle_mismatches)
        print(mm_per_cycle)
        print('total', sum(mm_per_cycle))
        print(self.reads)
        plt.plot(mm_per_cycle / total_reads, '-o')
        plt.xlabel("Cycle")
        plt.ylabel("Fraction of reads with mismatch")
        plt.title("Mismatches per cycle")
        plt.savefig(cycle_output_file)
        mm_per_read = np.array(self.read_mismatches)
        print(mm_per_read)
        print('total', sum(i * mm_per_read[i] for i in range(1, len(mm_per_read))))
        print(self.reads)
        frac_mm_per_read = mm_per_read / total_reads
        # drop vanishingly small bins so the plot stays readable
        frac_mm_per_read = frac_mm_per_read[frac_mm_per_read > 0.001]
        plt.clf()
        plt.plot(frac_mm_per_read, '-o')
        plt.xlabel("Number of mismatches")
        plt.ylabel("Fraction of reads")
        plt.title("Number of mismatches per read")
        plt.savefig(read_output_file)

def main():
    """Command-line entry point.

    Usage: script.py BAM_FILE CYCLE_PLOT_FILE READ_PLOT_FILE
    Computes alignment statistics from the BAM file and writes the
    per-cycle and per-read mismatch plots.
    """
    logging.basicConfig(level=logging.DEBUG,
                        format="%(asctime)s - %(name)s - %(levelname)s - %(message)s")
    parser = argparse.ArgumentParser()
    parser.add_argument("bam_file")
    parser.add_argument("cycle_output_file")
    parser.add_argument("read_output_file")
    options = parser.parse_args()
    bamfh = pysam.Samfile(options.bam_file, "rb")
    try:
        a = AlignmentStats.from_sam(bamfh)
    finally:
        # BUG FIX: the original never closed the BAM file handle
        bamfh.close()
    a.mismatch_report(options.cycle_output_file, options.read_output_file)

if __name__ == '__main__':
    main()