'''
Created on Nov 24, 2009

@author: mkiyer
'''
import ruffus
from optparse import OptionParser
import glob
import logging
import os
import re
import sys
import pickle
import subprocess

import veggie.db.refdb as refdb
import veggie.alignment.sam as samtools
from veggie.app.rnaseq.cufflinks import CuffCompare, Cufflinks
from veggie.app.rnaseq.tophat import TopHat
import veggie.db.sample as sdb 
from veggie.db.sample.samplegroup import parse_samplegroups_xml

def generate_libraries():
    """
    Yield one parameter list per pipeline job.

    Each yielded value is ``[sample_xmlfile, library_pickle_path,
    analysis_cwd, library]``, matching the signature of setup_pipeline().
    Libraries with duplicate IDs (a SampleDB inconsistency) are skipped;
    QC-failed libraries are skipped only when qc_filter is enabled.
    """
    logger = logging.getLogger(__name__)

    sample_xmlfile = "samples.xml"
    config_xmlfile = "config.xml"
    assert os.path.exists(sample_xmlfile)
    assert os.path.exists(config_xmlfile)
    # TODO: add path to sampledb instance to config file
    sdbi_path = '/home/bigwig/sdb/sampledb.pickle'
    # TODO: add qc_filter to config file
    qc_filter = False
    # load the current sampledb snapshot
    sdbi = sdb.get_sampledb_instance(sdbi_path)
    # keep track of duplicate library IDs that signify errors in SampleDB
    library_ids = set()
    # parse the sample xml file and retrieve all the desired samples
    for sample_group in parse_samplegroups_xml(sample_xmlfile):
        for sample_name in sample_group.samples:
            # get the libraries associated with this sample
            libraries = sdbi.get_libraries_by_sample_name(sample_name, best=True)
            for library in libraries:
                # a library ID seen under two samples is a SampleDB error
                if library.id in library_ids:
                    logger.error('%s %s: library already exists with different sample' % (sample_name, library.id))
                    continue
                library_ids.add(library.id)
                # filter poor quality samples (idiom fix: identity test
                # against False rather than '== False')
                if library.qc_status is False:
                    logger.warning('library %s: marked QC-FAIL' % library.id)
                    if qc_filter:
                        logger.warning('library %s: excluded from query' % library.id)
                        continue
                logger.info('%s %s: chose best library %s' % (sample_name, library.id, library))
                # generate params for setup pipeline; each library gets its
                # own working directory named after its ID
                analysis_cwd = library.id
                library_file = os.path.join(analysis_cwd, "library.pickle")
                yield [sample_xmlfile, library_file, analysis_cwd, library]

@ruffus.files(generate_libraries)
def setup_pipeline(inputs, output_file, analysis_cwd, library):
    """
    Create the per-library analysis directory, compute the mate inner
    distance, and pickle the augmented library object for downstream tasks.

    inputs/output_file are supplied by ruffus; analysis_cwd is the library's
    working directory and library is the SampleDB library object.
    """
    logger = logging.getLogger(__name__)
    # make directory for results
    if not os.path.exists(analysis_cwd):
        logger.debug('%s: creating analysis results path %s' % (library.id, os.path.abspath(analysis_cwd)))
        os.makedirs(analysis_cwd)
    # compute the mate inner distance parameter for the library
    inner_dist = library.insert_size - library.read_length
    if inner_dist <= 0:
        logger.warning("%s: (inner_dist < 0) insert_size: %d read_length: %d inner_dist: %d" % 
                       (library.id, library.insert_size, library.read_length, inner_dist))
        # fall back to a nominal 200bp insert size when the recorded one
        # is implausible
        inner_dist = 200 - library.read_length
        logger.warning("%s: setting inner_dist to (200 - read_length)=%d" % (library.id, inner_dist))
    elif inner_dist < 100:
        logger.warning("%s: (inner_dist < 100) insert_size: %d read_length: %d inner_dist: %d" % 
                        (library.id, library.insert_size, library.read_length, inner_dist))
    # add the inner_dist attribute to the library object
    library.inner_dist = inner_dist
    # pickle the library into the directory for subsequent tasks
    # BUG FIX: open in binary mode ("wb") -- downstream tasks load with
    # 'rb', and text mode corrupts pickles on Python 3 -- and close the
    # handle deterministically instead of leaking it
    library_file = os.path.join(analysis_cwd, "library.pickle")
    with open(library_file, "wb") as f:
        pickle.dump(library, f)

def run_tophat(analysis_id, read_type, fastq_files, quals_param, inner_dist, analysis_cwd, processors=2):
    """
    Run TopHat on a library's FASTQ file(s) against hg18.

    read_type selects single-read vs paired-end mode; quals_param is the
    FASTQ quality-format flag name passed through as '--<quals_param>'.
    Returns the TopHat result object, or None if read_type is unrecognized.
    """
    logger = logging.getLogger(__name__ + ':run_tophat')
    # skip alignments that already exist
    logger.debug("%s: preparing to run TopHat on file(s): %s quals: %s inner_dist: %d" % 
                 (analysis_id, fastq_files, quals_param, inner_dist))      
    # options
    junction_gff3_path = refdb.get_ucsc_table('genes_combined', 'gff3')
    assert os.path.exists(junction_gff3_path)

    # generic tophat options
    tophat_options = {'-p': processors,
                      '--output-dir': analysis_cwd,
                      '--' + quals_param: None,
                      '-g': 40,
                      '-G': junction_gff3_path}
    # BUG FIX: initialize res so the final return does not raise
    # UnboundLocalError when read_type is unrecognized
    res = None
    # TODO: replace string with enum type for single_read
    if read_type == 'single_read':
        # create the app controller
        tophat_app = TopHat(options=tophat_options)
        res = tophat_app.run_single_read(fastq_files[0],
                                         output_dir=analysis_cwd, 
                                         genome="hg18")
    elif read_type == 'paired_end':
        tophat_options.update({'--mate-inner-dist': inner_dist,
                               '--mate-std-dev': 20,
                               '--min-intron-length': 60})
        tophat_app = TopHat(options=tophat_options)
        res = tophat_app.run_paired_end(fastq_files[0], fastq_files[1],
                                        output_dir=analysis_cwd, 
                                        genome="hg18")
    else:
        # consistency fix: use the module logger, not the root logger
        logger.error("%s: Unknown read type %s" % (analysis_id, read_type))
    logger.debug("%s: TopHat finished" % (analysis_id))
    return res

@ruffus.transform(setup_pipeline, ruffus.regex(r"(.*)/library.pickle$"), r"\1/accepted_hits.sam", r"\1")
def tophat_task(inlibrary, outsamfile, analysis_cwd):
    """
    Ruffus task: run TopHat for one library.

    inlibrary is the pickled library written by setup_pipeline();
    outsamfile is the expected TopHat output path used by ruffus for
    dependency checking.
    """
    # retrieve pickled library
    # BUG FIX: close the pickle file handle instead of leaking it
    with open(inlibrary, 'rb') as f:
        library = pickle.load(f)
    analysis_id = library.id
    # setup logging (per-library logger name)
    logger = logging.getLogger(__name__ + analysis_id)
    # run tophat
    # TODO: extract additional params from a config file
    run_tophat(analysis_id,
               library.read_type,
               library.fastq_files, 
               library.fastq_format,
               library.inner_dist, 
               analysis_cwd, 
               processors=2)

@ruffus.transform(tophat_task, ruffus.regex(r"(.*)/accepted_hits.sam$"), r"\1/aligned_reads.bam", r"\1")
def process_tophat_output_task(insamfile, outsamfile, analysis_cwd):
    """
    Ruffus task: convert TopHat SAM output into a processed BAM file.

    Delegates to samtools.process_tophat_sam with duplicate removal
    enabled; temporary files are written into analysis_cwd and discarded.
    """
    # setup logging
    logger = logging.getLogger(__name__)
    # process the SAM file using the reference TopHat SAM header
    tophat_header_file = refdb.get_tophat_sam_header_file()
    bamfile = samtools.process_tophat_sam(insamfile, 
                                          outsamfile, 
                                          tophat_header_file, 
                                          rmdup=True, 
                                          tmp_path=analysis_cwd, 
                                          keep_tmp=False)
    if bamfile is None:
        # NOTE(review): this message interpolates the worker PID, not the
        # library/analysis id used in every other log line -- confirm that
        # is intentional and not a copy-paste slip
        logger.critical('%s: error preparing BAM file' % os.getpid())
        return

@ruffus.transform(process_tophat_output_task, ruffus.regex(r"(.*)/aligned_reads.bam$"), r"\1/transcripts.gtf", r"\1")
def cufflinks_task(inbamfile, transcripts_gtf, analysis_cwd):
    """
    Ruffus task: assemble transcripts with Cufflinks.

    Converts the BAM to a temporary SAM (Cufflinks input), runs Cufflinks
    with the library's inner_dist, and removes the temporary SAM file.
    """
    logger = logging.getLogger(__name__)
    # retrieve pickled library
    # BUG FIX: close the pickle file handle instead of leaking it
    inlibrary = os.path.join(analysis_cwd, "library.pickle")
    with open(inlibrary, 'rb') as f:
        library = pickle.load(f)
    analysis_id = os.path.basename(analysis_cwd)
    # TODO: read configuration from a file
    processors = 2
    # write BAM to a temporary SAM file
    tmpsamfile = 'tmp.sam'
    tmpsampath = os.path.join(analysis_cwd, tmpsamfile)
    p1 = subprocess.Popen(["samtools", "view", inbamfile, "-o", tmpsampath])
    retcode = p1.wait()
    if retcode != 0:
        # BUG FIX: previously the samtools exit status was ignored and
        # Cufflinks ran against a missing/partial SAM file
        logger.error("%s: samtools view returned %d" % (analysis_id, retcode))
    try:
        # setup and run cufflinks (consistency fix: module logger, not root)
        logger.debug("%s: preparing to run Cufflinks with inner_dist %d" % 
                     (analysis_id, library.inner_dist))
        cufflinks_app = Cufflinks(cwd=analysis_cwd)
        retcode, res = cufflinks_app.run(tmpsamfile, 
                                         library.inner_dist, 
                                         num_threads=processors, 
                                         label=analysis_id)
        logger.debug("%s: Cufflinks returned %d" % (analysis_id, retcode))
    finally:
        # BUG FIX: remove the temporary SAM file even if Cufflinks raises
        os.remove(tmpsampath)
    
@ruffus.transform(cufflinks_task, ruffus.regex(r"(.*)/transcripts.gtf$"), r"\1/transcripts.tmap", r"\1")
def cuffcompare_task(transcripts_gtf, transcripts_tmap, cwd):
    """
    Ruffus task: compare assembled transcripts against the Ensembl
    reference annotation with cuffcompare.
    """
    # consistency fix: use the module logger like the other tasks,
    # not the root logger
    logger = logging.getLogger(__name__)
    # setup and run cuffcompare
    analysis_id = os.path.basename(cwd)
    app = CuffCompare(cwd=cwd)
    # TODO: add to config file
    ref_gtf_file = refdb.get_ucsc_table('ensembl', 'gtf')
    # stats file is named after the reference annotation
    outfile = os.path.splitext(os.path.basename(ref_gtf_file))[0] + '_stats.txt'
    retcode, result_paths = app.run(os.path.basename(transcripts_gtf),
                                    ref_gtf_file=ref_gtf_file,
                                    outfile=outfile)
    logger.debug("%s: %s returned %s" % (analysis_id, 'cuffcompare', str(retcode)))
    return


if __name__ == '__main__':
    logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s',
                        level=logging.DEBUG)
    logger = logging.getLogger(__name__)

    # parse command line arguments
    optionparser = OptionParser("usage: %prog [options] <results_path>")
    # BUG FIX: help text was copy-pasted from a (commented-out) samples
    # option; -p controls the number of parallel pipeline processes
    optionparser.add_option("-p", "--processors", dest="processors", type="int", default=1,
                            help="number of processors to use when running pipeline tasks")
    (options, args) = optionparser.parse_args()
    # validate command line arguments
    # TODO: use command line samples.xml, config.xml
    if len(args) == 0 or (not os.path.exists(args[0])):
        optionparser.error("Results path does not exist")
    results_path = args[0]

    # the sample/config XML files are expected in the current directory
    sample_xmlfile = "samples.xml"
    config_xmlfile = "config.xml"
    assert os.path.exists(sample_xmlfile)
    assert os.path.exists(config_xmlfile)

    logger.info('RNA-seq pipeline version 0.0.2')
    logger.info('------------------------------')
    logger.info('sample xml file: %s', sample_xmlfile)
    logger.info('config xml file: %s', config_xmlfile)
    logger.info('results_path: %s', results_path)

    # change directory to results path for subsequent steps
    os.chdir(results_path)
    # run the pipeline up to the final task; ruffus resolves upstream
    # dependencies automatically
    ruffus.pipeline_run([cuffcompare_task], verbose=2, multiprocess=options.processors)
