'''
Created on Jan 29, 2011

@author: oabalbin
'''
import os
import logging
import subprocess
from optparse import OptionParser
from collections import defaultdict, deque
import exome.gatk_cluster.picard_commands_cluster as mypicard
from exome.jobs.config import ExomeLaneConfig, ExomePipelineConfig, ExomeAnalysisConfig
from exome.jobs.job_runner import qsub_cac, qsub_loc, run_local
from exome.jobs.base import JOB_SUCCESS, JOB_ERROR, up_to_date,job_done,up_to_date_job

# Global Variables
NODE_MEM=45000.0
NODE_CORES=12
SINGLE_CORE=1
MEM_PER_CORE= int(float(NODE_MEM) / NODE_CORES)
# wt=walltime
WT_SHORT= "24:00:00"
WT_LONG= "100:00:00" #"100:00:00"

    
def check_create_dir(root_path, dir_name=None):
    '''
    Ensure that root_path exists and, optionally, a subdirectory inside it.

    Parameters:
        root_path -- directory created (one level) if it does not exist
        dir_name  -- optional name of a subdirectory to create under root_path

    Returns:
        The path of the subdirectory when dir_name is given, otherwise
        root_path itself. Safe to call repeatedly (idempotent).
    '''
    if not os.path.isdir(root_path):
        os.mkdir(root_path)
    if dir_name is None:
        return root_path
    subfolder = os.path.join(root_path, dir_name)
    if not os.path.isdir(subfolder):
        os.mkdir(subfolder)
    return subfolder

def bam_cleaner_lane(lane_file, lane_root_dir, config_file, deps, jobfunc):
    '''
    Submit a Picard MarkDuplicates job that cleans the aligned BAM file
    of a single lane.

    Returns the job id(s) produced by jobfunc, as a list, so downstream
    steps can declare them as dependencies.
    '''
    pipeline_cfg = ExomePipelineConfig()
    pipeline_cfg.from_xml(config_file)
    lane_cfg = ExomeLaneConfig()
    lane_cfg.from_xml(lane_file, lane_root_dir)

    mem_request = pipeline_cfg.gatk_use_mem
    picard_dir = pipeline_cfg.picard_path
    notify = pipeline_cfg.email_addresses

    # An empty/false dependency list is normalized to None for the runner.
    dependencies = deps if deps else None

    # Make sure the per-lane GATK working directory exists before submitting.
    check_create_dir(lane_cfg.gatk_dir)

    md_cmd = mypicard.markDuplicates(lane_cfg.align_bam_file,
                                     lane_cfg.markdup_bam_file,
                                     mem_request, picard_dir)
    submitted = jobfunc(lane_cfg.name + '_md', md_cmd, SINGLE_CORE, cwd=None,
                        walltime=WT_SHORT, pmem=mem_request,
                        deps=dependencies, stdout=None,
                        email_addresses=notify)
    return list(submitted)

def bam_cleaner_samples(analysis, configrun, num_processors, jobrunfunc, deps):
    '''
    Merge, de-duplicate, sort and index the lane-level BAM files of every
    sample in the analysis.

    For each sample the following job chain is submitted through jobrunfunc:
      1. merge all lane BAMs into one raw BAM (or symlink it when the
         sample has a single lane)
      2. Picard MarkDuplicates on the merged BAM
      3. Picard sort + index of the de-duplicated BAM
      4. removal of the intermediate merged / de-duplicated BAMs

    Parameters:
        analysis       -- analysis configuration exposing .samples, each with
                          .name, .output_dir, .lanes and the BAM path attributes
                          used below
        configrun      -- pipeline configuration with Picard path, memory and
                          e-mail settings
        num_processors -- unused in this function
        jobrunfunc     -- job submission callable (e.g. run_local or qsub_cac)
        deps           -- job ids the first step of every sample must wait on

    Returns a list whose elements are the job ids returned for the final
    sort/index and cleanup steps that were actually submitted.
    '''
    #### Header Run parameters
    
    extra_mem, num_cores = configrun.gatk_use_mem, configrun.gatk_num_cores
    path_to_picard = configrun.picard_path
    my_email=configrun.email_addresses
    
    # cluster parameters        
    # Lists
    # Map sample name -> deque of its lane-level aligned BAM paths.
    sample_lanes_dict=defaultdict(deque)
    jobidps=[]
    for sp in analysis.samples:
        sp_name=sp.name
        # NOTE(review): this bare attribute access has no effect; it looks
        # like a leftover from a removed assignment.
        sp.output_dir
        for i,thlane in enumerate(sp.lanes):
            sample_lanes_dict[sp_name].append(thlane.align_bam_file)
        
        jname='cl'+sp_name
        sample_lanes = sample_lanes_dict[sp_name]
        #out_dir outdir for the analysis
        sample_raw_merged_bam = sp.merged_raw_bam
        # Merge realigned, recalibrated lanes that belong to sample sp
        #command = mysam.merge_bam_files(list(recalibrated_lanes), sample_recal_merged_bam, path_to_sam)
        # NOTE(review): up_to_date() is called with the same file as both
        # arguments here (and in the two checks below) -- presumably that
        # degenerates into an existence/timestamp check; confirm against the
        # implementation of up_to_date in exome.jobs.base.
        if up_to_date(sample_raw_merged_bam,sample_raw_merged_bam):
            logging.info("[SKIPPED] SAM BAM CLEANER. File %s file is up to date" % (sample_raw_merged_bam))
            jobimd=[]
            # NOTE(review): deps must be a list here; the __main__ driver in
            # this file passes deps=None, which would raise TypeError if this
            # skip branch is taken -- verify callers.
            jobimd.extend(deps)
        else:
            # More than one lane: merge all lane BAMs into one raw BAM.
            if len(sample_lanes)!=1:
                logging.info("SAM BAM CLEANER. Merged File %s file is not up to date" % (sample_raw_merged_bam))
                command = mypicard.picard_mergebam(list(sample_lanes), sample_raw_merged_bam, MEM_PER_CORE, path_to_picard)
                jobimd = jobrunfunc(jname+'mb', command, SINGLE_CORE, cwd=sp.output_dir, walltime=WT_SHORT, pmem=None, 
                                      deps=deps, stdout='sam_merged_lanes.log', email_addresses=my_email)
            else:
                # Single lane: no merge needed; symlink the BAM and its index
                # in place of the merged file instead of copying.
                logging.info("SAM BAM CLEANER. Create symbolic link because there is only one lane File %s file and is up to date" % (sample_raw_merged_bam))
                # NOTE(review): Python 2 print statements -- debugging leftovers.
                print sample_lanes,list(sample_lanes),sample_lanes[0]
                args=['ln','-s',sample_lanes[0],sample_raw_merged_bam,'&','ln','-s',sample_lanes[0].replace('.bam','.bai'),sample_raw_merged_bam.replace('.bam','.bai')]
                print args
                # Join with commas, turn commas into spaces and the '&'
                # separator into a newline: the result is two 'ln -s' shell
                # commands, one for the .bam and one for the .bai.
                command = ",".join(args).replace(',',' ').replace('&','\n')
                
                jobimd = jobrunfunc(jname+'mb', command, SINGLE_CORE, cwd=sp.output_dir, walltime=WT_SHORT, pmem=None, 
                                      deps=deps, stdout='sam_merged_lanes.log', email_addresses=my_email)
                
        
        # Step 2: mark duplicates on the merged BAM.
        merged_dedup_bam=sp.merged_mmdup_raw_bam
        if up_to_date(merged_dedup_bam,merged_dedup_bam):
            logging.info("[SKIPPED] SAM BAM CLEANER. Mark duplicates step. File %s file is up to date" % (merged_dedup_bam))
            jobidmmd=[]
            jobidmmd.extend(jobimd)
        else:       
            # Mark duplicates in the merged Bam file\n
            logging.info("SAM BAM CLEANER. Mark duplicates step. File %s file is not up to date" % (merged_dedup_bam))
            command = mypicard.markDuplicates(sample_raw_merged_bam, merged_dedup_bam, 
                                                                MEM_PER_CORE, path_to_picard)
            
            jobidmmd = jobrunfunc(jname+'mmd', command, SINGLE_CORE, cwd=sp.output_dir, walltime=WT_SHORT, pmem=None, 
                                  deps=jobimd, stdout='sam_mdup.log', email_addresses=my_email)            
        
        # Step 3: sort and index the de-duplicated BAM.
        sorted_mdedup_bam = sp.sorted_quickmmdup_bam
        if up_to_date(sorted_mdedup_bam,sorted_mdedup_bam):
            logging.info("[SKIPPED] SAM BAM CLEANER. Sort and Index step. File %s file is up to date" % (sorted_mdedup_bam))
            jobidpss=[]
            # NOTE(review): this extends with jobimd (merge step ids) while
            # the non-skipped branch below depends on jobidmmd (mark-dup
            # step ids) -- confirm jobimd is intended here.
            jobidpss.extend(jobimd)
        else:
            logging.info("SAM BAM CLEANER. Sort and Index step. File %s file is not up to date" % (sorted_mdedup_bam))
            # Finally sort and index the merged file using picard
            command  = mypicard.sortIndexSam(merged_dedup_bam, sorted_mdedup_bam, MEM_PER_CORE, path_to_picard)
            jobidpss= jobrunfunc(jname+'ps', command, SINGLE_CORE, cwd=sp.output_dir, walltime=WT_SHORT, pmem=None, 
                                  deps=jobidmmd, stdout='sam_smdup.log', email_addresses=my_email)
            jobidps.extend([jobidpss])
        
        # Step 4: remove the intermediate BAMs once the sorted/indexed file
        # is produced. Note this tests existence at submission time, so it
        # only fires on a re-run where the intermediates already exist.
        if os.path.exists(merged_dedup_bam) and os.path.exists(sample_raw_merged_bam):            
            args = ['rm',merged_dedup_bam, sample_raw_merged_bam]
            command = ",".join(args).replace(',',' ').replace(';',',')
            jobidpss2= jobrunfunc(jname+'rm', command, SINGLE_CORE, cwd=sp.output_dir, walltime=WT_SHORT, pmem=None, 
                                      deps=jobidpss, stdout='sam_clean.log', email_addresses=my_email)
            jobidps.extend([jobidpss2])
        
    return  jobidps



if __name__ == '__main__':

    # Command line interface for the sample-level BAM cleaning step:
    # the user supplies the run and analysis XML files and picks exactly
    # one execution mode (--local or --cluster).
    parser = OptionParser("usage: %prog [options] ")
    parser.add_option("-r", "--config_file", dest="config_file",
                      help="file with run configuration")
    parser.add_option("-a", "--analysis_file", dest="analysis_file",
                      help="file with experiment configuration")
    parser.add_option("--local", dest="local", action="store_true", default=False)
    parser.add_option("--cluster", dest="cluster", action="store_true", default=False)
    parser.add_option("-p", "--processes", type=int, dest="num_processors", default=1)
    opts, extra_args = parser.parse_args()

    # Load the pipeline and experiment descriptions from their XML files.
    config = ExomePipelineConfig()
    config.from_xml(opts.config_file)
    analysis = ExomeAnalysisConfig()
    analysis.from_xml(opts.analysis_file)

    # Exactly one of --local / --cluster must be set; parser.error exits.
    if not (opts.local ^ opts.cluster):
        parser.error("Must set either --local or --cluster to run job")
    jobrunfunc = run_local if opts.local else qsub_cac

    # No upstream job dependencies when launched directly.
    bam_cleaner_samples(analysis, config, opts.num_processors, jobrunfunc, None)