'''
Created on Dec 9, 2010

@author: oabalbin
'''

import os
import exome.gatk_cluster.snps_calling_cluster as snp
#import exome.variantEval.snps_annotator as ann
from collections import defaultdict, deque
from optparse import OptionParser

from exome.jobs.base import JOB_SUCCESS, JOB_ERROR
from exome.jobs.job_runner import qsub_cac, qsub_loc, run_local
from exome.jobs.config import ExomePipelineConfig, ExomeAnalysisConfig
from exome.snps_callers.pileups import do_pileup
from exome.variantEval.variant_intersector import SNPs_isec
from exome.variantEval.snps_annotator import create_snps_annotation



# Global Variables
# Cluster node resources used when sizing qsub job requests.
NODE_MEM=45000.0  # total usable memory per node, in MB (presumably MB -- matches -Xmx usage; confirm)
NODE_CORES=12  # physical cores per node
SINGLE_CORE=1  # core count requested for single-threaded jobs
MEM_PER_CORE= int(float(NODE_MEM) / NODE_CORES)  # whole MB of memory available per core
# wt=walltime
WT_SHORT= "24:00:00"
WT_LONG= "60:00:00" #"100:00:00"


def myddict():
    """Factory for a one-level mapping: missing keys get an empty deque."""
    return defaultdict(deque)


def nested_ddict():
    """Factory for a two-level mapping: key -> key -> deque, built lazily.

    Used to bucket per-patient sample files by category ('benign'/'tumor').
    Defined via the module-level myddict() (rather than a lambda) so the
    result stays picklable.
    """
    return defaultdict(myddict)

def check_create_dir(root_path, dir_name=None):
    """Ensure a directory exists, optionally with a named subdirectory.

    Creates root_path if missing.  When dir_name is given, also creates
    root_path/dir_name if missing and returns that path; otherwise returns
    root_path itself.
    """
    if not os.path.isdir(root_path):
        os.mkdir(root_path)
    if dir_name is None:
        return root_path
    subfolder = os.path.join(root_path, dir_name)
    if not os.path.isdir(subfolder):
        os.mkdir(subfolder)
    return subfolder

def samtools_mpileup(ref_genome, list_bam_files, outfile, path_to_sam, target_regions=None):
    '''
    Build the shell command for a samtools mpileup run.

    ref_genome      -- FASTA reference passed to -f
    list_bam_files  -- BAM files to pile up
    outfile         -- file the pileup output is redirected to
    path_to_sam     -- directory prefix of the samtools binary
    target_regions  -- optional BED file of regions passed to -l

    Returns the command as a single shell string.
    '''
    samtools_command = path_to_sam + 'samtools'
    args = [samtools_command, 'mpileup', '-f', ref_genome] + list_bam_files
    if target_regions is not None:
        args += ['-l', target_regions]
    args += ['>', outfile]
    # The original replace(',',';')/join(',')/replace round-trip reduced to
    # a plain space-join of the arguments.
    return ' '.join(args)


def define_patients_cohort(analysis):
    '''
    Group the per-sample mpileup files of an analysis by patient and
    sample category.

    Returns a nested defaultdict: patient_id -> 'benign'/'tumor' -> deque
    of mpileup file paths.
    '''
    # varscan_files() takes no per-sample argument, so it is loop-invariant;
    # call it once instead of once per sample as the original did.
    # NOTE(review): assumes varscan_files() is idempotent setup -- confirm.
    analysis.varscan_files()
    cohort_patients = nested_ddict()
    for sp in analysis.samples:
        if sp.category == 'benign':
            cohort_patients[sp.patient_id]['benign'].append(sp.benign_reg_mpileup_file)
        else:
            cohort_patients[sp.patient_id]['tumor'].append(sp.tumor_reg_mpileup_file)
        # print() form is valid in both Python 2 and 3 for a single value.
        print(sp.patient_id)
    return cohort_patients


def snps_calls_VarScan_somatic(list_bam_normals,list_bam_tumors, snps_calls_file, varscan_options, use_mem, path_to_varscan):
    '''
    Build the argv list for a VarScan 'somatic' run:

        java -jar VarScan.jar somatic normal.pileup tumor.pileup output.basename

    VarScan reports germline, somatic, and LOH events at positions where
    both normal and tumor samples have sufficient coverage (default: 8),
    writing output.snp and output.indel.

    Option meanings:
    --min-coverage          Minimum coverage in normal and tumor [10]
    --min-coverage-normal   Minimum coverage in normal to call somatic [10]
    --min-coverage-tumor    Minimum coverage in tumor to call somatic [5]
    --min-var-freq          Minimum variant frequency for a heterozygote [0.20]
    --p-value               P-value threshold to call a heterozygote [1.0e-01]
    --somatic-p-value       P-value threshold to call a somatic site [1.0e-04]

    Returns the command as a list of arguments (not a joined string).
    '''
    # Flag name paired with its index into varscan_options.
    option_flags = (('--min-coverage', 0),
                    ('--min-coverage-normal', 1),
                    ('--min-coverage-tumor', 2),
                    ('--min-var-freq', 3),
                    ('--p-value', 4),
                    ('--somatic-p-value', 5))
    options = []
    if varscan_options:
        for flag, idx in option_flags:
            options.append(flag)
            options.append(varscan_options[idx])

    java_part = ['java', '-Xmx' + str(use_mem) + 'm', '-jar',
                 path_to_varscan + 'varscan', 'somatic']
    return (java_part + list_bam_normals + list_bam_tumors +
            [snps_calls_file] + options)


def process_somatic_calls(snps_calls_file, path_to_varscan):
    '''
    Build the shell command for VarScan 'processSomatic':

        java -jar VarScan.jar processSomatic [output.snp]

    When run, VarScan writes:
        output.snp.Somatic.hc   (high-confidence somatic mutations)
        output.snp.Somatic.lc   (low-confidence somatic mutations)
        output.snp.Germline     (sites called Germline)
        output.snp.LOH          (sites called loss-of-heterozygosity)

    Returns the command as a single shell string.
    '''
    varscan_command = path_to_varscan + 'varscan'
    args = ['java', '-jar', varscan_command, 'processSomatic', snps_calls_file]
    # The original replace()/join() round-trip reduced to a space-join.
    return ' '.join(args)


def snps_calls_VarScan_single_sample(list_bam_files, snps_calls_file, use_mem, varscan_options, path_to_varscan):
    '''
    Build the shell command for a single-sample VarScan 'pileup2snp' run.

        java -Xmx<mem>m -jar <path>varscan pileup2snp <pileups> [options] > snps_calls_file

    Side effect: creates (or truncates) snps_calls_file so the redirect
    target exists before the job runs.

    Option meanings:
    --min-coverage  Minimum read depth at a position to make a call [8]
    --min-reads2    Minimum supporting reads at a position to call variants [2]
    --min-avg-qual  Minimum base quality at a position to count a read [15]
    --min-var-freq  Minimum variant allele frequency threshold [0.01]
    --p-value       Default p-value threshold for calling variants [99e-02]

    Returns the command as a single shell string.
    '''
    if varscan_options:
        options = ['--min-coverage', varscan_options[0],
                   '--min-reads2', varscan_options[1],
                   '--min-avg-qual', varscan_options[2],
                   '--min-var-freq', varscan_options[3],
                   '--p-value', varscan_options[4]]
    else:
        options = []

    # Pre-create/truncate the output file; a context manager guarantees the
    # handle is closed (the original used a bare open()/close() pair).
    with open(snps_calls_file, 'w'):
        pass

    varscan_command = path_to_varscan + 'varscan'
    args = ['java', '-Xmx' + str(use_mem) + 'm', '-jar', varscan_command,
            'pileup2snp'] + list_bam_files + options + ['>', snps_calls_file]
    # The original replace()/join() round-trip reduced to a space-join.
    return ' '.join(args)

        
        
def snps_byVarScan_byPatient(analysis, configrun, jobrunfunc, prev_deps):
    '''
    Submit one VarScan 'somatic' job per patient, pairing the patient's
    benign (normal) and tumor mpileup files.

    analysis   -- ExomeAnalysisConfig: supplies samples and output paths
    configrun  -- ExomePipelineConfig: supplies tool paths / memory settings
    jobrunfunc -- job submission callable (run_local / qsub_cac / qsub_loc)
    prev_deps  -- list of job ids the new jobs must wait on, or None

    Returns the list of submitted job ids.
    '''
    extra_mem = configrun.gatk_use_mem
    path_to_varscan = configrun.varscan_path
    # VarScan somatic thresholds, in flag order: min-coverage,
    # min-coverage-normal, min-coverage-tumor, min-var-freq, p-value,
    # somatic-p-value.  (List comprehension instead of map() so the result
    # is indexable under Python 3 as well.)
    varscan_options = [str(v) for v in (1, 10, 8, 0.20, 0.1, 0.0001)]

    my_email = configrun.email_addresses
    analysis.varscan_files()

    # Copy so we never mutate the caller's dependency list.
    deps = list(prev_deps) if prev_deps is not None else []

    check_create_dir(analysis.varscan_calls_dir)
    deps_list = []
    jobn = 'vs'
    cohort_patients = define_patients_cohort(analysis)
    # .items() instead of Python-2-only .iteritems().
    for pid, cohort_samples in cohort_patients.items():
        # NOTE(review): this output path comes from `analysis`, not from the
        # patient, so every patient's job appears to write the same file --
        # confirm whether a per-patient path was intended.
        snps_varscan_somatic = analysis.snps_varscan_somatic
        list_bam_normals = list(cohort_samples['benign'])
        list_bam_tumors = list(cohort_samples['tumor'])

        cmd = snps_calls_VarScan_somatic(list_bam_normals, list_bam_tumors,
                                         snps_varscan_somatic, varscan_options,
                                         extra_mem, path_to_varscan)

        jobid = jobrunfunc(jobn + '_' + pid + '_so', cmd, SINGLE_CORE, cwd=None,
                           walltime=WT_LONG, pmem=extra_mem, deps=deps,
                           stdout=None, email_addresses=my_email)

        # TODO: convert the VarScan output to VCF (or similar) so it can be
        # annotated downstream.
        deps_list.append(jobid)

    return deps_list

            
def snps_byVarScan_bySingleSample(analysis, configrun, jobrunfunc, prev_deps):
    '''
    Submit one VarScan 'pileup2snp' job per sample category
    ('benign'/'tumor'), calling SNVs on each group of mpileup files
    independently.

    analysis   -- ExomeAnalysisConfig: supplies samples and output paths
    configrun  -- ExomePipelineConfig: supplies tool paths / memory settings
    jobrunfunc -- job submission callable (run_local / qsub_cac / qsub_loc)
    prev_deps  -- list of job ids the new jobs must wait on, or None

    Returns the list of submitted job ids.
    '''
    extra_mem = configrun.gatk_use_mem
    path_to_varscan = configrun.varscan_path
    # Hard-threshold options, in flag order: min-coverage, min-reads2,
    # min-avg-qual, min-var-freq, p-value.  (List comprehension instead of
    # map() so the result is indexable under Python 3 as well.)
    varscan_options = [str(v) for v in (1, 1, 13, 0.01, 0.1)]

    my_email = configrun.email_addresses
    analysis.varscan_files()

    # Copy so we never mutate the caller's dependency list.
    deps = list(prev_deps) if prev_deps is not None else []

    deps_list = []
    jobn = 'vs'
    check_create_dir(analysis.varscan_calls_dir)

    # Bucket pileup files by sample category.
    cohort_samples = defaultdict(deque)
    for sp in analysis.samples:
        # NOTE(review): the original read these paths from `analysis`, not
        # from the sample (`sp`) as define_patients_cohort does; kept as-is,
        # but this looks like it should be sp.* -- confirm.
        if sp.category == 'benign':
            cohort_samples['benign'].append(analysis.benign_reg_mpileup_file)
        else:
            cohort_samples['tumor'].append(analysis.tumor_reg_mpileup_file)

    # Distinct loop variable (the original shadowed `cohort_samples`);
    # .items() instead of Python-2-only .iteritems().  The original's
    # benign/tumor branch here was removed: both arms were identical.
    for category, pileup_files in cohort_samples.items():
        # NOTE(review): output path comes from `analysis`, so the benign and
        # tumor jobs appear to write the same file -- confirm.
        snps_varscan_single = analysis.snps_varscan_single
        list_bam_files = list(pileup_files)

        cmd = snps_calls_VarScan_single_sample(list_bam_files, snps_varscan_single,
                                               extra_mem, varscan_options,
                                               path_to_varscan)

        jobid = jobrunfunc(jobn + '_' + category + '_so', cmd, SINGLE_CORE, cwd=None,
                           walltime=WT_LONG, pmem=extra_mem, deps=deps,
                           stdout=None, email_addresses=my_email)

        # TODO: convert the VarScan output to VCF (or similar) so it can be
        # annotated downstream.
        deps_list.append(jobid)

    return deps_list



if __name__ == '__main__':

    optionparser = OptionParser("usage: %prog [options] ")
    optionparser.add_option("-r", "--config_file", dest="config_file",
                            help="file with run configuration")
    optionparser.add_option("-a", "--analysis_file", dest="analysis_file",
                            help="file with experiment configuration")
    optionparser.add_option("--paired_samples", dest="paired_samples", action="store_true", default=False,
                            help="paired samples snv calling")
    optionparser.add_option("--multi_samples", dest="multi_samples", action="store_true", default=False,
                            help="multi-sample snv calling")
    optionparser.add_option("--local", dest="local", action="store_true", default=False)
    optionparser.add_option("--cluster", dest="cluster", action="store_true", default=False)
    optionparser.add_option("-p", "--processes", type=int, dest="num_processors", default=1)

    (options, args) = optionparser.parse_args()

    config = ExomePipelineConfig()
    config.from_xml(options.config_file)
    analysis = ExomeAnalysisConfig()
    analysis.from_xml(options.analysis_file, config.output_dir)
    # Jobs submitted from the command line start with no prior dependencies.
    depends = []

    # Exactly one of --local / --cluster must be given.
    if not (options.local ^ options.cluster):
        optionparser.error("Must set either --local or --cluster to run job")
    if options.local:
        jobrunfunc = run_local
    else:
        jobrunfunc = qsub_cac
    # BUG FIX: the original had a third branch `elif options.local_cluster:`
    # selecting qsub_loc, but no --local_cluster option is ever defined
    # (AttributeError if reached) and the xor check above made it
    # unreachable anyway; the branch is removed.

    if options.multi_samples:
        analysis_type = 'cohort'
        use_recal_files = False
        depends.extend(do_pileup(analysis, config, use_recal_files, jobrunfunc, depends, analysis_type))
        snps_byVarScan_bySingleSample(analysis, config, jobrunfunc, depends)
    elif options.paired_samples:
        analysis_type = 'bypatient'
        use_recal_files = False
        depends.extend(do_pileup(analysis, config, use_recal_files, jobrunfunc, depends, analysis_type))
        snps_byVarScan_byPatient(analysis, config, jobrunfunc, depends)

        

    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
        
