'''
Created on Nov 17, 2010

@author: oabalbin
'''

import os
import glob
from optparse import OptionParser

from exome.jobs.base import JOB_SUCCESS, JOB_ERROR
from exome.jobs.job_runner import qsub, run_local
from exome.jobs.config import ExomePipelineConfig, ExomeAnalysisConfig
from collections import defaultdict, deque

# Global variables: cluster-node geometry and job walltime limits.
NODE_MEM=45000.0     # memory available per node (presumably MB -- TODO confirm units)
NODE_CORES=12        # CPU cores per node
SINGLE_CORE=1        # convenience constant for single-core job submissions
MEM_PER_CORE= int(float(NODE_MEM) / NODE_CORES)  # even per-core share of node memory
# wt=walltime (hh:mm:ss strings passed to the scheduler)
WT_SHORT= "24:00:00"
WT_LONG= "60:00:00" #"100:00:00"


def check_create_dir(root_path, dir_name=None):
    '''
    Ensure root_path exists as a directory, and optionally a subfolder
    dir_name inside it.

    Returns the path of the deepest directory ensured: root_path itself
    when dir_name is None, otherwise os.path.join(root_path, dir_name).
    '''
    if not os.path.isdir(root_path):
        os.mkdir(root_path)
    # Guard clause: nothing more to do when no subfolder was requested.
    if dir_name is None:
        return root_path
    subfolder = os.path.join(root_path, dir_name)
    if not os.path.isdir(subfolder):
        os.mkdir(subfolder)
    return subfolder


def read_files_folder(folderpath, ext):
    '''Return the paths of all files in folderpath whose names end with ext.'''
    pattern = os.path.join(folderpath, '*' + ext)
    return [path for path in glob.glob(pattern)]


def samtools_mpileup(ref_genome, list_bam_files, outfile, path_to_sam, target_regions=None):
    '''
    Return the shell command string for a samtools mpileup run over the
    given BAM files, redirecting BCF output to outfile.

    ref_genome     -- path of the reference FASTA (passed to -f)
    list_bam_files -- list of BAM file paths
    outfile        -- file the command redirects its output to
    path_to_sam    -- directory prefix where the samtools binary lives
    target_regions -- optional BED file of regions (passed to -l)
    '''
    samtools_command = path_to_sam + 'samtools'
    args = [samtools_command, 'mpileup', '-gf', ref_genome] + list_bam_files
    if target_regions is not None:
        args += ['-l', target_regions]
    args += ['>', outfile]
    # Plain join replaces the old comma/semicolon substitution round-trip,
    # which silently corrupted any ';' in an argument into ','.
    return ' '.join(args)

def bcftools_call_snps(bcf_file_name, path_to_sam):
    '''
    Return the shell command string that calls SNP candidates with bcftools
    (BCF -> VCF conversion plus variant calling: view -vcg), and the name of
    the VCF file the command writes.

    bcf_file_name -- input BCF; the output name is derived by replacing
                     '.bcf' with '.vcf'
    path_to_sam   -- directory prefix where the bcftools binary lives

    Returns (command_string, vcf_outfile_name).
    '''
    bcftools_command = path_to_sam + 'bcftools/bcftools'
    outfile_name = bcf_file_name.replace('.bcf', '.vcf')
    # Plain join replaces the old comma/semicolon substitution round-trip,
    # which silently corrupted any ';' in an argument into ','.
    command = ' '.join([bcftools_command, 'view', '-vcg', bcf_file_name, '>', outfile_name])
    return command, outfile_name


def sam_bcftools_mpileup(ref_genome, list_bam_files, outfile, path_to_sam, target_regions=None):
    '''
    Return the shell command string for a combined samtools mpileup |
    bcftools view pipeline that writes candidate variants to outfile.

    The output file is pre-created (truncated) here, as the original code
    did, presumably so downstream job-dependency checks see it exist.

    ref_genome     -- reference FASTA (passed to -f)
    list_bam_files -- list of BAM file paths
    outfile        -- VCF file the pipeline redirects into
    path_to_sam    -- directory prefix for the samtools/bcftools binaries
    target_regions -- optional BED file of regions (passed to -l)
    '''
    samtools_command = path_to_sam + 'samtools'
    bcftools_command = path_to_sam + 'bcftools/bcftools'
    # Touch/truncate the output file; use a context manager so the handle
    # is closed even if open() partially fails.
    with open(outfile, 'w'):
        pass

    args = [samtools_command, 'mpileup', '-ugf', ref_genome] + list_bam_files
    if target_regions is not None:
        args += ['-l', target_regions]
    args += ['|', bcftools_command, 'view', '-vcg', '-', '>', outfile]
    # Plain join replaces the old comma/semicolon substitution round-trip,
    # which silently corrupted any ';' in an argument into ','.
    return ' '.join(args)



##### vcf files analysis using vcftools
'''
/exds/sw/bioinfo/alignment/vcftools/cpp/vcftools --vcf s_3_12_sequence.aln.rmdup.candidates.vcf 
--minQ 20 --out /exds/users/oabalbin/projects/snps/exomes/aM18/s_3_12_sequence.aln.rmdup.candidatesR 
--recode --keep-INFO DP --keep-INFO AF1 --keep-INFO MQ --keep-INFO CI95 --keep-INFO INDEL
You can also filter by mean depth of coverage and other options; see the vcftools documentation.
'''


def apply_snps_filters(vcf_file_name, outfile_name, filters_dict, path_to_vcftools):
    '''
    Return the vcftools command string that applies hard filters to a VCF
    file to nominate the final SNP calls.

    vcf_file_name    -- input VCF with candidate calls
    outfile_name     -- desired output name; a trailing '.recode.vcf' is
                        stripped because vcftools --recode appends it itself
    filters_dict     -- mapping of vcftools flag -> value (e.g. {'--minQ': '20'})
    path_to_vcftools -- directory prefix where the vcftools tree lives

    NOTE: dict iteration order decides filter order on the command line;
    with more than one filter the order is not guaranteed to be stable
    across interpreter versions.
    '''
    filters = []
    # items() works on both Python 2 and 3 (the original iteritems() is
    # Python-2-only).
    for flag, value in filters_dict.items():
        filters.append(flag)
        filters.append(value)

    outfile_name = outfile_name.replace('.recode.vcf', '')
    vcftools_command = path_to_vcftools + 'cpp/vcftools'
    args = ([vcftools_command, '--vcf', vcf_file_name] + filters +
            ['--out', outfile_name, '--recode',
             '--keep-INFO', 'DP', '--keep-INFO', 'AF1', '--keep-INFO', 'MQ',
             '--keep-INFO', 'CI95', '--keep-INFO', 'INDEL'])
    # Plain join replaces the old comma/semicolon substitution round-trip,
    # which silently corrupted any ';' in an argument into ','.
    return ' '.join(args)

    
####################
def vfc_validation(inputfile_name, path_to_vcftools):
    '''
    Return the command string that validates a VCF file with vcf-validator.

    Useful after calling SNPs with samtools; the other vcftools commands
    perform this check themselves.

    NOTE(review): the function name looks like a typo for "vcf_validation";
    kept as-is so existing callers do not break.
    '''
    vcftools_command = path_to_vcftools + 'vcf-validator'
    # Plain join replaces the old comma/semicolon substitution round-trip,
    # which silently corrupted any ';' in an argument into ','.
    return ' '.join([vcftools_command, inputfile_name])



def snps_stats(inputfile_name, outfile_name, path_to_vcftools):
    '''
    Return the command string that computes summary statistics for a VCF
    file with vcf-stats, redirecting the report to outfile_name.
    '''
    vcftools_command = path_to_vcftools + 'bin/vcf-stats'
    # Plain join replaces the old comma/semicolon substitution round-trip,
    # which silently corrupted any ';' in an argument into ','.
    return ' '.join([vcftools_command, inputfile_name, '>', outfile_name])

def vcf2tab(inputfile_name):
    '''
    Write the VCF file's content into a '.tab' file next to it and return
    the subprocess return code (0 on success).

    The original code returned an undefined name ``retcode`` (a guaranteed
    NameError) and left the conversion command commented out, with the
    literal placeholder string 'inputfile_name' in the pipeline.

    TODO(review): the intended pipeline was apparently
    ``cat <input> | <vcf-to-tab converter>``; the converter was never
    specified. Until it is, this only copies the input to the .tab file.
    '''
    import subprocess
    outfile_name = inputfile_name.replace('.vcf', '.tab')
    with open(outfile_name, 'w') as f:
        retcode = subprocess.call(['cat', inputfile_name], stdout=f)
    return retcode


def shared_snps_btw_samples(inpufile_list, pathdir, path_to_vcftools):
    '''
    Return the command string that intersects two or more VCF files with
    vcf-isec, keeping only positions shared by ALL of them, bgzipping the
    result.

    inpufile_list    -- list of VCF file paths [sic: parameter name kept
                        for backward compatibility]
    pathdir          -- output prefix; '.vcf.isec.gz' is appended
    path_to_vcftools -- directory prefix where the vcftools tree lives

    Returns (command_string, outfile_name).
    '''
    vcftools_command = path_to_vcftools + 'bin/vcf-isec'
    outfile_name = pathdir + '.vcf.isec.gz'

    # BUG FIX: the original put the bare int len(inpufile_list) into args,
    # which crashed the subsequent str.replace() calls with AttributeError.
    # Also, vcf-isec expects '-n =N' as a single '=N' token, not '= N'.
    args = ([vcftools_command, '-n', '=' + str(len(inpufile_list))] +
            inpufile_list + ['|', 'bgzip', '-c', '>', outfile_name])
    return ' '.join(args), outfile_name

       
def snps_calling_pairedSamples(analysis, configrun, num_processors, jobrunfunc):
    '''
    Submit the SNP-calling pipeline for a paired-sample exome analysis.

    Samples are split into 'benign' and 'tumor' cohorts by sp.category and
    each cohort is processed independently through three chained jobs:
    (1) samtools|bcftools mpileup -> candidate VCF, (2) vcftools hard
    filtering, (3) vcf-stats report. Each job depends on the previous one
    via the job id returned by jobrunfunc.

    analysis       -- ExomeAnalysisConfig-like object; provides .samples,
                      per-cohort output file names, and .sam_calls_dir
    configrun      -- ExomePipelineConfig-like object with tool paths,
                      reference genome and resource settings
    num_processors -- unused here; all jobs are submitted with SINGLE_CORE
    jobrunfunc     -- submission callable (run_local or qsub); must return
                      a job id usable as a dependency for later jobs
    '''
    # Notes to improve this function:
    # The output from the GATK realignment could be used and called on
    # instead. In that case the unmerged, duplicate-removed files may be
    # wanted; check whether filters can be applied when using mpileup or
    # whether mpileup will receive a merged file.

    ####

    # Unpack tool paths and resources from the run configuration.
    # NOTE(review): num_cores, path_to_picard, snpdb_vcf, indeldb_file,
    # hapmap_vcf and tgk_vcf are unpacked but never used below.
    extra_mem, num_cores = configrun.gatk_use_mem, configrun.gatk_num_cores
    path_to_vcftools, path_to_picard, path_to_sam = configrun.vcftools_path, configrun.picard_path, \
                                                configrun.samtools_path
    target_exons = configrun.gatk_target_exons
    genomes = configrun.genomes['human']
    ref_genome, snpdb_vcf, indeldb_file = genomes.gatk_ref_genome, genomes.snpdb, \
                                         genomes.indeldb
    hapmap_vcf, tgk_vcf = genomes.hapmap, genomes.OneKgenomes
    my_email=configrun.email_addresses

    # In samtools we apply hard filtering in order to nominate the final
    # snps candidates.
    snps_filters_dict={'--minQ': '20'}
    i=0

    # Group each sample's deduplicated BAM by cohort: everything that is
    # not 'benign' is treated as tumor.
    cohort_samples = defaultdict(deque)
    for sp in analysis.samples:
        if sp.category=='benign':
            cohort_samples['benign'].append(sp.sorted_mmarkdup_bam)
        else:
            cohort_samples['tumor'].append(sp.sorted_mmarkdup_bam)


    check_create_dir(analysis.sam_calls_dir)

    # NOTE(review): iteritems() is Python-2-only.
    for sptype, list_bam_samples in cohort_samples.iteritems():
        # Job-name prefix: 'sst' + cohort type.
        jobn='sst'
        jobn=jobn+sptype#+str(i)
        # Do the mpileup. Normals and tumors are called independently.
        # (Only 'benign'/'tumor' keys exist, so the file-name variables
        # below are always bound.)
        if sptype == 'benign':
            vcf_file_name=analysis.benign_mpileup_file
            filtered_vcf_file=analysis.benign_filtered_snps
            snps_stats_file=analysis.benign_snps_stats
        elif sptype == 'tumor':
            vcf_file_name=analysis.tumor_mpileup_file
            filtered_vcf_file=analysis.tumor_filtered_snps
            snps_stats_file=analysis.tumor_snps_stats

        command = sam_bcftools_mpileup(ref_genome, list(list_bam_samples), vcf_file_name, path_to_sam, target_exons)
        # Create the vcf file of candidate snps.
        jobidbcf = jobrunfunc(jobn+'_vcf', command, SINGLE_CORE, cwd=None, walltime=WT_SHORT, pmem=extra_mem,
                           deps=None, stdout=None, email_addresses=my_email)


        # Filter candidate snps according to quality and coverage depth
        # using vcftools; depends on the mpileup job.
        command = apply_snps_filters(vcf_file_name, filtered_vcf_file, snps_filters_dict, path_to_vcftools)
        jobidvcf = jobrunfunc(jobn+'_fts', command, SINGLE_CORE, cwd=None, walltime=WT_SHORT, pmem=extra_mem,
                           deps=jobidbcf, stdout=None, email_addresses=my_email)

        # Report stats on the filtered calls (could also be done with gatk);
        # depends on the filtering job.
        command = snps_stats(filtered_vcf_file,snps_stats_file, path_to_vcftools)

        jobidvcf = jobrunfunc(jobn+'_st', command, SINGLE_CORE, cwd=None, walltime=WT_SHORT, pmem=None,
                           deps=jobidvcf, stdout=None, email_addresses=my_email)
        i+=1
        

def snps_calling_multipleSamples(analysis, config, num_processors, jobrunfunc):
    '''Placeholder for multi-sample SNV calling; not implemented yet.'''
    # Grammar fixed ("This is function is" -> "This function is"); the
    # parenthesized form prints identically under Python 2 and 3.
    print('This function is not implemented yet. It is work in progress')
    

if __name__ == '__main__':

    # Command-line interface: requires a pipeline config XML (-r), an
    # analysis XML (-a), one calling mode (--paired_samples or
    # --multi_samples) and exactly one execution mode (--local XOR --cluster).
    optionparser = OptionParser("usage: %prog [options] ")
    optionparser.add_option("-r", "--config_file", dest="config_file",
                            help="file with run configuration")
    optionparser.add_option("-a", "--analysis_file", dest="analysis_file",
                            help="file with experiment configuration")
    optionparser.add_option("--paired_samples", dest="paired_samples", action="store_true", default=False,
                            help="paired samples snv calling")
    optionparser.add_option("--multi_samples", dest="multi_samples", action="store_true", default=False,
                            help="multi-sample snv calling")
    optionparser.add_option("--local", dest="local", action="store_true", default=False)
    optionparser.add_option("--cluster", dest="cluster", action="store_true", default=False)
    # optparse converts the builtin `int` to the "int" type name internally.
    optionparser.add_option("-p", "--processes", type=int, dest="num_processors", default=1)

    (options, args) = optionparser.parse_args()

    # Load the pipeline and analysis configuration from their XML files.
    # NOTE(review): missing -r/-a values reach from_xml as None unchecked.
    config = ExomePipelineConfig()
    config.from_xml(options.config_file)
    analysis = ExomeAnalysisConfig()
    analysis.from_xml(options.analysis_file, config.output_dir)


    # XOR: exactly one of --local / --cluster must be given.
    if not (options.local ^ options.cluster):
        optionparser.error("Must set either --local or --cluster to run job")
    if options.local:
        jobrunfunc = run_local
    elif options.cluster:
        jobrunfunc = qsub

    # --multi_samples takes precedence over --paired_samples if both are
    # set; if neither is set, nothing runs.
    if options.multi_samples:
        # Work in progress. 12-14-10
        snps_calling_multipleSamples(analysis, config, options.num_processors, jobrunfunc)
    elif options.paired_samples:
        snps_calling_pairedSamples(analysis, config, options.num_processors, jobrunfunc)
    