'''
Created on Nov 17, 2010

@author: oabalbin
'''

import os
import glob
import subprocess
import exome.gatk_cluster.cluster_jobs_header as jh
#from collections import deque

'''
def check_bfrm_output(self, bfrmfolder,newFFile=[]):
     """
     Check if files  mA.txt, mF.txt, mPostPib.txt, exist
     """
     for f in bfrmof:
         fname = bfrmfolder + f
         print fname
         if os.path.isfile(fname):
             files.append(fname)

     return files
'''

def read_files_folder(folderpath,ext):
    '''
    Return the paths of all files in *folderpath* whose names end with
    the extension *ext* (matched with the shell pattern ``*<ext>``).
    '''
    pattern = os.path.join(folderpath, '*' + ext)
    # glob.glob already returns a list of matching paths; copy it so the
    # caller always owns a fresh list.
    return list(glob.glob(pattern))


def samtools_mpileup(ref_genome, list_bam_files, outfile, path_to_sam, target_regions=None):
    '''
    Build the shell command line for a samtools mpileup run (-g: BCF
    output, -f: faidx-indexed reference) over the given BAM files,
    redirecting stdout to *outfile*.

    ref_genome     -- path to the reference FASTA (-f)
    list_bam_files -- list of BAM file paths piled up together
    outfile        -- target of the shell '>' redirection
    path_to_sam    -- directory containing the samtools binary
    target_regions -- optional BED file restricting the pileup (-l)

    Returns the command string, ready for a shell/qsub submission.
    '''
    args = [path_to_sam + 'samtools', 'mpileup', '-gf', ref_genome]
    args += list(list_bam_files)
    if target_regions is not None:
        args += ['-l', target_regions]
    args += ['>', outfile]

    # Plain space-join. The previous comma/semicolon round-trip was an
    # obfuscated space-join that silently turned any ';' inside an
    # argument into ','.
    command = ' '.join(args)

    return command

def bcftools_call_snps(bcf_file_name, path_to_sam):
    '''
    Build the bcftools command that calls SNP candidates from a BCF
    pileup ('view -vcg': call variants, output variant sites only,
    with genotypes) and writes the VCF via shell redirection.

    bcf_file_name -- input BCF produced by samtools mpileup -g
    path_to_sam   -- samtools installation dir (bcftools lives under it)

    Returns (command, outfile_name); outfile_name swaps the '.bcf'
    suffix for '.vcf'.
    '''
    bcftools_command = path_to_sam + 'bcftools/bcftools'
    outfile_name = bcf_file_name.replace('.bcf', '.vcf')
    # Straight space-join; the old comma/semicolon dance corrupted ';'
    # characters inside arguments.
    command = ' '.join([bcftools_command, 'view', '-vcg', bcf_file_name,
                        '>', outfile_name])

    return command, outfile_name


def sam_bcftools_mpileup(ref_genome, list_bam_files, outfile, path_to_sam, target_regions=None):
    '''
    Build the one-shot 'samtools mpileup | bcftools view' pipeline that
    calls candidate variants directly to a VCF (-u: uncompressed BCF for
    piping; bcftools reads '-' from stdin).

    ref_genome     -- reference FASTA (-f)
    list_bam_files -- BAM files piled up together
    outfile        -- VCF written by the shell '>' redirection
    path_to_sam    -- samtools installation dir (bcftools under it)
    target_regions -- optional BED file restricting the pileup (-l)

    Returns the pipeline command string.
    '''
    samtools_command = path_to_sam + 'samtools'
    bcftools_command = path_to_sam + 'bcftools/bcftools'

    args = [samtools_command, 'mpileup', '-ugf', ref_genome]
    args += list(list_bam_files)
    if target_regions is not None:
        args += ['-l', target_regions]
    args += ['|', bcftools_command, 'view', '-vcg', '-', '>', outfile]

    # Plain space-join instead of the comma/semicolon round-trip, which
    # silently rewrote ';' inside arguments as ','.
    command = ' '.join(args)

    return command



##### vcf files analysis using vcftools
'''
/exds/sw/bioinfo/alignment/vcftools/cpp/vcftools --vcf s_3_12_sequence.aln.rmdup.candidates.vcf 
--minQ 20 --out /exds/users/oabalbin/projects/snps/exomes/aM18/s_3_12_sequence.aln.rmdup.candidatesR 
--recode --keep-INFO DP --keep-INFO AF1 --keep-INFO MQ --keep-INFO CI95 --keep-INFO INDEL
You can also filter by mean depth of coverage and other options; see the vcftools documentation.
'''


def apply_snps_filters(vcf_file_name, filters_dict, path_to_vcftools):
    '''
    Build the vcftools command that hard-filters a VCF of candidate SNPs
    and recodes the survivors, keeping the INFO fields listed below.

    vcf_file_name    -- input VCF ('.vcf' suffix expected)
    filters_dict     -- mapping of vcftools flag -> value,
                        e.g. {'--minQ': '20'}
    path_to_vcftools -- root of the vcftools installation

    Returns (command, outfile_name); outfile_name is also passed to
    vcftools as the --out prefix.
    '''
    filters = []
    # .items() instead of the Python-2-only .iteritems(); items() exists
    # on both Python 2 and 3, so this stays backward compatible.
    for thf, value in filters_dict.items():
        filters.append(thf)
        filters.append(value)

    outfile_name = vcf_file_name.replace('.vcf', '.filtered.vcf')
    vcftools_command = path_to_vcftools + 'cpp/vcftools'
    args = [vcftools_command, '--vcf', vcf_file_name] + filters + \
        ['--out', outfile_name, '--recode', '--keep-INFO', 'DP',
         '--keep-INFO', 'AF1', '--keep-INFO', 'MQ', '--keep-INFO',
         'CI95', '--keep-INFO', 'INDEL']

    # Plain space-join; the old comma/semicolon round-trip corrupted any
    # ';' inside an argument.
    command = ' '.join(args)

    return command, outfile_name

    
####################
def vfc_validation(inputfile_name, path_to_vcftools):
    '''
    Build the vcf-validator command for *inputfile_name*.

    Worth running on VCFs produced by samtools; the other vcf-tools
    validate their input themselves, so it is redundant there.

    Returns the command string.

    NOTE(review): the function name looks like a typo for
    'vcf_validation', but it is kept -- callers may depend on it.
    '''
    vcftools_command = path_to_vcftools + 'vcf-validator'
    # Plain space-join replaces the comma/semicolon round-trip, which
    # corrupted ';' characters inside arguments.
    command = ' '.join([vcftools_command, inputfile_name])

    return command



def snps_stats(inputfile_name,path_to_vcftools):
    '''
    Build the vcf-stats command that writes summary statistics for a VCF
    to '<input>.vcf.stats' via shell redirection.

    inputfile_name   -- input VCF ('.vcf' suffix expected)
    path_to_vcftools -- root of the vcftools installation

    Returns (command, outfile_name).
    '''
    vcftools_command = path_to_vcftools + 'bin/vcf-stats'
    outfile_name = inputfile_name.replace('.vcf', '.vcf.stats')
    # Plain space-join; the old comma/semicolon dance corrupted ';'
    # inside arguments.
    command = ' '.join([vcftools_command, inputfile_name, '>', outfile_name])

    return command, outfile_name

def vcf2tab(inputfile_name):
    '''
    Stream a VCF file into the matching '.tab' output file and return
    the subprocess return code (0 on success).

    Gzipped inputs ('.gz' suffix) are streamed with zcat, plain files
    with cat, as the original docstring intended.

    NOTE(review): the original arg list was
    ['cat', inputfile_name, '|', 'inputfile_name'] -- '|' does not pipe
    without shell=True and 'inputfile_name' was a literal string, so cat
    always failed on nonexistent files. The actual VCF->tab converter
    (presumably vcftools' vcf-to-tab) still needs to be hooked into this
    pipeline; TODO confirm which converter was intended.
    '''
    outfile_name = inputfile_name.replace('.vcf', '.tab')
    cat_command = 'zcat' if inputfile_name.endswith('.gz') else 'cat'
    # 'with' guarantees the output handle is closed even if call() raises.
    with open(outfile_name, 'w') as f:
        retcode = subprocess.call([cat_command, inputfile_name], stdout=f)

    return retcode


def shared_snps_btw_samples(inpufile_list,pathdir, path_to_vcftools):
    '''
    Build a vcf-isec pipeline that keeps the positions present in
    exactly len(inpufile_list) of the given VCF files and bgzips the
    result.

    inpufile_list    -- list of (bgzipped, indexed) VCF paths
    pathdir          -- output prefix; '.vcf.isec.gz' is appended
    path_to_vcftools -- root of the vcftools installation

    Returns (command, outfile_name).
    '''
    vcftools_command = path_to_vcftools + 'bin/vcf-isec'
    outfile_name = pathdir + '.vcf.isec.gz'

    # Two fixes here:
    #  * the original put the raw int from len() into args, which then
    #    crashed on a.replace() with AttributeError;
    #  * vcf-isec expects the operator and count as one token ('-n =2'),
    #    not '-n = 2'.
    args = [vcftools_command, '-n', '=%d' % len(inpufile_list)] + \
        list(inpufile_list) + ['|', 'bgzip', '-c', '>', outfile_name]
    command = ' '.join(args)

    return command, outfile_name

       
def snps_calling_multipleSamples(cohort_dict, run_dict):
    '''
    Submit the samtools/bcftools/vcftools SNP-calling pipeline to the
    cluster (via jh.qsub) for every cohort and sample type.

    cohort_dict -- {cohort_name: {sample_type: [bam_path, ...]}}; each
                   BAM list is piled up together in one mpileup run.
    run_dict    -- run configuration: 'ref_genome', 'path_to_sam',
                   'path_to_vcftools', 'target_exons', 'out_dir',
                   'myemail' are the keys read here.

    Returns the filtered VCF path of the LAST cohort/sample-type
    processed -- NOTE(review): earlier output names are discarded;
    confirm callers only need the last one.
    '''
    #run_param = config_run(configrun)
    #experiment_run = config_experiment(configexp)
    #ext='.rmdup.sorted.bam.bam'
    
    # Notes to improve this main here.
    # remember that you could use the output fromthe realignment 
    # of gatk and then call on it.
    # In  that case you maybe want the unmerged files and the 
    # remove duplicates. Or check if filters can be applied
    # when usin mpileup or if mpileup will receive a merged file
    
    ####
    
    # Pull the pieces of the run configuration used by this pipeline.
    ref_genome =  run_dict['ref_genome']
    path_to_sam, path_to_vcftools = run_dict['path_to_sam'], run_dict['path_to_vcftools']
    target_exons = run_dict['target_exons']
    out_dir = run_dict['out_dir']
    my_email=run_dict['myemail']
    
    # cluster parameters
    node_memory = 45000.0
    node_processors = 6
    single_processor=1
    mem_per_processors = int(float(node_memory) / node_processors)
    extra_mem=8000
    # wt=walltime
    short_wtime = "24:00:00"
    long_wtime="60:00:00"
    # Run parameters
    # NOTE(review): mask_indel_padding is defined but never used below.
    mask_indel_padding = 10
    
    # In samtools we apply hard filtering in order to nominate the final 
    # snps candidates
    snps_filters_dict={'--minQ': '20'}

    jobn='sst'
    i=0
    # NOTE(review): .iteritems() is Python-2 only; .items() would work on
    # both versions.
    for cohort, cohort_samples in cohort_dict.iteritems():
        #list_bam_files_normal = cohort_samples['normals']
        #list_bam_files_tumor = cohort_samples['tumors']
        #list_bam_all_samples = list_bam_files_normal+list_bam_files_tumor
        
        for sptype, list_bam_all_samples in cohort_samples.iteritems():
        
            # NOTE(review): jobn accumulates across iterations
            # ('sst0', 'sst01', ...); 'sst'+str(i) was probably intended.
            jobn=jobn+str(i)
            
            # Do the mpileup. Note that in this case I am using 
            # a full list of normals and tumor together to call
            # the variants. But It could be desirable to call on normals
            # an tumors independently
            #bcf_mpileup_file = out_dir+cohort+'.'+sptype+'.sam_snps.mpileup.ccf'
            vcf_file_name = out_dir+cohort+'.'+sptype+'.sam_snps.mpileup.vcf'
            #command = samtools_mpileup(ref_genome, list_bam_all_samples, bcf_mpileup_file, path_to_sam, target_exons)
            # Single-job pipeline: mpileup piped straight into bcftools.
            command = sam_bcftools_mpileup(ref_genome, list_bam_all_samples, vcf_file_name, path_to_sam, target_exons)
            #jobidmpu = jh.qsub(jobn+'_mpu', command, single_processor, cwd=None, walltime=long_wtime, pmem=extra_mem, 
            #                   deps=None, stdout=None, email_addresses=my_email)
            
            # Create the vcf file of candidate snps
            #command, vcf_file_name = bcftools_call_snps(bcf_mpileup_file, path_to_sam)
            jobidbcf = jh.qsub(jobn+'_bcf', command, single_processor, cwd=None, walltime=short_wtime, pmem=extra_mem, 
                               deps=None, stdout=None, email_addresses=my_email)
            
            
            # Filter candidate snps according to qualtity and coverage depth
            # vcftools to do this task
            # Depends on the bcf job having produced the VCF (deps=jobidbcf).
            command, filtered_vcf_file = apply_snps_filters(vcf_file_name, snps_filters_dict, path_to_vcftools)
            jobidvcf = jh.qsub(jobn+'_vcf', command, single_processor, cwd=None, walltime=short_wtime, pmem=extra_mem, 
                               deps=jobidbcf, stdout=None, email_addresses=my_email)
            
            
            # Report stats. It could be done using vcftools or gatk
            command, snps_stats_file = snps_stats(filtered_vcf_file,path_to_vcftools)
            
            jobidvcf = jh.qsub(jobn+'_st', command, single_processor, cwd=None, walltime=short_wtime, pmem=None, 
                               deps=jobidvcf, stdout=None, email_addresses=my_email)
            i+=1
 
                
    return filtered_vcf_file
    

if __name__ == '__main__':
    
    # Smoke-test configuration: one cohort ('Prost') with a single
    # normal and a single tumor BAM.
    cohort_dict = {'Prost':{'normals':['/nobackup/med-mctp/oabalbin/test/s_4_12_sequence.txt.psorted.realigned.fixMate.markdup.bam'],
                            'tumors':['/nobackup/med-mctp/oabalbin/test/s_3_12_sequence.txt.psorted.realigned.fixMate.markdup.bam']}}
    
    # Run configuration: tool installation paths, reference/annotation
    # files, and output/temp directories.
    # NOTE(review): paths are site-specific -- consider moving this to a
    # config file instead of hard-coding.
    run_dict = {'path_to_gatk':'/nobackup/med-mctp/sw/bioinfo/gatk/GenomeAnalysisTK-1.0.4705/',
                     'path_to_picard':'/nobackup/med-mctp/sw/bioinfo/picard/picard-tools-1.35/', 
                     'path_to_bwa':'/nobackup/med-mctp/sw/bioinfo/alignment/bwa/bwa-0.5.8a/',
                     'path_to_sam':'/nobackup/med-mctp/sw/bioinfo/samtools/samtools-0.1.10/',
                     'path_to_vcftools':'/nobackup/med-mctp/sw/bioinfo/vcftools/',
                     'resources_folder':'/nobackup/med-mctp/sw/bioinfo/gatk/GenomeAnalysisTK-1.0.4705/resources/', 
                     'rscipt_path':'/home/software/rhel5/R/2.10.1-gcc/bin/Rscript',
                     'use_mem':45000.0, 'num_cores':6,
                     'nmismatches':2,
                     'ref_genome':'/nobackup/med-mctp/sw/alignment_indexes/gatk/hg19/hg19.fa',
                     'ref_genome_bwa':'/nobackup/med-mctp/sw/alignment_indexes/bwa/hg19/hg19.fa',
                     'snpdb_file':'/nobackup/med-mctp/sw/alignment_indexes/gatk/hg19/dbsnp132_00-All_processed.vcf',
                     'indeldb_file':'/nobackup/med-mctp/sw/alignment_indexes/gatk/hg19/dbsnp132_00-All_processed.vcf',
                     'target_exons':'/nobackup/med-mctp/sw/alignment_indexes/agilent/hg19/exome/Agilent_SureSelect_All_Exon_G3362.v2.hg19.bed',
                     'recal_analysis_outputdir':'/nobackup/med-mctp/oabalbin/test/recal_analysis/',
                     'temp_dir':'/nobackup/med-mctp/oabalbin/test/temp/',
                     'qsubfile':'/nobackup/med-mctp/oabalbin/test/',
                     'out_dir':'/nobackup/med-mctp/oabalbin/test/',
                     'myemail':['alebalbin@gmail.com']
                     }

    # Submit the whole SNP-calling pipeline to the cluster.
    snps_calling_multipleSamples(cohort_dict, run_dict)
    
    
    
    