'''
Created on Feb 5, 2011

@author: oabalbin
A script to do general evaluation and filtering
of vcf files.
'''
import os
import subprocess

from optparse import OptionParser
from exome.jobs.base import JOB_SUCCESS, JOB_ERROR
from exome.jobs.job_runner import qsub_cac, qsub_loc, run_local
from exome.jobs.config import ExomePipelineConfig, ExomeAnalysisConfig

# Global Variables
# Cluster node sizing used when submitting jobs.
NODE_MEM=45000.0      # total memory per node (MB)
NODE_CORES=12         # cores per node
SINGLE_CORE=1         # core count for single-threaded jobs
MEM_PER_CORE= int(float(NODE_MEM) / NODE_CORES)  # MB available per core
# wt=walltime
WT_SHORT= "24:00:00"
WT_LONG= "60:00:00" #"100:00:00"
WT_ELONG="100:00:00"

def vcf_stats(vcf_file_name, outfile_name, path_to_vcftools, filters_dict=None):
    '''
    Build the vcftools command that computes per-site statistics
    (allele frequencies, depth, quality, DP INFO field) for a vcf file.

    vcf_file_name    -- input vcf file
    outfile_name     -- output prefix (a trailing '.recode.vcf' is stripped)
    path_to_vcftools -- root directory of the vcftools installation
    filters_dict     -- optional {option: value} filters added to the command

    Returns the command as a list of arguments.
    '''
    filters=[]
    if filters_dict is not None:
        # items() instead of iteritems() so this works on Python 2 and 3
        for thf, value in filters_dict.items():
            filters.append(thf)
            filters.append(value)

    # vcftools derives all of its output file names from the --out prefix
    outfile_name = outfile_name.replace('.recode.vcf','')
    vcftools_command=path_to_vcftools+'cpp/vcftools'
    # '--get-INFO' and its value are separate argv items, matching the
    # '--keep-INFO', 'DP' convention used in apply_snps_filters
    args=[vcftools_command,'--vcf',vcf_file_name] + filters + \
    ['--out',outfile_name, '--freq', '--site-mean-depth',
     '--site-depth','--site-quality', '--get-INFO', 'DP']

    # commas are used as join separators by downstream command builders,
    # so mask literal commas in the arguments with semicolons
    args= [a.replace(',',';') for a in args]

    return args


def snps_stats(inputfile_name, outfile_name, path_to_vcftools):
    '''
    Build the vcf-stats command for a vcf file, redirecting its output
    to outfile_name.  Returns the command as a list of arguments.

    Note: the '>' redirection only takes effect when the command is
    executed through a shell.
    '''
    tool = path_to_vcftools + 'bin/vcf-stats'
    return [tool, inputfile_name, '>', outfile_name]

def apply_snps_filters(vcf_file_name, outfile_name, filters_dict, path_to_vcftools):
    '''
    Build the vcftools command that applies the given filters to a vcf
    file and recodes the passing snps into a new vcf, keeping selected
    INFO fields (DP, AF, MQ, CI95, INDEL) plus frequency, heterozygosity
    and singleton reports.

    vcf_file_name    -- input vcf file
    outfile_name     -- output prefix (a trailing '.recode.vcf' is stripped)
    filters_dict     -- {option: value} filters, or None for no filtering
    path_to_vcftools -- root directory of the vcftools installation

    Returns the command as a list of arguments.
    '''
    filters=[]
    if filters_dict is not None:
        # items() instead of iteritems() so this works on Python 2 and 3;
        # the None guard matches vcf_stats above
        for thf, value in filters_dict.items():
            filters.append(thf)
            filters.append(value)

    # vcftools derives all of its output file names from the --out prefix
    outfile_name = outfile_name.replace('.recode.vcf','')
    vcftools_command=path_to_vcftools+'cpp/vcftools'
    args=[vcftools_command,'--vcf',vcf_file_name] + filters + \
    ['--out',outfile_name,'--recode', '--freq',
     '--het', '--singletons', '--keep-INFO', 'DP',
     '--keep-INFO', 'AF', '--keep-INFO', 'MQ','--keep-INFO',
     'CI95', '--keep-INFO', 'INDEL']

    return args

def tabix_vcf_files(vcf_file):
    '''
    Return a two-line shell snippet that first bgzip-compresses the
    vcf file and then builds a tabix index for the resulting .vcf.gz.
    '''
    gz_file = vcf_file.replace('.vcf', '.vcf.gz')
    # join with commas then swap to spaces, mirroring the command
    # formatting convention used elsewhere in this module
    compress_cmd = ",".join(['bgzip', vcf_file]).replace(',', ' ')
    index_cmd = ",".join(['tabix', '-p', 'vcf', gz_file]).replace(',', ' ')
    return compress_cmd + '\n' + index_cmd

def vcf_isec(A_vcf_file, output_vcf_file, list_vcf_files, path_to_vcftools, do_complement=False):
    '''
    Build the shell command for: vcf-isec [OPTIONS] file1.vcf file2.vcf ...

    -c : take the complement instead of the intersection (off by default)
    -f : force the operation even though the vcf files have
         different columns (always set)

    Output is redirected to output_vcf_file, so the command must run
    through a shell.  Returns the command as a single string.
    '''
    flags = ['-c', '-f'] if do_complement else ['-f']
    tool = path_to_vcftools + 'bin/vcf-isec'
    pieces = [tool] + flags + [A_vcf_file] + list_vcf_files + ['>', output_vcf_file]
    # comma-join then replace with spaces, per this module's convention
    return ",".join(pieces).replace(',', ' ')


def variant_evaluation(analysis, configrun, vcf_file_name, jobrunfunc):
    '''
    Submit the snp evaluation chain for one vcf file as three dependent
    jobs: (1) per-site statistics, (2) quality/allele-frequency
    filtering, (3) bgzip+tabix indexing followed by a complement
    intersection against known-variant sets (hapmap, 1000 genomes,
    dbSNP).  Jobs are chained via the ids returned by jobrunfunc.
    '''
    # Parameters
    # NOTE(review): extra_mem/num_cores, picard and samtools paths are
    # read from the config here but never used below.
    extra_mem, num_cores = configrun.gatk_use_mem, configrun.gatk_num_cores
    path_to_vcftools, path_to_picard, path_to_sam = configrun.vcftools_path, configrun.picard_path, \
                                                configrun.samtools_path
    genomes = configrun.genomes['human']
    ref_genome, snpdb_vcf = genomes.gatk_ref_genome, genomes.snpdb
    hapmap_vcf, tgk_vcf = genomes.hapmap, genomes.OneKgenomes 
    my_email=configrun.email_addresses
    
    ###########    
    # Output naming and filter thresholds for this run.
    filters_dict=None
    jobn='vcf_'
    outfile_name=vcf_file_name.replace('.vcf','')
    output_vcf_file=vcf_file_name.replace('.vcf','.comp.vcf')
    snps_filters_dict_qual={'--minQ': '40'} # need one filter for coverage
    snps_filters_dict={'--minQ': '40','--non-ref-af':'0.45', '--max-non-ref-af':'0.65'} # need one filter for coverage
    # vcftools --recode writes its output as <prefix>.recode.vcf
    vcf_filtered_file=vcf_file_name.replace('.vcf','.recode.vcf')
    do_complement=True
    list_vcf_files=[hapmap_vcf, tgk_vcf, snpdb_vcf]
    ###########
    # Job 1: per-site statistics on the raw vcf (no filters applied).
    cmd=vcf_stats(vcf_file_name, outfile_name, path_to_vcftools,filters_dict)
    
    jobidvcf = jobrunfunc(jobn+'stats', cmd, SINGLE_CORE, cwd=None, walltime=WT_SHORT, pmem=None, 
                           deps=None, stdout=None, email_addresses=my_email)
        
    '''
    retcode = subprocess.call(args)
    if retcode != 0:
        raise OSError("command: '%s' returned error code %s" % (' '.join(args), retcode))
    else:
        pass
        
    '''
    
    # Filter the vcf file according to snps_filters_dict
    # Job 2: depends on the stats job above.
    cmd=apply_snps_filters(vcf_file_name, outfile_name, snps_filters_dict, path_to_vcftools)
    jobidvcf = jobrunfunc(jobn+'filter', cmd, SINGLE_CORE, cwd=None, walltime=WT_SHORT, pmem=None, 
                           deps=jobidvcf, stdout=None, email_addresses=my_email)

        
    # check if file is already in gz file, also check for the tabix file.
    # If both the .vcf.gz and its .tbi index already exist, replace the
    # bgzip/tabix step with a no-op echo.
    print os.path.isfile(vcf_filtered_file.replace('.vcf','.vcf.gz'))
    print os.path.isfile(vcf_filtered_file.replace('.vcf','.vcf.gz.tbi'))
    if os.path.isfile(vcf_filtered_file.replace('.vcf','.vcf.gz')) and \
    os.path.isfile(vcf_filtered_file.replace('.vcf','.vcf.gz.tbi')):
        tabix='echo vcf File %s ready for intersection'%vcf_filtered_file
    else:
        tabix=tabix_vcf_files(vcf_filtered_file)
        print "dentro", tabix 
    # Intersect vfc file with known snps.
    # NOTE(review): the intersection runs on the unfiltered vcf_file_name
    # while the tabix step above indexes vcf_filtered_file -- confirm
    # this is intentional.
    cmd = vcf_isec(vcf_file_name, output_vcf_file, list_vcf_files, path_to_vcftools, do_complement)
    
    # Job 3: tabix/bgzip and intersection submitted as one script,
    # depending on the filter job.
    cmd=tabix+'\n'+cmd
    jobidvcf = jobrunfunc(jobn+'isec', cmd, SINGLE_CORE, cwd=None, walltime=WT_SHORT, pmem=None, 
                           deps=jobidvcf, stdout=None, email_addresses=my_email)

    



'''    
py_script = os.path.join(_jobs_dir, "sam_to_bam.py")
args = [sys.executable, py_script, 
        "--sort-order", "coordinate",
        "--samtools-path",path_to_samtools,
        "--picard-path", path_to_picard,
        job.align_sam_file, job.align_bam_file] 
logging.info("%s: Running SAM -> BAM conversion" % (job.name))
job_id = runfunc(job.name + "_sb", args, single_processor, cwd=job.output_dir, 
                 walltime=wt_samtools,
                 pmem=extra_mem,
                 stdout="sam2bam.log", 
                 deps=deps, 
                 email_addresses=email_addresses)
'''
    


if __name__ == '__main__':
        
    optionparser = OptionParser("usage: %prog [options] ")
    
    
    optionparser.add_option("-r", "--config_file", dest="config_file",
                            help="file with run configuration")
    optionparser.add_option("-a", "--analysis_file", dest="analysis_file",
                            help="file with experiment configuration")
    '''
    optionparser.add_option("--snps_hard_filtered", dest="snps_hard_filtered", action="store_true", default=False,
                            help="Determine if it uses the snps obtained by hard filtering or model selection. Default hard_filetered")
    optionparser.add_option("--merge_types", dest="merge_types", action="store_true", default=False,
                            help="Merge vcf files for tumor and benign samples")
    '''
    optionparser.add_option("--local", dest="local", action="store_true", default=False)
    optionparser.add_option("--cluster", dest="cluster", action="store_true", default=False)
    optionparser.add_option("-f", "--vcf_file", dest="vcf_file",
                            help="vcf_file")

    
    (options, args) = optionparser.parse_args()    

    config = ExomePipelineConfig()
    config.from_xml(options.config_file)
    analysis = ExomeAnalysisConfig()
    analysis.from_xml(options.analysis_file, config.output_dir)


    if not (options.local ^ options.cluster):
        print "Must set either --local or --cluster to run job"
    if options.local:
        jobrunfunc = run_local
    elif options.cluster:
        jobrunfunc = qsub
        
    variant_evaluation(analysis, config, options.vcf_file, jobrunfunc)
