'''
Created on Oct 8, 2011

@author: oabalbin
'''

import logging
import os
import glob
import sys
import subprocess
from optparse import OptionParser
from config import ExomePipelineConfig, ExomeAnalysisConfig
from base import JOB_SUCCESS, JOB_ERROR
from copy_aligned_bam import remote_copy_file,ssh_exec

def list_analysis_files(dir, ext):
    '''
    Return the paths of all files directly under *dir* whose extension is
    *ext* (given without the leading dot), in the order reported by glob.
    '''
    pattern = os.path.join(dir, '*.' + ext)
    return glob.glob(pattern)

def check_raw_seq_exists(analysis_file, output_dir=None):
    '''
    Verify that every lane's original aligned BAM file exists locally.

    Parses the analysis XML, then checks ``lane.orig_aln_bam`` for each lane
    of each sample. Returns JOB_SUCCESS when all files are present; otherwise
    logs every missing file and returns JOB_ERROR.
    '''
    analysis = ExomeAnalysisConfig()
    analysis.from_xml(analysis_file, output_dir)

    missing = []
    for sample in analysis.samples:
        for lane in sample.lanes:
            bamfile = lane.orig_aln_bam
            print(bamfile)
            # Every required sequence file must be present in the repository.
            if not os.path.exists(bamfile):
                missing.append((sample.name, bamfile))

    if not missing:
        return JOB_SUCCESS
    for name, sfq in missing:
        logging.info("Local fastq file %s for sample %s was not found" % (sfq, name))
    return JOB_ERROR

def test_file_exists(remote_file, remote_address, username, port):
    '''
    Return True when *remote_file* exists and is non-empty on the remote
    host, probed with ``test -s`` over ssh.

    NOTE(review): *username* is accepted but not used in the ssh invocation;
    the login user presumably comes from *remote_address* or ssh config.
    '''
    probe = 'test -s %s && echo 1' % (remote_file)
    ssh_cmd = [str(tok) for tok in ["ssh", "-p", port, remote_address, probe]]
    logging.debug("\targs: %s" % (ssh_cmd))
    proc = subprocess.Popen(ssh_cmd, stdout=subprocess.PIPE)
    out, _ = proc.communicate()
    return out.strip() == "1"


def copy_bam_files(analysis_file, output_dir):
    '''
    Ensure each lane's aligned BAM/BAI pair is available on the remote server.

    For every lane of every sample: if the pair already exists in the remote
    repository mirror, create symlinks to it; otherwise copy the local files
    over (*output_dir* is used as scratch space for the chunked transfer).

    Returns JOB_SUCCESS when every lane was linked or copied, JOB_ERROR on
    the first failed link or copy.
    '''
    analysis = ExomeAnalysisConfig()
    # NOTE(review): check_raw_seq_exists() passes output_dir to from_xml();
    # confirm whether it is needed here as well.
    analysis.from_xml(analysis_file)

    remote_copy_max_size_bytes = (8 << 30)  # 8 GiB chunking threshold
    server_address, username, server_ssh_port = 'flux-login.engin.umich.edu', 'oabalbin', '22'
    # Repository mirror on the cluster scratch space.
    seq_repo_mirror_dir = "/scratch/arul_flux/med-mctp/projects/exome/tcga"  # analysis.remote_root_dir

    # Repository layout, e.g.:
    # ./TCGA-EJ-5502/lane/tcga_TCGA-EJ-5502-10A-01D-1577-08_SI_65/align/aligned_reads.bam
    for sample in analysis.samples:
        for lane in sample.lanes:
            local_file = lane.orig_aln_bam
            remote_file = lane.align_bam_file_tmp
            # Index naming differs: "<base>.bam.bai" locally, "<base>.bai" remotely.
            local_file_bai = "%s.%s" % (os.path.splitext(local_file)[-2], 'bam.bai')
            remote_file_bai = "%s.%s" % (os.path.splitext(remote_file)[-2], 'bai')
            # Expected path of this lane's BAM/BAI inside the repository mirror.
            remote_file_loc = os.path.join(sample.patient_id, "lane", lane.name, "align", "aligned_reads.bam")
            remote_file_loc_bai = os.path.join(sample.patient_id, "lane", lane.name, "align", "aligned_reads.bai")
            remote_file_repo = os.path.join(seq_repo_mirror_dir, remote_file_loc)
            remote_file_repo_bai = os.path.join(seq_repo_mirror_dir, remote_file_loc_bai)

            print(remote_file_repo)
            print(remote_file_repo_bai)

            # BUG FIX: 'found' is computed per lane. Previously it was set
            # once before the loops and never reset, so after the first lane
            # found in the repository every later lane missing from the
            # repository was silently never copied.
            found = (test_file_exists(remote_file_repo, server_address,
                                      username, server_ssh_port) and
                     test_file_exists(remote_file_repo_bai, server_address,
                                      username, server_ssh_port))
            if found:
                logging.info("Found bam file %s at %s" % (local_file, remote_file))
                # Link the repository copy into the analysis directory
                # instead of transferring it again.
                command = "ln -s %s %s" % (remote_file_repo, remote_file)
                retcode1 = ssh_exec(server_address, command, server_ssh_port)
                command = "ln -s %s %s" % (remote_file_repo_bai, remote_file_bai)
                retcode2 = ssh_exec(server_address, command, server_ssh_port)

                if (retcode1 != 0 or retcode2 != 0):
                    logging.error("Making symbolic links for bam and bai files failed")
                    return JOB_ERROR
            else:
                logging.info("Copying lane %s bam file" % (local_file))
                retcode = remote_copy_file(local_file, remote_file,
                                           server_address, username, server_ssh_port,
                                           maxsize=remote_copy_max_size_bytes,
                                           tmp_dir=output_dir)
                if retcode != 0:
                    logging.error("Copy of bam file failed")
                    return JOB_ERROR

                logging.info("Copying lane %s bai file" % (local_file_bai))
                retcode = remote_copy_file(local_file_bai, remote_file_bai,
                                           server_address, username, server_ssh_port,
                                           maxsize=remote_copy_max_size_bytes,
                                           tmp_dir=output_dir)
                if retcode != 0:
                    logging.error("Copy of bai file failed")
                    return JOB_ERROR

    return JOB_SUCCESS

    
def main():
    '''
    Driver: scan a hard-coded directory for analysis XML files and, for each
    one whose BAM files are all present locally, generate and run shell
    scripts that (1) create the directory layout on the remote cluster,
    (2) copy the BAM files over, and (3) launch the remote analysis
    pipeline, renaming the XML file to '*.submitted' on success.
    '''
    logging.basicConfig(level=logging.DEBUG,
                        format="%(asctime)s - %(name)s - %(levelname)s - %(message)s")
    
    optionparser = OptionParser("usage: %prog [options] ")
    
    optionparser.add_option("-r", "--config_file", dest="config_file",
                            help="file with run configuration")
    optionparser.add_option("-a", "--analysis_file", dest="analysis_file",
                            help="file with experiment configuration") 
    
    
    (options, args) = optionparser.parse_args()
    
    
    config = ExomePipelineConfig()
    config.from_xml(options.config_file)
    
    # Hard-coded remote-execution settings baked into the generated shell
    # script: remote login, remote python interpreter, remote pipeline
    # entry point and environment file.
    shell_script=[]
    
    header='#!/bin/bash'
    cac_server='oabalbin@flux-login.engin.umich.edu'
    call_python = '/nobackup/med-mctp/sw/dev/python/epd/epd-7.0-2-rh5-x86_64/bin/python'
    call_pipe = '/home2/oabalbin/workspace/exome2011/trunk/exome/jobs/run_analysis_bypatient_tcga.py'
    cac_bash_profile= '/home2/oabalbin/tcga_bash_profile.sh'
    irun_at = '--cluster'  # flag telling the remote pipeline to run on the cluster
    shell_script.append(header)
    ######
    ##TCGA-CH. Running 2-19-2012. 9:50
    #analysisToRun_dir='/exds/users/oabalbin/projects/exomes/tcga/analysisToRun/'
    ##TCGA-EJ  Running 2-19-2012 
    analysisToRun_dir='/exds/users/oabalbin/projects/exomes/tcga/analysisToRun3/'
    
    #analysisToRun_dir='/exds/users/oabalbin/projects/exomes/tcga/analysisToRun_tmp/'
    ext='xml'
        
    analysisToRun = list_analysis_files(analysisToRun_dir,ext)
    
    if not analysisToRun:
        logging.info("There were not analysis to run at this particular time")
        return
    
    for analysis_file in analysisToRun:
        
        # Relies on check_raw_seq_exists() returning a falsy value
        # (presumably JOB_SUCCESS == 0) when all BAM files are present.
        if not ( check_raw_seq_exists(analysis_file, analysisToRun_dir) ):
            
            # Create analysis object
            analysis = ExomeAnalysisConfig()
            analysis.from_xml(analysis_file,analysisToRun_dir)
            ## files
            config_file_name=options.config_file.split('/')[-1]
            analysis_file_name=analysis_file.split('/')[-1]
            
            log_file_name=analysis.name+'.log'
            # NOTE(review): analysis_file comes from glob() and is already an
            # absolute path, so the os.path.join below is effectively a no-op
            # and the script is written next to the XML file — confirm intended.
            shell_file_name=analysis_file+'.sh'
            shell_file=open(os.path.join(analysisToRun_dir,shell_file_name),'w')
            
            # Stage 1: single ssh command that mkdirs the whole remote
            # directory tree (root, lane/, sample/, analysis/, plus one
            # align dir per lane). The remote command is wrapped in
            # single quotes so mkdir receives all paths in one shell.
            laned=os.path.join(analysis.remote_root_dir,'lane')
            sampled=os.path.join(analysis.remote_root_dir,'sample')
            analysisd=os.path.join(analysis.remote_root_dir,'analysis')
            cac_mkdir=['ssh',cac_server,'\'mkdir', analysis.remote_root_dir,
                        laned,
                        sampled,
                        analysisd]
            
            for sample in analysis.samples:                
                for lane in sample.lanes:
                    lane_dir=os.path.join(os.path.join(laned,lane.name))
                    lane_align=os.path.join(os.path.join(lane_dir,'align'))
                    cac_mkdir.append(lane_dir)
                    cac_mkdir.append(lane_align)
            cac_mkdir.append('\'')
            # Make folder structure in the remote server.
            # The join(',')/replace trick turns the token list into one
            # space-separated command line, and the commas between script
            # lines into newlines. NOTE(review): this corrupts any token
            # that itself contains a comma — confirm paths never do.
            shell_script.append( ",".join(cac_mkdir).replace(',',' ') )
            shell_file.write(",".join(shell_script).replace(',','\n'))
            shell_file.close()
            
            retcode=subprocess.call(["bash",shell_file_name])
            if retcode == 0:
                logging.info("Directories created in the remote server")
                # Copy bam files to the remote server
                copy_bam_files(analysis_file,analysisToRun_dir)
                
                # Remove the stage-1 script and reopen the same path for
                # the stage-2 (copy + launch) script written below.
                if os.path.exists(shell_file_name):
                    os.remove(shell_file_name)
                    log_file_name=analysis.name+'.log'
                    shell_file_name=analysis_file+'.sh'
                    shell_file=open(os.path.join(analysisToRun_dir,shell_file_name),'w')
    
            else:
                logging.info("Directories were not created in the remote server")
                return JOB_ERROR

            # Stage 2: scp the config and analysis XML to the remote root,
            # then ssh in, source the environment and launch the pipeline,
            # redirecting its output to a per-analysis log file.
            shell_script=[]
            shell_script.append(header)
            #print args
            #retcode=subprocess.Popen(args)
            copy_run_files=["scp",options.config_file, analysis_file,
                            cac_server+':'+analysis.remote_root_dir]
            # Run the following command to start analysis.
            # Load modules
            
            args = ["" ]
            
            # Call the python script
            run_cmd = ["ssh",cac_server,'\''+'source', cac_bash_profile+';',
                       call_python,call_pipe,
                       '-r',os.path.join(analysis.remote_root_dir,config_file_name),
                       '-a',os.path.join(analysis.remote_root_dir,analysis_file_name),
                       irun_at,
                       '>',os.path.join(analysis.remote_root_dir,log_file_name)+'\'']
            
            shell_script.append( ",".join(copy_run_files).replace(',',' ') )
            # Uncomment
            shell_script.append( ",".join(run_cmd).replace(',',' ') )
            
            # Write the shell script.
            shell_file.write(",".join(shell_script).replace(',','\n'))
            shell_file.close()
            
            # Run the shell script
            print ["bash",shell_file_name]
            #retcode=1
            retcode=subprocess.call(["bash",shell_file_name])
            
            # Mark the XML as submitted so the next scan skips it.
            if retcode == 0:
                subprocess.Popen(["mv",analysis_file,analysis_file+'.submitted'])
                logging.info("Analysis file %s was submitted" % (analysis_file))
                
                
        else:
            logging.info("Analysis file %s was not submitted because not all sequences were found" % (analysis_file))
            pass
        

# Script entry point.
if __name__ == '__main__': main()