#!/usr/bin/env python
"""

    run_tba.py
    [--log_file PATH]
    [--quiet]

"""

################################################################################
#
#   run_tba
#
#
#   Copyright (c) 11/24/2009 Leo Goodstadt
#
#   Permission is hereby granted, free of charge, to any person obtaining a copy
#   of this software and associated documentation files (the "Software"), to deal
#   in the Software without restriction, including without limitation the rights
#   to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#   copies of the Software, and to permit persons to whom the Software is
#   furnished to do so, subject to the following conditions:
#
#   The above copyright notice and this permission notice shall be included in
#   all copies or substantial portions of the Software.
#
#   THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#   IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#   FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#   AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#   LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#   OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
#   THE SOFTWARE.
#################################################################################



#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

#   options


#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

import sys, os
import os.path


# add self to search path for testing
if __name__ == '__main__':
    # directory holding this script (currently unused beyond the hard-coded
    # path below -- NOTE(review): should probably be used instead)
    exe_path = os.path.split(os.path.abspath(sys.argv[0]))[0]
    # NOTE(review): hard-coded developer checkout of ruffus; harmless on
    # other machines but should come from configuration
    sys.path.insert(0, "/net/cpp-group/Leo/inprogress/oss_projects/ruffus/installation/src")
    # module name = script file name stripped of directory and extension
    module_name = os.path.splitext(os.path.split(sys.argv[0])[1])[0]
else:
    module_name = __name__


if __name__ == '__main__':
    from optparse import OptionParser
    import StringIO

    parser = OptionParser(version="%prog 1.0", usage = "\n\n    %progs [options]")
    parser.add_option("-i", "--input_file", dest="input_file",
                      metavar="FILE",
                      type="string",
                      help="species name and path of input sequence files. Tab delimited")

    #
    #   general options: verbosity / logging
    #
    parser.add_option("-v", "--verbose", dest = "verbose",
                      action="count", default=0,
                      help="Print more verbose messages for each additional verbose level.")
    parser.add_option("-L", "--log_file", dest="log_file",
                      metavar="FILE",
                      type="string",
                      help="Name and path of log file")
    parser.add_option("-r", "--result_file", dest="result_file",
                      metavar="FILE",
                      type="string",
                      help="Name and path of file containing tba results")
    parser.add_option("--skip_parameter_logging", dest="skip_parameter_logging",
                        action="store_true", default=False,
                        help="Do not print program parameters to log.")
    parser.add_option("-D", "--debug", dest="debug",
                        action="count", default=0,
                        help="Set default program parameters in debugging mode.")
    parser.add_option("--queue_cmd_prefix", dest="queue_cmd_prefix",
                        metavar="CMD",
                        type="str",
                        default = 'qrsh -now n -cwd -p -6 -N {job_name} -v BASH_ENV=~/.bashrc -q medium_jobs.q "{cmd}"',
                        help="Command to run jobs on the cluster.")
    parser.add_option("--bin_directory", dest="bin_directory",
                      metavar="PATH",
                      default = '/cpp-software/bin',
                      type="string",
                      help="Path to tba binaries")
    parser.add_option("--working_directory", dest="working_directory",
                      metavar="PATH",
                      default = 'temp',
                      type="string",
                      help="Path to working directory")


    #
    #   pipeline
    #
    parser.add_option("-t", "--target_tasks", dest="target_tasks",
                      action="append",
                      default = list(),
                      metavar="JOBNAME",
                      type="string",
                      help="Target task(s) of pipeline.")
    parser.add_option("--forced_tasks", dest="forced_tasks",
                      action="append",
                      default = list(),
                      metavar="JOBNAME",
                      type="string",
                      help="Pipeline task(s) which will be included even if they are up to date.")
    parser.add_option("-j", "--jobs", dest="jobs",
                      default=50,
                      metavar="jobs",
                      type="int",
                      help="Specifies  the number of jobs (commands) to run simultaneously.")
    parser.add_option("--flowchart", dest="flowchart",
                      metavar="FILE",
                      type="string",
                      help="Print a dependency graph of the pipeline that would be executed "
                            "to FILE, but do not execute it.")
    parser.add_option("--flowchart_format", dest="flowchart_format",
                      metavar="FORMAT",
                      type="string",
                      default = 'svg',
                      help="format of dependency graph file. Can be 'ps' (PostScript), "+
                      "'svg' 'svgz' (Structured Vector Graphics), " +
                      "'png' 'gif' (bitmap  graphics) etc ")
    parser.add_option("-n", "--just_print", dest="just_print",
                        action="store_true", default=False,
                        help="Print a description of the jobs that would be executed, "
                            "but do not execute them.")
    parser.add_option("--key_legend_in_graph", dest="key_legend_in_graph",
                        action="store_true", default=False,
                        help="Print out legend and key for dependency graph.")
    parser.add_option("--draw_graph_horizontally", dest="draw_horizontally",
                        action="store_true", default=False,
                        help="Draw horizontal dependency graph.")

    # capture the formatted --help text so check_mandatory_options can quote
    # it in its error messages
    f = StringIO.StringIO()
    parser.print_help(f)
    helpstr = f.getvalue()
    (options, remaining_args) = parser.parse_args()


    #vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv
    #                                             #
    #   Debug: hard-wired developer defaults      #
    #                                             #
    #^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    if options.debug:
        options.log_file                = os.path.join("run_tba.log")
        options.verbose                 = 5
        # BUG FIX: previously set options.log_parameters, an attribute that
        # is never read; parameter logging is actually controlled by
        # options.skip_parameter_logging
        options.skip_parameter_logging  = False
        options.bin_directory           = '/cpp-software/bin'
        options.working_directory       = "/net/cpp-group/Leo/inprogress/brain_stuff/working_dir/tba"
        options.result_file             =  "/net/cpp-group/Leo/inprogress/brain_stuff/working_dir/tba.maf"
        options.input_file              =  "/net/cpp-group/Leo/inprogress/brain_stuff/working_dir/174.sequence_files"

    # an empty/missing prefix means "run the command locally"; otherwise the
    # prefix must contain the {cmd} placeholder which run_cmd substitutes
    if not options.queue_cmd_prefix:
        options.queue_cmd_prefix = '{cmd}'
    if '{cmd}' not in options.queue_cmd_prefix:
        raise Exception("--queue_cmd_prefix must contain the term '{cmd}'")

    #
    #   mandatory options
    #
    from options import check_mandatory_options
    mandatory_options = []
    check_mandatory_options (options, mandatory_options, helpstr)


#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

#   imports


#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

from ruffus import *
from ruffus.ruffus_exceptions import JobSignalledBreak
from ruffus.ruffus_utility import shorten_filenames_encoder
import re

# biopython
from Bio import SeqIO
import subprocess

from json import dumps
from collections import defaultdict





#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

#   Logger


#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

if __name__ == '__main__':
    import logging
    from lg_program_logging import  setup_std_logging, MESSAGE
    from options import get_option_strings


    # single module-level logger, configured from --log_file / --verbose
    logger = logging.getLogger(module_name)
    setup_std_logging(logger, options.log_file, options.verbose)

    #
    #   Allow logging across Ruffus pipeline
    #
    def get_logger (logger_name, args):
        # factory handed to make_shared_logger_and_proxy: ignores its
        # arguments and always returns the one module logger
        return logger

    from ruffus.proxy_logger import *
    # logger_proxy / logging_mutex let ruffus jobs (possibly in separate
    # processes) log safely through the shared logger
    (logger_proxy,
     logging_mutex) = make_shared_logger_and_proxy (get_logger,
                                                    module_name,
                                                    {})

    #
    #   log programme parameters
    #
    if not options.skip_parameter_logging:
        # record the exact invocation (program name + effective options)
        programme_name = os.path.split(sys.argv[0])[1]
        logger.info("%s %s" % (programme_name, " ".join(get_option_strings(parser, options))))

#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

#   Functions


#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
#_________________________________________________________________________________________

#   convert_species_to_scientific_names

#_________________________________________________________________________________________
import get_ncbi_tax_tree
from get_ncbi_tax_tree import get_ncbi_tree, scientific_name_to_tax_id
def convert_species_to_scientific_names (species_common_names):
    """
    Map (species, common_name) pairs onto NCBI scientific names.

    Returns two dicts: common_name -> scientific name and the reverse,
    scientific name -> common_name.
    """
    ncbi_names = get_ncbi_tax_tree.scientific_name_to_tax_id.keys()

    def drop_middle (name):
        # "Genus middle species" -> "genus_species"; one-word names unchanged
        if " " not in name:
            return name
        parts = name.split(" ")
        return (parts[0] + "_" + parts[-1]).lower()

    #
    #   Look-up table keyed by underscore-joined, lower-cased species names,
    #   both with and without any middle name component, so species can be
    #   specified either way.  Full-name entries take precedence.
    #
    lookup = {}
    for name in ncbi_names:
        lookup[drop_middle(name)] = name
    for name in ncbi_names:
        lookup[name.replace(" ", "_").lower()] = name

    scientific_names = [lookup[pair[0]] for pair in species_common_names]
    common_names     = [pair[1] for pair in species_common_names]
    return (dict(zip(common_names, scientific_names)),
            dict(zip(scientific_names, common_names)))



#_________________________________________________________________________________________

#   get_species_tree_via_ncbi

#_________________________________________________________________________________________
def get_species_tree_via_ncbi (scientific_to_species_name):
    """
    Build a Newick species tree from the NCBI taxonomy, with leaves
    renamed from scientific names back to the original species names.
    """
    from get_ncbi_tax_tree import get_ncbi_tree
    import TreeTools

    # taxonomic sub-tree covering just these species
    tree = get_ncbi_tree (scientific_to_species_name.keys(), True)
    newick = TreeTools.Tree2Newick(tree, False, False)

    # swap each scientific name for its underscore-joined original name
    for scientific_name, species_name in scientific_to_species_name.iteritems():
        newick = newick.replace(scientific_name, species_name)
    return newick

#_________________________________________________________________________________________

#   get_fake_nested_species

#_________________________________________________________________________________________
def get_fake_nested_species (species_names):
    """
    Nest species names into a left-branching pseudo-tree string:
    ["a", "b", "c"] -> "((a,b),c)".

    A single name is returned unchanged.  An empty list yields ""
    (the original raised IndexError on empty input).
    """
    if not species_names:
        return ""
    nested = species_names[0]
    for name in species_names[1:]:
        nested = "(" + nested + "," + name + ")"
    return nested


from run_cmd import run_cmd


#_________________________________________________________________________________________

#   get_species_tree_str

#_________________________________________________________________________________________
def get_species_tree_str (seq_files_per_species, TBA_format=True):
    """
    Accepts sequence file names indexed by (species, common_name);
    species names are underscore joined.

    Returns species tree in parenthesis form (space separated, without
    the trailing ';' when TBA_format is True) and the sets of
    placental / non-placental sequence file names.

    All returned names use the filename in seq_files_per_species rather than the
    scientific names
    """
    #  use list of all species in this target to obtain taxonomical sub-tree (from ncbi)
    (   species_to_scientific_name,
        scientific_to_species_name) = convert_species_to_scientific_names(seq_files_per_species.keys())

    species_tree = get_species_tree_via_ncbi (scientific_to_species_name)

    # get species which are placental
    from get_ncbi_tax_tree import get_placental_species
    placental_species_list = set(get_placental_species(scientific_to_species_name.keys()))

    placental_file_names = set()
    non_placental_file_names = set()

    for species in seq_files_per_species.keys():
        scientific_name = species_to_scientific_name[species[1]]
        file_names = [os.path.split(p)[1] for p in seq_files_per_species[species]]
        # replace the single species leaf with a nested pseudo-tree of its
        # sequence files, so each file appears as a leaf in its own right
        fake_species = get_fake_nested_species(file_names)
        species_tree = species_tree.replace(species[1], fake_species)

        # save placental files
        if scientific_name in placental_species_list:
            placental_file_names.update(file_names)
        else:
            non_placental_file_names.update(file_names)

    if TBA_format:
        # TBA wants names space-separated and no trailing ';'
        species_tree_str = species_tree.replace(',', ' ').rstrip(';')
    else:
        # BUG FIX: species_tree_str was left unbound (NameError at return)
        # whenever TBA_format was False
        species_tree_str = species_tree

    return species_tree_str, placental_file_names, non_placental_file_names


#_________________________________________________________________________________________

#   setup_symbolic_links_for_seq_files
#
#_________________________________________________________________________________________
def setup_symbolic_links_for_seq_files (seq_files_per_species):
    """
    Link each original sequence file into the working directory and
    return the link paths grouped by species.
    """
    links_per_species = defaultdict(list)
    for species, file_paths in seq_files_per_species.items():
        for source_path in file_paths:
            link_path = os.path.join(options.working_directory,
                                     os.path.split(source_path)[1])
            # remove any stale link left over from a previous run
            if os.path.exists(link_path):
                os.unlink(link_path)
            os.symlink(source_path, link_path)
            links_per_species[species].append(link_path)
    return links_per_species




#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

#   Pipeline


#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
import time

def sleep_a_while ():
    """Pause for one second (used as a @posttask between pipeline stages)."""
    time.sleep(1)


#def setup_pipeline ():
blastz_specs_path = os.path.join(options.working_directory, "blastz.specs")
# hox_file_name is the bare file NAME: it is referenced from the specs file,
# which the tools read with cwd = working_directory.  hox_file_path is where
# write_hox_file creates it.
# BUG FIX: hox_file_name previously already included the working directory
# and was then joined onto it again, yielding e.g. "temp/temp/HoxD55.q" --
# a directory that makedirs below never creates.
hox_file_name    = "HoxD55.q"
hox_file_path    = os.path.join(options.working_directory, hox_file_name)
if not os.path.exists(options.working_directory):
    os.makedirs(options.working_directory)

#
#  parse species and filenames
#
import parse_delimited
orig_seq_files_per_species = defaultdict(list)
# input file: tab delimited lines of (species, common_name, sequence path)
for (species, common_name, seq_path) in parse_delimited.parse_delimited (open(options.input_file)):
    orig_seq_files_per_species[(species, common_name)].append(seq_path)


#
#   set up local symbolic links
#
seq_files_per_species = setup_symbolic_links_for_seq_files (orig_seq_files_per_species)

#
# sort out species
#
(   species_tree_str,
    placental_file_names,
    non_placental_file_names) = get_species_tree_str (seq_files_per_species)



#_________________________________________________________________________________________

#   write_hox_file

#_________________________________________________________________________________________
@files(None, hox_file_path)
def write_hox_file(no_input_file_name, output_file_name):
    """
    Write the HoxD55 substitution scoring matrix used by blastz for
    non-placental species (referenced as Q=... in the blastz specs file).
    """
    # use a context manager so the handle is closed deterministically
    # (the original leaked the handle to the garbage collector)
    with open(output_file_name, "w") as out_file:
        out_file.write("""     A    C    G    T
    91  -90  -25 -100
   -90  100 -100  -25
   -25 -100  100  -90
  -100  -25  -90  91""")

#_________________________________________________________________________________________

#   write_blastz_spec_file

#_________________________________________________________________________________________
@files(None, blastz_specs_path)
def write_blastz_spec_file(no_input_file_name, output_file_name):
    """
    Write the blastz specs file: placental-vs-placental comparisons use
    B=2 C=0; anything involving a non-placental uses the HoxD55 matrix.
    """
    # convert species tree to string
    lines = []
    lines.append('#define PLACENTAL %s'     % ' '.join(placental_file_names)       )
    lines.append('#define NON_PLACENTAL %s' % ' '.join(non_placental_file_names)   )
    lines.append('PLACENTAL : PLACENTAL'                                           )
    lines.append('        B=2 C=0'                                                 )
    lines.append('NON_PLACENTAL : *'                                               )
    lines.append('        Q=%s'             % hox_file_name                        )
    # context manager closes the handle promptly (the original leaked it)
    with open(output_file_name, "w") as out_file:
        out_file.write("\n".join(lines) + "\n")


#_________________________________________________________________________________________

#   all_bz_parameters
#_________________________________________________________________________________________
def all_bz_parameters ():
    """
    Runs all_bz only to find out what it *would* have run.
    Yields one ruffus parameter tuple per pairwise blastzWrapper command
    so that we can run those jobs in parallel ourselves.
    """
    binary_exe = os.path.join(options.bin_directory, "all_bz")
    cmd_line = '%s - "%s" "blastz.specs"' % (binary_exe, species_tree_str)
    stdout_str, stderr_str = run_cmd(cmd_line, "run all_bz", logging_mutex, logger_proxy,
                                    cwd=options.working_directory)
    for line in stdout_str.split("\n"):
        # only the part before any pipe holds the command itself
        cmds = line.split("|")
        fields = cmds[0].split(" ")
        # skip everything except pairwise blastzWrapper invocations
        if not fields or fields[0] != "blastzWrapper":
            continue
        species_1, species_2 = fields[1:3]
        species_pair            = "%s__vs__%s" % (species_1, species_2)
        logger.info("tba for %s" % species_pair)

        species_1, species_2, species_pair = [os.path.join(options.working_directory, s) for s in
                                              (species_1, species_2, species_pair)]

        yield (
                        [species_1, species_2],              # inputs  = species pair
                        [species_pair + ".singular",         # outputs = single coverage maf
                         species_pair + ".s_cov2_completed"],#           + completion flag
                        " ".join(fields[3:]),                # blastz options
                        logger_proxy, logging_mutex,
                        options.bin_directory
                        )

##_________________________________________________________________________________________
#
##   do_blastz
#
##_________________________________________________________________________________________
#@follows(write_blastz_spec_file, write_hox_file)
#@files(all_bz_parameters)
#def do_blastz(input_files, output_files, blastz_params, logger_proxy, logging_mutex, bin_directory):
#    """
#    Runs blastz on pairwise species going up tree
#    """
#    species1_file, species2_file = input_files
#    lav_file, flag_file_name = output_files
#
#    blastzWrapper = os.path.join(bin_directory, "blastzWrapper")
#    cmd = "{blastzWrapper} {species1_file} {species2_file} {blastz_params}"
#    cmd = cmd.format  ( blastzWrapper = blastzWrapper,
#                        species1_file = species1_file,
#                        species2_file = species2_file,
#                        blastz_params = blastz_params,
#                        cwd=options.working_directory,
#        )
#
#    s1, s2 = [os.path.split(s)[1] for s in species1_file, species2_file]
#    short_name = ("A" + s1[0:4] + "_" + s2[0:4])
#
#    msg = "run blastz on %s %s " % (species1_file, species2_file)
#    run_cmd (cmd, msg, logging_mutex, logger_proxy, job_name = short_name,
#                queue_cmd_prefix = options.queue_cmd_prefix, stdout_file_name = lav_file)
#    open(flag_file_name, "w")
#
#def get_original_file_names (pair_file_path):
#    pair_file_dir, pair_file_name = os.path.split(pair_file_path)
#    orig_files = os.path.splitext(pair_file_name)[0].split("__vs__")
#    return [os.path.join(pair_file_dir, s) for s in orig_files]
#
##_________________________________________________________________________________________
#
##   do_lav2maf
#
##_________________________________________________________________________________________
#@posttask(sleep_a_while)
#@transform(do_blastz, suffix(".lav"), [".maf", ".maf_completed"], logger_proxy, logging_mutex, options.bin_directory)
#def do_lav2maf(input_files, output_files, logger_proxy, logging_mutex, bin_directory):
#    """
#    Runs blastz on pairwise species going up tree
#    """
#    lav_file = input_files[0]
#    species1_file, species2_file = get_original_file_names (lav_file)
#    maf_file, flag_file_name = output_files
#
#    lav2maf = os.path.join(bin_directory, "lav2maf")
#    cmd     = "{lav2maf} {lav_file} {species1_file} {species2_file}"
#    cmd     = cmd.format  ( lav2maf  = lav2maf,
#                            lav_file = lav_file,
#                            species1_file = species1_file,
#                            species2_file = species2_file,
#                        )
#
#    s1, s2 = [os.path.split(s)[1] for s in species1_file, species2_file]
#    short_name = ("B" + s1[0:4] + "_" + s2[0:4])
#
#    msg = "run lav2maf on %s " % (lav_file)
#    run_cmd (cmd, msg, logging_mutex, logger_proxy, job_name = short_name,
#                queue_cmd_prefix = options.queue_cmd_prefix, stdout_file_name = maf_file)
#    open(flag_file_name, "w")
#
##_________________________________________________________________________________________
#
##   do_maf_sort
#
##_________________________________________________________________________________________
#@posttask(sleep_a_while)
#@transform(do_lav2maf, suffix(".maf"), [".orig.maf", ".orig_maf_completed"], logger_proxy, logging_mutex, options.bin_directory)
#def do_maf_sort(input_files, output_files, logger_proxy, logging_mutex, bin_directory):
#    """
#    Runs blastz on pairwise species going up tree
#    """
#    maf_file = input_files[0]
#    species1_file, species2_file = get_original_file_names (maf_file)
#    maf_sort_file, flag_file_name = output_files
#
#    maf_sort = os.path.join(bin_directory, "maf_sort")
#    cmd     = "{maf_sort} {maf_file} {species1_file}"
#    cmd     = cmd.format  ( maf_sort  = maf_sort,
#                            maf_file = maf_file,
#                            species1_file = species1_file,
#                            cwd=options.working_directory,
#                        )
#
#    s1, s2 = [os.path.split(s)[1] for s in species1_file, species2_file]
#    short_name = ("C" + s1[0:4] + "_" + s2[0:4])
#
#    msg = "run maf_sort on %s " % (maf_file)
#    run_cmd (cmd, msg, logging_mutex, logger_proxy, job_name = short_name,
#                queue_cmd_prefix = options.queue_cmd_prefix, stdout_file_name = maf_sort_file)
#    open(flag_file_name, "w")
#
#
#
#
#
##_________________________________________________________________________________________
#
##   do_single_cov2
#
##_________________________________________________________________________________________
#@posttask(sleep_a_while)
#@transform(do_maf_sort, suffix(".orig.maf"), [".singular", ".s_cov2_completed"], logger_proxy, logging_mutex)
#def do_single_cov2(input_files, output_file, logger_proxy, logging_mutex):
#    """
#    single_cov2 removes lower-scoring alignable in regions with more than one
#    resulting alignment such that each position in sequence 1 aligns to only one position in
#    sequence 2.
#    """
#    binary_exe = os.path.join(options.bin_directory, "single_cov2")
#    cmd = "%s %s" % (binary_exe, input_files[0])
#    msg = "run single_cov2 on %s " % (input_files[0])
#    run_cmd (cmd, msg, logging_mutex, logger_proxy,
#                cwd=options.working_directory,
#                job_name = "scov_" + os.path.split(input_files[0])[1],
#                queue_cmd_prefix = options.queue_cmd_prefix, stdout_file_name = output_file[0])
#
#    flag_file_name = output_file[1]
#    open(flag_file_name, "w")
#

#_________________________________________________________________________________________

#   do_all_bz

#_________________________________________________________________________________________
@follows(write_blastz_spec_file, write_hox_file)
@files(all_bz_parameters)
def do_all_bz(input_files, output_files, blastz_params, logger_proxy, logging_mutex, bin_directory):
    """
    Run the whole pairwise pipeline for one species pair as a single
    cluster job: blastzWrapper -> lav2maf -> maf_sort -> single_cov2.

    input_files   : [species1_path, species2_path]
    output_files  : [<pair>.singular, <pair>.s_cov2_completed]
    blastz_params : options appended to the blastzWrapper command
    """
    # the job runs with cwd = options.working_directory, so only the bare
    # file names are needed on the command line
    species1_file, species2_file = [os.path.split(f)[1] for f in input_files]
    single_maf_file, flag_file_name = output_files

    file_stem = os.path.split(single_maf_file)[1].replace(".singular", "")

    blastzWrapper = os.path.join(bin_directory, "blastzWrapper")
    lav2maf       = os.path.join(bin_directory, "lav2maf")
    maf_sort      = os.path.join(bin_directory, "maf_sort")
    # CONSISTENCY FIX: previously built from options.bin_directory, ignoring
    # the bin_directory parameter this task receives (same value today, but
    # the parameter exists precisely so the task does not read globals)
    single_cov2   = os.path.join(bin_directory, "single_cov2")

    # intermediate files for this species pair
    lav_file             = file_stem + ".lav"
    maf_file             = file_stem + ".maf"
    sorted_maf_file      = file_stem + ".orig.maf"

    cmd = ("{blastzWrapper} {species1_file} {species2_file} {blastz_params} > {lav_file}; "
           "{lav2maf} {lav_file} {species1_file} {species2_file} > {maf_file}; "
           "{maf_sort} {maf_file} {species1_file} > {sorted_maf_file};"
           "{single_cov2} {sorted_maf_file}")

    cmd = cmd.format  (
                        blastzWrapper   = blastzWrapper  ,
                        lav2maf         = lav2maf        ,
                        maf_sort        = maf_sort       ,
                        single_cov2     = single_cov2    ,
                        species1_file   = species1_file  ,
                        species2_file   = species2_file  ,
                        blastz_params   = blastz_params  ,
                        lav_file        = lav_file       ,
                        maf_file        = maf_file       ,
                        sorted_maf_file = sorted_maf_file,
        )

    # short queue job name built from the first four characters of each
    # species file name (species1_file/species2_file are already basenames,
    # so no further os.path.split is needed)
    short_name = "a_" + species1_file[0:4] + "_" + species2_file[0:4]
    msg = "run blastz on %s %s " % (species1_file, species2_file)

    run_cmd (cmd, msg, logging_mutex, logger_proxy,
                cwd=options.working_directory,
                job_name = short_name,
                queue_cmd_prefix = options.queue_cmd_prefix, stdout_file_name = single_maf_file)
    # touch the completion flag so ruffus can see the job as up to date
    open(flag_file_name, "w")


#_________________________________________________________________________________________

#   do_run_tba

#_________________________________________________________________________________________
@posttask(sleep_a_while)
@merge(do_all_bz, [options.result_file,
                        os.path.join(options.working_directory, "tba.completed")],
                        species_tree_str, logger_proxy, logging_mutex)
def do_run_tba(input_files, output_file, species_tree_str, logger_proxy, logging_mutex):
    """
    Runs TBA over all the pairwise single-coverage alignments.
    """
    binary_exe = os.path.join(options.bin_directory, "tba")

    # expose every "<s1>__vs__<s2>.singular" file under the
    # "<s1>.<s2>.sing.maf" name that the tba command below globs for
    for file_path, ignore in input_files:
        destination_link = file_path.replace("__vs__", ".").replace("singular", "sing.maf")
        if not os.path.exists(destination_link):
            os.symlink(file_path, destination_link)

    result_file = output_file[0]
    cmd = "%s \'%s\' *.sing.maf %s" % (binary_exe, species_tree_str, result_file)
    run_cmd (cmd, "run tba on *.sing.maf", logging_mutex, logger_proxy,
                job_name = "tba_" + os.path.split(result_file)[1], cwd=options.working_directory,
                queue_cmd_prefix = options.queue_cmd_prefix)

    # touch the completion flag
    open(output_file[1], "w")

    #def run_tba(target, directory):
    #    print 'getting species tree for ' + target
    #    species_tree = compile_species_tree(target, directory)
    #    print species_tree
    #    print 'running TBA for ' + target
    #    os.chdir(os.path.join(directory, target))
    #    os.system('tba \"' + species_tree + '\" *.*.maf ' + (os.path.join(unmasked_tba_results, target, target + '.maf')))
    #    os.chdir(working_dir)

#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

#   Main logic


#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888




#
#   Necessary to protect the "entry point" of the program under windows.
#       see: http://docs.python.org/library/multiprocessing.html#multiprocessing-programming
#
if __name__ == '__main__':
    # default target is the final merge task
    if not options.target_tasks:
        options.target_tasks = ["do_run_tba"]
    #setup_pipeline ()

    if options.debug:
        # BUG FIX: unittest was used below without ever being imported,
        # so --debug always died with a NameError
        import unittest

        class Test_tba_wrapper(unittest.TestCase):

            def test_function(self):
                """
                Print then run the pipeline up to do_run_tba.
                """
                pipeline_printout(sys.stdout, ["do_run_tba"], options.forced_tasks,
                                    verbose=options.verbose)
                pipeline_run(["do_run_tba"], options.forced_tasks,
                                    verbose=options.verbose)

        #
        #   call unit test without parameters
        #
        # strip our own --debug flag so unittest.main() does not choke on it
        if sys.argv.count("--debug"):
            sys.argv.remove("--debug")
        unittest.main()

    elif options.just_print:
        pipeline_printout(sys.stdout, options.target_tasks, options.forced_tasks,
                            verbose=options.verbose)

    elif options.flowchart:
        # draw the dependency graph instead of running anything
        pipeline_printout_graph (     open(options.flowchart, "w"),
                             options.flowchart_format,
                             options.target_tasks,
                             options.forced_tasks,
                             draw_vertically = not options.draw_horizontally,
                             no_key_legend  = not options.key_legend_in_graph)
    else:
        # normal run: execute the pipeline with up to options.jobs jobs
        pipeline_run(options.target_tasks, options.forced_tasks, multiprocess = options.jobs,
                            logger = stderr_logger if options.verbose else black_hole_logger,
                            verbose=options.verbose)








