#!/usr/bin/env python
"""

    run_pecan.py
    [--log_file PATH]
    [--quiet]

"""

################################################################################
#
#   run_pecan
#
#
#   Copyright (c) 11/24/2009 Leo Goodstadt
#   
#   Permission is hereby granted, free of charge, to any person obtaining a copy
#   of this software and associated documentation files (the "Software"), to deal
#   in the Software without restriction, including without limitation the rights
#   to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#   copies of the Software, and to permit persons to whom the Software is
#   furnished to do so, subject to the following conditions:
#   
#   The above copyright notice and this permission notice shall be included in
#   all copies or substantial portions of the Software.
#   
#   THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#   IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#   FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#   AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#   LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#   OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
#   THE SOFTWARE.
#################################################################################



#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

#   options        


#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

import sys, os
import os.path


# Derive the module name from argv[0] when run as a script (the executable
# directory is also computed, kept around for optional sys.path tweaking
# during testing); when imported, simply reuse the package-assigned __name__.
if __name__ == '__main__':
    exe_path = os.path.dirname(os.path.abspath(sys.argv[0]))
    #sys.path.insert(0, "/net/cpp-group/Leo/inprogress/oss_projects/ruffus/installation/src")
    module_name = os.path.splitext(os.path.basename(sys.argv[0]))[0]
else:
    module_name = __name__


if __name__ == '__main__':
    from optparse import OptionParser
    import StringIO

    # "%prog" is substituted by optparse with the program name.
    # (fix: was "%progs", which would have rendered the name with a stray "s")
    parser = OptionParser(version="%prog 1.0", usage = "\n\n    %prog [options]")
    parser.add_option("-i", "--input_file", dest="input_file",
                      metavar="FILE",
                      type="string",
                      help="Name and path of input file. "
                          "Defaults to reading from STDIN.")

    #
    #   general options: verbosity / logging
    #
    parser.add_option("-v", "--verbose", dest = "verbose",
                      action="count", default=0,
                      help="Print more verbose messages for each additional verbose level.")
    parser.add_option("-L", "--log_file", dest="log_file",
                      metavar="FILE",
                      type="string",
                      help="Name and path of log file")
    parser.add_option("--skip_parameter_logging", dest="skip_parameter_logging",
                        action="store_true", default=False,
                        help="Do not print program parameters to log.")
    parser.add_option("-D", "--debug", dest="debug",
                        action="count", default=0,
                        help="Set default program parameters in debugging mode.")



    #
    #   pipeline
    #
    parser.add_option("-t", "--target_tasks", dest="target_tasks",
                      action="append",
                      default = list(),
                      metavar="JOBNAME",
                      type="string",
                      help="Target task(s) of pipeline.")
    parser.add_option("--forced_tasks", dest="forced_tasks",
                      action="append",
                      default = list(),
                      metavar="JOBNAME",
                      type="string",
                      help="Pipeline task(s) which will be included even if they are up to date.")
    parser.add_option("-j", "--jobs", dest="jobs",
                      default=1,
                      metavar="jobs",
                      type="int",
                      help="Specifies  the number of jobs (commands) to run simultaneously.")
    parser.add_option("--flowchart", dest="flowchart",
                      metavar="FILE",
                      type="string",
                      help="Print a dependency graph of the pipeline that would be executed "
                            "to FILE, but do not execute it.")
    parser.add_option("--flowchart_format", dest="flowchart_format",
                      metavar="FORMAT",
                      type="string",
                      default = 'svg',
                      help="format of dependency graph file. Can be 'ps' (PostScript), "+
                      "'svg' 'svgz' (Structured Vector Graphics), " +
                      "'png' 'gif' (bitmap  graphics) etc ")
    parser.add_option("-n", "--just_print", dest="just_print",
                        action="store_true", default=False,
                        help="Print a description of the jobs that would be executed, "
                            "but do not execute them.")
    parser.add_option("--key_legend_in_graph", dest="key_legend_in_graph",
                        action="store_true", default=False,
                        help="Print out legend and key for dependency graph.")
    parser.add_option("--draw_graph_horizontally", dest="draw_horizontally",
                        action="store_true", default=False,
                        help="Draw horizontal dependency graph.")

    # Render the parser's help text into an in-memory buffer so it can be
    # re-used later (e.g. by check_mandatory_options) without re-printing.
    f = StringIO.StringIO()
    parser.print_help(f)
    helpstr = f.getvalue()
    (options, remaining_args) = parser.parse_args()


    #vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv
    #                                             #
    #   Debug: default parameters used when the   #
    #   --debug flag is given                     #
    #                                             #
    #^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    if options.debug:
        options.log_file                = os.path.join("run_pecan.log")
        options.verbose                 = 5
        # fix: the old code set options.log_parameters, an attribute no other
        # code reads (the parser destination is "skip_parameter_logging"), so
        # the assignment was silently ignored.  Explicitly force parameter
        # logging on in debug mode instead.
        options.skip_parameter_logging  = False

    #
    #   mandatory options
    #
    from options import check_mandatory_options
    mandatory_options = []
    check_mandatory_options (options, mandatory_options, helpstr)
    
    
#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

#   imports        


#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

from ruffus import * 
from ruffus.ruffus_exceptions import JobSignalledBreak
import re

# biopython
from Bio import SeqIO

#from json import dumps
#from collections import defaultdict



#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

#   Functions        


#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

#_________________________________________________________________________________________

#   _get_species_tree_via_ncbi

#_________________________________________________________________________________________
import get_ncbi_tax_tree
from get_ncbi_tax_tree import get_ncbi_tree, scientific_name_to_tax_id
def _get_species_tree_via_ncbi (species_names):
    """
    Build a Newick species tree for species_names from the NCBI taxonomy.

    Species names are expected in lower-case, underscore-separated form
    (e.g. "homo_sapiens"); the returned Newick string uses that same form.
    """
    # NCBI stores capitalised, space-separated scientific names:
    # map NCBI form -> original underscore form
    ncbi_name_for = dict((s.capitalize().replace("_", " "), s) for s in species_names)

    # Some NCBI entries carry a middle name part.  Build a lookup that maps
    # both the full NCBI name and its two-part (first + last word)
    # abbreviation back to the full NCBI name, so either form resolves.
    all_ncbi_names = get_ncbi_tax_tree.scientific_name_to_tax_id.keys()
    def abbreviate (name):
        # collapse "Genus middle species" -> "Genus species"
        if " " not in name:
            return name
        parts = name.split(" ")
        return parts[0] + " " + parts[-1]
    full_ncbi_name = dict((abbreviate(n), n) for n in all_ncbi_names)
    full_ncbi_name.update(dict((n, n) for n in all_ncbi_names))
    # re-key on the canonical (possibly three-part) NCBI name;
    # an unknown species deliberately raises KeyError here
    ncbi_name_for = dict((full_ncbi_name[n], s)
                            for n, s in ncbi_name_for.iteritems())

    # taxonomical sub-tree covering just these species
    from get_ncbi_tax_tree import get_ncbi_tree
    tree = get_ncbi_tree (ncbi_name_for.keys(), True)

    import TreeTools
    newick = TreeTools.Tree2Newick(tree, False, False)

    # substitute the original underscore-formatted names back into the tree
    for ncbi_name, original_name in ncbi_name_for.iteritems():
        newick = newick.replace(ncbi_name, original_name)
    return newick

#_________________________________________________________________________________________

#   _get_fake_nested_species

#_________________________________________________________________________________________
def _get_fake_nested_species (cnt, species_name = 'a'):
    # first name
    fake_species_str = species_name
    for i in range(cnt - 1):
        fake_species_str = "(" + fake_species_str + "," + species_name + ")"
    return fake_species_str


#_________________________________________________________________________________________

#   construct_pecan_cmd_line

#_________________________________________________________________________________________
def construct_pecan_cmd_line (sequence_file_names, logger):
    """
    Build the pecan java command line as a single template string.

    sequence_file_names maps species name -> list of sequence file paths.
    The guide tree comes from the NCBI taxonomy; a species that contributes
    several sequence files is represented by a nested "fake" clade with one
    leaf per file.  Placeholders ({pecan_binary}, {confidences_file},
    {exonerate_binary}, {output_file}, {log_file}) are left for the caller
    to fill in.
    """
    # taxonomical sub-tree for every species in this target (via NCBI)
    species_tree = _get_species_tree_via_ncbi (sequence_file_names.keys())

    # File names must follow the left-to-right leaf order of the guide tree.
    # Snapshot the leaf names before any substitution alters the tree string.
    leaf_names = [name for name in re.split("[();,]+",  species_tree) if len(name)]
    ordered_file_names = list()
    for species in leaf_names:
        files_for_species = sequence_file_names[species]
        #debug species tree using the following line
        #fake_clade = _get_fake_nested_species(len(files_for_species), species)
        fake_clade = _get_fake_nested_species(len(files_for_species))
        # one fake leaf per sequence file of this species
        species_tree = species_tree.replace(species, fake_clade)
        ordered_file_names.extend(files_for_species)

    file_name_args = " ".join(ordered_file_names)

    cmd_line = ("java "
                "-cp {pecan_binary} "
                "-server "
                "bp.pecan.Pecan "
                    #-k Output (Probcons) confidence values
                # -l Include not aligned probabilities in confidence values.
                #    Otherwise silly values when gaps
                "-l "
                # -m confidence values in MFA
                "-m "
                "-n "
                # -p confidence file name
                "-p {confidences_file} "
                "-J {exonerate_binary} "
                "-G {output_file} "
                "-A -B {log_file} "
                "-E '%s' "
                "-F %s") % (species_tree, file_name_args)
    return cmd_line


#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

#   Logger


#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

if __name__ == '__main__':
    import logging
    from lg_program_logging import  setup_std_logging, MESSAGE
    from options import get_option_strings
    

    # Module-level logger named after this script/module; configured from
    # the command-line options (--log_file / --verbose) parsed above.
    logger = logging.getLogger(module_name)
    setup_std_logging(logger, options.log_file, options.verbose)
    
    #
    #   Allow logging across Ruffus pipeline
    # 
    # Factory handed to the proxy machinery: every (multiprocessing) job
    # shares this one already-configured logger rather than creating its own.
    def get_logger (logger_name, args):
        return logger

    from ruffus.proxy_logger import *
    # logger_proxy is picklable and safe to pass to pipeline jobs;
    # logging_mutex serialises writes from concurrent jobs.
    (logger_proxy,
     logging_mutex) = make_shared_logger_and_proxy (get_logger,
                                                    module_name,
                                                    {})

    #
    #   log programme parameters
    # 
    # Record the full invocation (programme name + option strings) unless
    # the user asked for that to be skipped.
    if not options.skip_parameter_logging:
        programme_name = os.path.split(sys.argv[0])[1]
        logger.info("%s %s" % (programme_name, " ".join(get_option_strings(parser, options))))

#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

#   Pipeline


#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

#   Main logic


#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
    
    


#
#   Necessary to protect the "entry point" of the program under windows.
#       see: http://docs.python.org/library/multiprocessing.html#multiprocessing-programming
#
if __name__ == '__main__':
    if options.debug:
        # --debug runs the built-in unit tests instead of the pipeline.
        # fix: unittest was referenced below without ever being imported,
        # so this branch always died with a NameError.
        import unittest

        class Test_pecan_wrapper(unittest.TestCase):

            #       self.assertEqual(self.seq, range(10))
            #       self.assert_(element in self.seq)
            #       self.assertRaises(ValueError, random.sample, self.seq, 20)



            def test_function(self):
                """
                    Smoke-test pecan command-line construction for a
                    two-species, two-files-per-species input.
                """
                sequence_file_names = {"homo_sapiens": ["test1", "test2"],
                                       "mus_musculus": ["test3", "test4"]}
                # parenthesised so this prints identically under Python 2 and 3
                print(construct_pecan_cmd_line (sequence_file_names, logger))

        #
        #   call unit test without parameters: strip our own --debug flag
        #   so unittest.main() does not choke on an option it does not know
        #
        if sys.argv.count("--debug"):
            sys.argv.remove("--debug")
        unittest.main()

    elif options.just_print:
        # describe the jobs that would run, without running them
        pipeline_printout(sys.stdout, options.target_tasks, options.forced_tasks,
                            verbose=options.verbose)

    elif options.flowchart:
        # write the dependency graph to file instead of executing the pipeline
        pipeline_printout_graph (     open(options.flowchart, "w"),
                             options.flowchart_format,
                             options.target_tasks,
                             options.forced_tasks,
                             draw_vertically = not options.draw_horizontally,
                             no_key_legend  = not options.key_legend_in_graph)
    else:
        # run the pipeline for real
        pipeline_run(options.target_tasks, options.forced_tasks, multiprocess = options.jobs,
                            logger = stderr_logger if options.verbose else black_hole_logger,
                            verbose=options.verbose)








