#!/usr/bin/env python
"""

    loci_liftover.py
    [--log_file PATH]
    [--quiet]

"""

################################################################################
#
#   loci_liftover
#
#
#   Copyright (c) 12/11/2009 Leo Goodstadt
#   
#   Permission is hereby granted, free of charge, to any person obtaining a copy
#   of this software and associated documentation files (the "Software"), to deal
#   in the Software without restriction, including without limitation the rights
#   to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#   copies of the Software, and to permit persons to whom the Software is
#   furnished to do so, subject to the following conditions:
#   
#   The above copyright notice and this permission notice shall be included in
#   all copies or substantial portions of the Software.
#   
#   THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#   IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#   FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#   AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#   LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#   OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
#   THE SOFTWARE.
#################################################################################



#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

#   options        


#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

import sys, os
import os.path


# Work out this module's name and, when executed as a script, extend
# sys.path so the sibling "python_modules" directory is importable for testing.
if __name__ == '__main__':
    script_dir = os.path.split(os.path.abspath(sys.argv[0]))[0]
    modules_dir = os.path.abspath(os.path.join(script_dir, "..", "python_modules"))
    sys.path.append(modules_dir)
    # module name = script file name without directory or extension
    script_file_name = os.path.split(sys.argv[0])[1]
    module_name = os.path.splitext(script_file_name)[0]
else:
    # imported as a module: use the import name as-is
    module_name = __name__


if __name__ == '__main__':
    # Command-line option handling: only runs when executed as a script.
    from optparse import OptionParser
    import StringIO          # Python 2 module (io.StringIO in Python 3)

    parser = OptionParser(version="%prog 1.0", usage = "\n\n    %progs [options]")
    parser.add_option("-i", "--input_file", dest="input_file",
                      metavar="FILE", 
                      type="string",
                      help="Name and path of file containing loci to be lifted. ")
    parser.add_option("-l", "--liftover_parameters", dest="liftover_parameters",
                      metavar="FILE", 
                      type="string",
                      help="Name and path of liftover parameters file. ")
    
    #
    #   general options: verbosity / logging
    # 
    parser.add_option("-v", "--verbose", dest = "verbose",
                      action="count", default=0,
                      help="Print more verbose messages for each additional verbose level.")
    parser.add_option("-L", "--log_file", dest="log_file",
                      metavar="FILE", 
                      type="string",
                      help="Name and path of log file")
    parser.add_option("--skip_parameter_logging", dest="skip_parameter_logging",
                        action="store_true", default=False,
                        help="Do not print program parameters to log.")
    parser.add_option("-D", "--debug", dest="debug",
                        action="count", default=0,
                        help="Set default program parameters in debugging mode.")
    
    
    
    #
    #   pipeline options: standard Ruffus task selection / execution control
    # 
    parser.add_option("-t", "--target_tasks", dest="target_tasks",
                      action="append",
                      default = list(),
                      metavar="JOBNAME", 
                      type="string",
                      help="Target task(s) of pipeline.")
    parser.add_option("--forced_tasks", dest="forced_tasks",
                      action="append",
                      default = list(),
                      metavar="JOBNAME", 
                      type="string",
                      help="Pipeline task(s) which will be included even if they are up to date.")
    parser.add_option("-j", "--jobs", dest="jobs",
                      default=1,
                      metavar="jobs", 
                      type="int",
                      help="Specifies  the number of jobs (commands) to run simultaneously.")
    parser.add_option("--flowchart", dest="flowchart",
                      metavar="FILE", 
                      type="string",
                      help="Print a dependency graph of the pipeline that would be executed "
                            "to FILE, but do not execute it.")
    parser.add_option("--flowchart_format", dest="flowchart_format",
                      metavar="FORMAT", 
                      type="string",
                      default = 'svg',
                      help="format of dependency graph file. Can be 'ps' (PostScript), "+
                      "'svg' 'svgz' (Structured Vector Graphics), " +
                      "'png' 'gif' (bitmap  graphics) etc ")
    parser.add_option("-n", "--just_print", dest="just_print",
                        action="store_true", default=False,
                        help="Print a description of the jobs that would be executed, "
                            "but do not execute them.")
    parser.add_option("--key_legend_in_graph", dest="key_legend_in_graph",
                        action="store_true", default=False,
                        help="Print out legend and key for dependency graph.")
    parser.add_option("--draw_graph_horizontally", dest="draw_horizontally",
                        action="store_true", default=False,
                        help="Draw horizontal dependency graph.")
    
    # capture the generated help text so it can be shown again if a
    # mandatory option is missing (see check_mandatory_options below)
    f =StringIO.StringIO()
    parser.print_help(f)
    helpstr = f.getvalue()
    (options, remaining_args) = parser.parse_args()
    
    
    #vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv
    #                                             #
    #   Debug: Change these                       #
    #                                             #
    #^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    # --debug overrides all file options with hard-coded, machine-specific
    # developer paths.
    # NOTE(review): this sets options.log_parameters, but the flag defined
    # above is skip_parameter_logging; log_parameters appears unused -- verify.
    if options.debug:
        options.log_file                      = os.path.join("loci_liftover.log")
        options.verbose                       = 5
        options.log_parameters                = True
        options.liftover_parameters           = "/net/cpp-group/Leo/inprogress/brain_stuff/data/ucsc/Mm9.liftover_data"            
        options.input_file                    = "loci_for_lifting.data"
        options.working_directory             = "/net/cpp-group/Leo/inprogress/brain_stuff/working_dir/liftover"
        options.lifted_results                = "/net/cpp-group/Leo/inprogress/brain_stuff/data/ucsc/Mm9.lifted_loci"
        options.liftover_resources_directory  = "/net/cpp-group/Leo/inprogress/brain_stuff/data/liftover_resources"
        
    #vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv
    #                                             #
    #   Debug: Change these                       #
    #                                             #
    #^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    
    # 
    #   mandatory options (none at present; check exits with helpstr if unmet)
    # 
    from options import check_mandatory_options
    mandatory_options = []
    check_mandatory_options (options, mandatory_options, helpstr)
    
    
#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

#   imports        


#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

from ruffus import * 
from ruffus.ruffus_exceptions import JobSignalledBreak

#from json import dumps
#from collections import defaultdict



#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

#   Functions        


#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888


#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

#   Logger


#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

#
#   Set up the module logger and a multiprocessing-safe proxy so that
#   Ruffus jobs in other processes can log through the same handler.
#
#   Fix: the guard was misspelled "iif __name__", a syntax error that
#   prevented the whole script from compiling.
#
if __name__ == '__main__':
    import logging
    from lg_program_logging import  setup_std_logging, MESSAGE
    from options import get_option_strings
    

    # one logger named after this script / module
    logger = logging.getLogger(module_name)
    setup_std_logging(logger, options.log_file, options.verbose)
    
    #
    #   Allow logging across Ruffus pipeline
    # 
    def get_logger (logger_name, args):
        # always return the single shared module logger,
        # whatever name the proxy machinery asks for
        return logger

    from ruffus.proxy_logger import *
    (logger_proxy,
     logging_mutex) = make_shared_logger_and_proxy (get_logger,
                                                    module_name,
                                                    {})

    #
    #   log programme parameters unless suppressed on the command line
    # 
    if not options.skip_parameter_logging:
        programme_name = os.path.split(sys.argv[0])[1]
        logger.info("%s %s" % (programme_name, " ".join(get_option_strings(parser, options))))

#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

#   Pipeline


#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

            
            
            
            
#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

#   Lift over seed transcript regions to get target regions for other UCSC genomes


#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
#_________________________________________________________________________________________
#
#   overlapping_loci
#
#_________________________________________________________________________________________
def overlapping_loci (*loci):
    """
    Test whether two loci overlap.

    Each locus is a [chrm, beg, end] sequence; returns True when both lie
    on the same contig and their half-open intervals intersect.
    """
    # loci on different contigs can never overlap
    if loci[0][0] != loci[1][0]:
        return False

    # order by position, then overlap <=> earlier locus ends strictly
    # after the later locus begins
    ordered = sorted(loci)
    return ordered[0][2] > ordered[1][1]

#_________________________________________________________________________________________
#
#   merge_neighboring_loci
#
#_________________________________________________________________________________________
def merge_neighboring_loci (loci, adjacency_threshold):
    """
    merge adjacent or overlapping loci if they are closer than adjacency_threshold
    """
    loci = sorted((chrm, strand, beg, end, region) for region, chrm, beg, end, strand in loci)
    merged_loci = []
    curr_locus = list(loci[0])
    for locus in loci[1:]:
        # if same contigs and overlap merge locus into curr_locus
        if (locus[0:2] == curr_locus[0:2] and
            locus[1] - curr_locus [2] < adjacency_threshold):
            curr_locus[2] = max(locus[2], curr_locus[2])

            # append regions
            curr_locus[3] = " ".join(sorted((curr_locus[3], locus[3])))

        # save current locus and move on
        else:
            merged_loci.append(list(curr_locus))
            curr_locus = list(locus)
            continue

    merged_loci.append(list(curr_locus))
    return sorted((region, chrm, beg, end, strand) for chrm, strand, beg, end, region in merged_loci)




#_________________________________________________________________________________________
#
#   cache_lifted_genomic_loci
#
#_________________________________________________________________________________________
def cache_lifted_genomic_loci (logger):
    """
    Get cached seed transcript regions on reference genomes
    liftover onto target genomes

    Steps:
      1. write seed transcript loci (reference coordinates) to a file
         suitable for UCSC liftOver
      2. run liftOver once per target species (or twice, via an
         intermediate species, when a second chain file is given)
      3. merge neighbouring lifted fragments, then discard fragments that
         are both below a cutoff (max of 5 kb or 5% of the total lifted
         length) and do not overlap the unexpanded seed liftover
      4. append surviving loci to the lifted-loci results file

    NOTE(review): relies on module-level names not defined in the visible
    file (liftover_working_dir, lifted_genomic_loci_file, liftover_data_file,
    liftover_dir, liftover_bin, seed_species, liftover_merge_threshold,
    get_seed_transcript_expanded_loci, get_targets_with_nisc_clones,
    parse_delimited, run_cmd, defaultdict) -- presumably provided by a
    missing import or by "from ruffus import *"; confirm before reuse.
    """

    logger.log(MESSAGE, "Save loci of seed regions lifted onto ucsc genomes")

    #
    #   make directories if necessary
    #     
    if not os.path.exists(liftover_working_dir):
        os.makedirs(liftover_working_dir)


    # e.g. target_loci_on_reference[target][contig/beg/end] = "5"/513123/531231
    seed_transcript_expanded_loci = get_seed_transcript_expanded_loci(logger)

    #
    #   only include targets with nisc clones
    # 
    #   NOTE: deleting while iterating .keys() is safe in Python 2 because
    #   keys() returns a list copy (Python 3 would need list(...) here)
    targets_with_nisc_clones = sorted(get_targets_with_nisc_clones())
    for target in seed_transcript_expanded_loci.keys():
        if target not in targets_with_nisc_clones:
            del seed_transcript_expanded_loci[target]



    #
    #   write out reference coordinates in preparation for running liftover 
    # 

    loci_to_lift_file_name  = os.path.join(liftover_working_dir, "reference_genome.liftover.data")
    loci_to_lift_file = open(loci_to_lift_file_name, "w")

    # one tab-delimited line per locus; iteritems is Python 2 only
    for target, loci in seed_transcript_expanded_loci.iteritems():
        for locus in loci:
            loci_to_lift_file.write( "\t".join(map(str, locus)) + "\n")
    loci_to_lift_file.close()


    #
    #   lifted loci results file (header first, data appended below)
    # 
    lifted_genomic_loci = open(lifted_genomic_loci_file,  "w")
    lifted_genomic_loci.write("\t".join (["target", "species", "contig", 
                                          "beg", "end", "length", "strand", "order"])+ "\n")


    #
    #   Start by add reference "seed" species loci to list of lifted loci
    # 
    for target, loci in sorted(seed_transcript_expanded_loci.iteritems()):
        for locus in loci:
            chrm, beg, end, ignore_target, ignore_wierdparameter, strand = locus
            # only interested in the loci after padding on each side
            # (names carrying the "expanded_" prefix)
            if "expanded_" not in ignore_target:
                continue
            # "order" column is fixed to 1 for the seed species rows
            lifted_genomic_loci.write("\t".join(map(str, [target, seed_species, chrm, 
                                                          beg, end, end - beg, strand, 1])) + "\n")







    # per_target_per_species accumulators:
    #   expanded_genomic_loci[target][species] -> [region, chrm, beg, end, strand]
    #   seed_genomic_loci[target][species]     -> [chrm, beg, end, strand]
    expanded_genomic_loci = defaultdict(lambda: defaultdict(list))
    seed_genomic_loci     = defaultdict(lambda: defaultdict(list))

    #
    #   run liftover for each species using data in cached liftover data file
    # 
    for (species, 
         lift_chain_file1, 
         lift_chain_file2,
         via_species,
         minSizeQ1,
         minSizeQ2) in parse_delimited (open(liftover_data_file), 
                                               field_names = ["species",
                                                              "liftover file1",
                                                              "liftover file2",
                                                              "via_species",
                                                              "min_liftover_len1",
                                                              "min_liftover_len2"],
                                               header=True):

        # two-hop lifts tag their intermediate files with ".via_<species>"
        step_name = ".via_%s" % (via_species) if lift_chain_file2 else ""



        input_file   = loci_to_lift_file_name
        results_file = os.path.join(liftover_working_dir, 
                                    "%s%s.mapped_loci" % (species,step_name))
        error_file   = os.path.join(liftover_working_dir, 
                                    "%s%s.unmapped" % (species,step_name))
        chain_file   = os.path.join(liftover_dir, lift_chain_file1)

        # 
        # create cmd for first (possibly only) liftOver hop
        #  
        cmd =   " ".join([
                    liftover_bin,
                    "-minMatch=0.01",            
                    "-minSizeT=4000",            
                    "-multiple",
                    "-minSizeQ=%s" % minSizeQ1,
                    input_file,                  
                    chain_file,                  
                    results_file,                 
                    error_file,                  
                    ])
        #sys.stderr.write(cmd + "\n")
        run_cmd (cmd, "lift via %s" % (via_species), logger)

        #os.system(cmd)

        # second hop: reformat the intermediate results and lift again
        if lift_chain_file2:


            input_file   = os.path.join(liftover_working_dir, 
                                        "%s%s.input" % (species,step_name))
            reformatted_input = open(input_file, "w")
            # reformat results into 6-column BED; target and region are
            # fused with "_" so they survive as a single name field
            with open(results_file) as results:
                for line in results:
                    chrm, beg, end, target, region, strand = line.strip().split("\t")
                    reformatted_input.write("\t".join([chrm, beg, end, target + "_" + region, "0", strand])+ "\n")
            reformatted_input.close()

            results_file = os.path.join(liftover_working_dir, "%s.mapped_loci" % (species))
            error_file   = os.path.join(liftover_working_dir, "%s.unmapped" % (species))
            chain_file   = os.path.join(liftover_dir, lift_chain_file2)

            cmd =   " ".join([
                        liftover_bin,
                        "-minMatch=0.01",            
                        "-bedPlus=6",            
                        "-minSizeT=4000",            
                        "-multiple",
                        "-minSizeQ=%s" % minSizeQ2,
                        input_file,                  
                        chain_file,                  
                        results_file,                 
                        error_file,                  
                        ])
            #sys.stderr.write(cmd + "\n")
            #os.system(cmd)
            run_cmd (cmd, "lift to %s" % (species), logger)

        # save the results; "expanded_" prefix ([9:] strips it) marks loci
        # padded on each side, the rest are unexpanded seed loci
        with open(results_file) as results:
            for line in  results:
                try:     # DEBUGG
                    chrm, beg, end, target,region, strand = line.strip().split("\t")
                    is_expanded = False
                    # get rid of expanded
                    if "expanded_" in target:
                        target = target[9:]
                        is_expanded = True
                    beg, end = map(int, [beg, end])
                    # re-split on the first "_" so any extra "_"-joined
                    # suffix from the two-hop rename stays in region
                    target = target + "_" + region
                    target, region = target.split("_", 1)
                    if is_expanded:
                        expanded_genomic_loci[target][species].append([region, chrm, beg, end, strand])
                    else:
                        seed_genomic_loci[target][species].append([chrm, beg, end, strand])
                except:         # DEBUGG
                    print line  # DEBUGG
                    print results_file
                    raise       # DEBUGG

    #
    #   calculate the cutoff for lifted fragments which we shall ignore
    # 
    #       minimum of 5kb or 5% of the total lifted size
    # 
    cutoff = defaultdict(lambda: defaultdict(int))
    for target in expanded_genomic_loci:
        for species in expanded_genomic_loci[target]:

            # sort loci and merged adjacent
            loci = expanded_genomic_loci[target][species]
            loci = merge_neighboring_loci(loci, liftover_merge_threshold)
            expanded_genomic_loci[target][species] = loci

            # total lifted length per target/species
            for region, chrm, beg, end, strand in expanded_genomic_loci[target][species]:
                cutoff[target][species] += end - beg

            cutoff[target][species] = max(cutoff[target][species] * 0.05, 5000)


    # unused: dead code?
    genomic_loci = defaultdict(lambda: defaultdict(list))

    #
    #   keep genomic loci if 
    #       1) larger than cutoff
    #       2) overlaps with the seed transcript liftover 
    #           (without expansion on either side
    #                 
    for target in expanded_genomic_loci:
        for species in expanded_genomic_loci[target]:
            for region, chrm, beg, end, strand in expanded_genomic_loci[target][species]:
                if (end - beg < cutoff[target][species]):
                    for locus in seed_genomic_loci[target][species]:
                        if overlapping_loci(locus, [chrm, beg, end]):
                            break
                    # for/else: runs only when no seed locus overlapped
                    # (no break) -- less than cutoff and no overlap, so
                    # ignore this expanded genomic locus
                    else:
                        continue

                # unused: dead code?
                genomic_loci[target][species].append([chrm, beg, end, end - beg, strand])
                lifted_genomic_loci.write("\t".join(map(str, [target, species, chrm, 
                                                              beg, end, end - beg, strand, region])) + "\n")
#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

#   Main logic


#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
    



#
#   Necessary to protect the "entry point" of the program under windows.
#       see: http://docs.python.org/library/multiprocessing.html#multiprocessing-programming
#
if __name__ == '__main__':
    #
    #   Dispatch: unit tests in debug mode, otherwise dry-run / flowchart /
    #   actual pipeline execution depending on the command-line options.
    #
    #   Fix: an orphaned, over-indented duplicate of the debug test
    #   scaffolding (Test_prepare_liftover_data calling the undefined
    #   download_liftover_files) followed the else-branch below; it was
    #   unreachable, raised an IndentationError at compile time, and has
    #   been removed.
    #
    if options.debug:
        import unittest
        class Test_loci_liftover(unittest.TestCase):

            #       self.assertEqual(self.seq, range(10))
            #       self.assert_(element in self.seq)
            #       self.assertRaises(ValueError, random.sample, self.seq, 20)



            def test_function(self):
                """
                    test 
                """

        #
        #   call unit test without parameters: strip --debug so
        #   unittest.main() does not choke on the unknown flag
        #     

        if sys.argv.count("--debug"):
            sys.argv.remove("--debug")
        unittest.main()

    elif options.just_print:
        # dry run: describe the jobs that would run, without executing them
        pipeline_printout(sys.stdout, options.target_tasks, options.forced_tasks,
                            verbose=options.verbose)

    elif options.flowchart:
        # write the pipeline dependency graph to file and exit
        pipeline_printout_graph (     open(options.flowchart, "w"),
                             options.flowchart_format,
                             options.target_tasks,
                             options.forced_tasks,
                             draw_vertically = not options.draw_horizontally,
                             no_key_legend  = not options.key_legend_in_graph)
    else:
        # run the pipeline proper
        pipeline_run(options.target_tasks, options.forced_tasks, multiprocess = options.jobs,
                            logger = stderr_logger if options.verbose else black_hole_logger,
                            verbose=options.verbose)



#        get_loci_for_lifting
#                None
#                ->  identifier.species.loci_to_lift, liftover_resources
#            
#        collate_loci_per_species
#            
#            identifier.species.loci_to_lift
#            liftover_resources
#                -> species.loci_to_lift
#                
#                
#        do_liftover
#        
#            species.loci_to_lift
#                
#                -> per_species.lifted_loci
#                
#                
#        write_lifted_loci_per_target_per_species
                
