#!/usr/bin/env python
"""

    ucsc_display_pipeline.py
    [--log_file PATH]
    [--verbose]

"""

################################################################################
#
#   ucsc_display_pipeline
#
#
#   Copyright (c) 4/12/2011 Leo Goodstadt
#
#   Permission is hereby granted, free of charge, to any person obtaining a copy
#   of this software and associated documentation files (the "Software"), to deal
#   in the Software without restriction, including without limitation the rights
#   to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#   copies of the Software, and to permit persons to whom the Software is
#   furnished to do so, subject to the following conditions:
#
#   The above copyright notice and this permission notice shall be included in
#   all copies or substantial portions of the Software.
#
#   THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#   IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#   FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#   AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#   LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#   OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
#   THE SOFTWARE.
#################################################################################

import sys, os

# Work out a human-readable module name: when run as a script, derive it
# from the executable's file name (used later to name the logger); when
# imported, just reuse __name__.
if __name__ == '__main__':
    script_path = os.path.abspath(sys.argv[0])
    exe_path = os.path.dirname(script_path)
    module_name = os.path.splitext(os.path.basename(sys.argv[0]))[0]
else:
    module_name = __name__

# Use import path from <<../python_modules>>
if __name__ == '__main__':
    sys.path.append(os.path.abspath(os.path.join("/home/lg/python_modules")))



#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

#   options


#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888


if __name__ == '__main__':
    from optparse import OptionParser
    # StringIO moved to the io module in Python 3; fall back so the
    # script keeps working on both interpreter lines.
    try:
        from StringIO import StringIO
    except ImportError:
        from io import StringIO

    parser = OptionParser(version="%prog 1.0", usage = "\n\n    %prog [options]")

    #
    #   input / reference files
    #
    parser.add_option("-i", "--input_file", dest="input_file",
                        metavar="FILE",
                        action = "append",
                        type="string",
                        help="bam files.")
    parser.add_option("-g", "--genome_sizes_file", dest="genome_sizes_file",
                        metavar="FILE",
                        type="string",
                        help="Tab delimited genomes sizes from UCSC.")

    #
    #   general options: verbosity / logging
    #
    parser.add_option("-v", "--verbose", dest = "verbose",
                      action="count", default=0,
                      help="Print more verbose messages for each additional verbose level.")
    parser.add_option("-L", "--log_file", dest="log_file",
                      metavar="FILE",
                      type="string",
                      help="Name and path of log file")
    parser.add_option("--skip_parameter_logging", dest="skip_parameter_logging",
                        action="store_true", default=False,
                        help="Do not print program parameters to log.")
    parser.add_option("--debug", dest="debug",
                        action="count", default=0,
                        help="Set default program parameters in debugging mode.")

    #
    #   pipeline
    #
    parser.add_option("-t", "--target_tasks", dest="target_tasks",
                        action="append",
                        default = list(),
                        metavar="JOBNAME",
                        type="string",
                        help="Target task(s) of pipeline.")
    parser.add_option("-j", "--jobs", dest="jobs",
                        default=6,
                        metavar="N",
                        type="int",
                        help="Allow N jobs (commands) to run simultaneously.")
    parser.add_option("-n", "--just_print", dest="just_print",
                        action="store_true", default=False,
                        help="Don't actually run any commands; just print the pipeline.")
    parser.add_option("--flowchart", dest="flowchart",
                        metavar="FILE",
                        type="string",
                        help="Don't actually run any commands; just print the pipeline "
                             "as a flowchart.")

    #
    #   Less common pipeline options
    #
    parser.add_option("--key_legend_in_graph", dest="key_legend_in_graph",
                        action="store_true", default=False,
                        help="Print out legend and key for dependency graph.")
    parser.add_option("--draw_graph_horizontally", dest="draw_horizontally",
                        action="store_true", default=False,
                        help="Draw horizontal dependency graph.")
    parser.add_option("--flowchart_format", dest="flowchart_format",
                        metavar="FORMAT",
                        type="string",
                        default = 'svg',
                        help="format of dependency graph file. Can be 'ps' (PostScript), "+
                              "'svg' 'svgz' (Structured Vector Graphics), " +
                              "'png' 'gif' (bitmap  graphics) etc ")
    parser.add_option("--forced_tasks", dest="forced_tasks",
                        action="append",
                        default = list(),
                        metavar="JOBNAME",
                        type="string",
                        help="Pipeline task(s) which will be included even if they are up to date.")

    # Render the full help text once so it can be appended to error
    # messages (e.g. by check_mandatory_options below).
    f = StringIO()
    parser.print_help(f)
    helpstr = f.getvalue()
    original_args = " ".join(sys.argv)
    (options, remaining_args) = parser.parse_args()


    #vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv
    #                                             #
    #   Debug: Change these                       #
    #                                             #
    #^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    #
    # Hard-coded development defaults kick in whenever an option was not
    # given on the command line.
    if not options.input_file:
        options.input_file = ["/data/mus/lg/projects/dnase_hypersensitivity/data/*.bam"]
    if not options.genome_sizes_file:
        options.genome_sizes_file = "/data/mus/lg/projects/dnase_hypersensitivity/data/genome_sizes_from_ucsc.data"
    if not options.log_file:
        options.log_file            = os.path.join("ucsc_display_pipeline.log")
    if not options.verbose:
        options.verbose                 = 2
    #vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv
    #                                             #
    #   Debug: Change these                       #
    #                                             #
    #^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

    #
    #   mandatory options
    #
    mandatory_options = []
    def check_mandatory_options (options, mandatory_options, helpstr):
        """
        Check if specified mandatory options have been defined.

        Raises an Exception listing every missing option (by its dest
        name, prefixed with "--") followed by the full help text.
        """
        missing_options = ["--" + o for o in mandatory_options
                                    if not getattr(options, o)]
        if not missing_options:
            return

        raise Exception("Missing mandatory parameter%s: %s.\n\n%s\n\n" %
                        ("s" if len(missing_options) > 1 else "",
                         ", ".join(missing_options),
                         helpstr))
    check_mandatory_options (options, mandatory_options, helpstr)


#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

#   imports


#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

from ruffus import *
from ruffus.ruffus_exceptions import JobSignalledBreak

from glob import glob
from run_cmd import run_cmd
#from json import dumps
#from collections import defaultdict
from colours import get_contrasty_colours


#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

#   Functions


#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888


#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

#   Logger


#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

if __name__ == '__main__':
    import logging
    import logging.handlers

    # Custom log level between DEBUG (10) and INFO (20); used by the
    # pipeline tasks below to record the shell commands they run.
    MESSAGE = 15
    logging.addLevelName(MESSAGE, "MESSAGE")

    def setup_std_logging (logger, log_file, verbose):
        """
        set up logging using programme options

        Attaches up to two handlers to *logger*:
          * a FileHandler on *log_file* at level MESSAGE and above;
          * a stderr StreamHandler at level DEBUG when *verbose* is set.
        When both are active, INFO records are filtered out of stderr so
        routine info appears only in the log file. If neither is
        requested, a no-op handler is installed so logging calls do not
        warn about a missing handler.
        """
        class debug_filter(logging.Filter):
            """
            Ignore INFO messages
            """
            def filter(self, record):
                return logging.INFO != record.levelno

        class NullHandler(logging.Handler):
            """
            for when there is no logging
            """
            def emit(self, record):
                pass

        # We are interested in all messages: the handlers (not the
        # logger) decide what is kept.
        logger.setLevel(logging.DEBUG)
        has_handler = False

        # log to file if that is specified
        if log_file:
            handler = logging.FileHandler(log_file, delay=False)
            # NB: class name is a typo for "stripped_down_formatter";
            # kept as-is in this documentation-only pass.
            class stipped_down_formatter(logging.Formatter):
                def format(self, record):
                    # On the very first record only, emit a banner line
                    # with the date and logger name before the message.
                    prefix = ""
                    if not hasattr(self, "first_used"):
                        self.first_used = True
                        prefix = "\n" + self.formatTime(record, "%Y-%m-%d")
                        prefix += " %(name)s\n" % record.__dict__
                    # Routine levels drop the level name from the line;
                    # warnings/errors keep it.
                    # NOTE(review): assigning self._fmt relies on a
                    # pre-3.2 logging.Formatter internal (newer versions
                    # keep it on self._style._fmt) — confirm the target
                    # Python version.
                    if record.levelname in ("INFO", "MESSAGE", "DEBUG"):
                        self._fmt = " %(asctime)s - %(message)s"
                    else:
                        self._fmt = " %(asctime)s - %(levelname)-7s - %(message)s"
                    return prefix + logging.Formatter.format(self, record)
            handler.setFormatter(stipped_down_formatter("%(asctime)s - %(name)s - %(levelname)6s - %(message)s", "%H:%M:%S"))
            handler.setLevel(MESSAGE)
            logger.addHandler(handler)
            has_handler = True

        # log to stderr if verbose
        if verbose:
            stderrhandler = logging.StreamHandler(sys.stderr)
            stderrhandler.setFormatter(logging.Formatter("    %(message)s"))
            stderrhandler.setLevel(logging.DEBUG)
            # When also logging to a file, keep INFO records off stderr
            # (they still reach the file handler).
            if log_file:
                stderrhandler.addFilter(debug_filter())
            logger.addHandler(stderrhandler)
            has_handler = True

        # no logging
        if not has_handler:
            logger.addHandler(NullHandler())


    #
    #   set up log
    #
    logger = logging.getLogger(module_name)
    setup_std_logging(logger, options.log_file, options.verbose)

    #
    #   Allow logging across Ruffus pipeline
    #
    def get_logger (logger_name, args):
        # Factory passed to make_shared_logger_and_proxy: always hand
        # back the module-level logger configured above.
        return logger

    from ruffus.proxy_logger import *
    # logger_proxy/logging_mutex let concurrently running pipeline jobs
    # log through one shared logger without interleaving output.
    (logger_proxy,
     logging_mutex) = make_shared_logger_and_proxy (get_logger,
                                                    module_name,
                                                    {})

    #
    #   log programme parameters
    #
    if not options.skip_parameter_logging:
        programme_name = os.path.split(sys.argv[0])[1]
        logger.info(programme_name)

#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

#   Pipeline


#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

# Colour assigned to each input file's base name (filled in further down).
file_name_to_colours = {}

# Expand each --input_file glob pattern into concrete file paths.
input_file_names = []
for pattern in options.input_file:
    input_file_names.extend(glob(pattern))

def get_file_part (f):
    """Return the base name of *f* with its directory and extension removed."""
    base_name = os.path.basename(f)
    return os.path.splitext(base_name)[0]

#
#   give each file name a different contrasty colour
#
input_file_names.sort()

# Fixed palette of six contrasting RGB colours.
#contrasty_colours = get_contrasty_colours()
contrasty_colours =((0x00, 0x5B, 0x9A),
                    (0x01, 0x91, 0xC8),
                    (0x66, 0x99, 0x66),
                    (0x84, 0x59, 0x6B),
                    (0x98, 0x98, 0x98),
                    (0xC8, 0x60, 0x00))


# Cycle through the palette instead of indexing directly: more than six
# input files used to raise IndexError here (colours now repeat instead).
for i, f in enumerate(input_file_names):
    file_name_to_colours[get_file_part(f)] = contrasty_colours[i % len(contrasty_colours)]

#
#   Remove duplicates
#
@transform(input_file_names, regex(r"/data/([^/]+).bam"), [r"/display_on_ucsc/\1.nodup.bam", r"/display_on_ucsc/\1.duplicates_removed"], logger_proxy, logging_mutex)
def remove_duplicates (input_file, output_files, logger_proxy, logging_mutex):
    """
    Remove PCR duplicates from a bam file with samtools rmdup, then
    touch a flag file so ruffus can tell the task completed.
    """
    output_file, flag_file = output_files
    cmd = "samtools rmdup -s %s %s" % (input_file, output_file)
    run_cmd (cmd, "Remove duplicates from %s" % input_file, logging_mutex, logger_proxy)
    # Touch the completion flag only after the command succeeded, and
    # close the handle explicitly rather than leaking it to the GC.
    open(flag_file, "w").close()
    with logging_mutex:
        logger_proxy.log(MESSAGE, cmd)


#
#   Create bedgraph files
#
@transform(remove_duplicates, suffix(".nodup.bam"), [".bedgraph", ".bedgraph_created"], logger_proxy, logging_mutex)
def create_bedgraphs (input_file, output_files, logger_proxy, logging_mutex):
    """
    Write a UCSC bedGraph for a de-duplicated bam file: a track header
    line with this file's assigned colour, followed by the output of
    genomeCoverageBed appended by the shell. Touches a flag file when done.
    """
    # Upstream task produces [nodup.bam, flag_file]; we want the bam.
    input_file = input_file[0]
    output_file, flag_file = output_files

    # get colour for this strain
    file_part = get_file_part(output_file)
    colour = tuple(file_name_to_colours[file_part])

    #
    #   write header line; the with-statement guarantees the handle is
    #   flushed and closed before the shell command appends to the file
    #
    with open(output_file, "w") as output:
        output.write("track type=bedGraph name='%s' description='%s' color=%d,%d,%d\n" % ((file_part, "DNA hypersensitivity data for %s" % file_part) + colour))

    cmd = "genomeCoverageBed -bg -ibam %s -g %s >> %s" % (input_file, options.genome_sizes_file, output_file)
    run_cmd (cmd, "Genome coverage for %s" % input_file, logging_mutex, logger_proxy)
    # Touch and explicitly close the completion flag.
    open(flag_file, "w").close()
    with logging_mutex:
        logger_proxy.log(MESSAGE, cmd)

#
#   Create bigwig files
#
@transform(create_bedgraphs, suffix(".bedgraph"), [".bw", ".bigwig_created"], logger_proxy, logging_mutex)
def create_bigwigs (input_file, output_files, logger_proxy, logging_mutex):
    """
    Convert a bedGraph file to BigWig with bedGraphToBigWig, then touch
    a flag file so ruffus can tell the task completed.
    """
    # Upstream task produces [bedgraph, flag_file]; we want the bedgraph.
    input_file = input_file[0]
    output_file, flag_file = output_files

    cmd = "bedGraphToBigWig %s %s %s" % (input_file, options.genome_sizes_file, output_file)
    # Progress message fixed: it used to say "Genome coverage", a
    # copy-paste from the bedgraph step; this step builds a BigWig.
    run_cmd (cmd, "BigWig creation for %s" % input_file, logging_mutex, logger_proxy)
    # Touch and explicitly close the completion flag.
    open(flag_file, "w").close()
    with logging_mutex:
        logger_proxy.log(MESSAGE, cmd)


#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

#   Main logic


#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
if __name__ == '__main__':

    # Dispatch on mutually exclusive modes: dry-run printout, flowchart
    # generation, or actually running the pipeline.
    if options.just_print:
        pipeline_printout(sys.stdout, options.target_tasks, options.forced_tasks,
                            verbose=options.verbose)

    elif options.flowchart:
        # Use a context manager so the flowchart file is closed (and
        # flushed) deterministically; it used to be left to the GC.
        with open(options.flowchart, "w") as flowchart_file:
            pipeline_printout_graph (flowchart_file,
                                 options.flowchart_format,
                                 options.target_tasks,
                                 options.forced_tasks,
                                 draw_vertically = not options.draw_horizontally,
                                 no_key_legend   = not options.key_legend_in_graph)
    else:
        # NOTE(review): this passes ruffus's stderr_logger rather than
        # the `logger`/`logger_proxy` configured above, so pipeline
        # progress bypasses the log file — confirm whether that is
        # intentional.
        pipeline_run(options.target_tasks, options.forced_tasks,
                            multiprocess    = options.jobs,
                            logger          = stderr_logger,
                            verbose         = options.verbose)





    #for file_name in input_file_names:
    #    samtools rmdup -s display_on_ucsc/WTCHG_10808.chr11_region.bam display_on_ucsc/WTCHG_10808.chr11_region.nodup.bam
    #
    #echo "track type=bedGraph name=track_label description=center_label color=255,0,0" > display_on_ucsc/aj.chr11_region.bedgraph
    #genomeCoverageBed -bg -ibam display_on_ucsc/aj.chr11_region.bam -g display_on_ucsc/genome_sizes_from_ucsc.data >> display_on_ucsc/aj.chr11_region.bedgraph
    #python compress_bedgraph.py -i display_on_ucsc/aj.chr11_region.bedgraph -o display_on_ucsc/aj.chr11_region.compressed.bedgraph

