/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */

package biofilter.pipelines;

import biofilter.exceptions.InvalidRecordException;
import biofilter.filters.FillGapFilter;
import biofilter.filters.HistogramFilter;
import biofilter.filters.RecordsFile2GFF;
import biofilter.filters.RecordsFile2MappedPosition;
import biofilter.filters.ScoreRegionsFMHistoFilter;
import biofilter.filters.ScoredRegions2OperonFilter;
import biofilter.records.FileNameRecord;
import biofilter.records.GFFRecord;
import biofilter.records.HistogramRecord;
import biofilter.records.OperonRecord;
import biofilter.records.Record;
import biofilter.records.ScoredRegionRecord;
import biofilter.sources.RecordsFile;
import biofilter.util.ProcessUtil;
import biofilter.util.SourceUtil;
import biofilter.util.SystemProperties;
import java.io.BufferedWriter;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.util.Vector;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.cli.*;

/**
 * The start site pipeline reads a set of raw reads, converts them to mapped positions
 * and then uses the mapped positions, a GFF file of genes, and the genome.fasta file to identify transcriptional
 * start sites genome wide.
 * @author Daniel Quest
 * @date   April 8, 2010
 */
public class StartSitePipeline implements PipelineIF {

    // Genes w/ average coverage score <= 0.5 are flagged as insufficient data (command line: -c/--coverage).
    private double geneCoverage = 0.5;
    // Genes w/ a gap on one strand or the other > 35bp are flagged as insufficient data (command line: -x/--gapsize).
    private int allowableGapSize = 35;

    //Pipes and filters for building histograms from raw reads...
    private RecordsFile fnrf = null;
    private HistogramFilter histoF = null;
    private RecordsFile2GFF rf2gff = null;

    //Pipes and filters for processing the annotation data (in GFF format)
    private RecordsFile2GFF anno2gff = null;
    private RecordsFile annGFF = null;
    private FillGapFilter cdsAndGaps = null;

    //Pipes and filters for scoring regions
    private ScoreRegionsFMHistoFilter srf = null;

    //General Utility Classes
    private ProcessUtil pu = new ProcessUtil();
    private SystemProperties sp = null;
    private SourceUtil su = new SourceUtil();

    //Command-line configurable paths (":" is a sentinel meaning "not set").
    String fileNameRecordsFile = ":";
    String annotationFile = ":";
    String outputFile = "operons.gff";

    protected Options options = null;

    public StartSitePipeline() {
    }

    /**
     * Wires up the read-histogram pipeline (raw read paths -> GFF records -> histogram)
     * and the annotation pipeline. Must be called after {@link #setup(String[])} has
     * populated {@code fileNameRecordsFile} and {@code annotationFile}.
     */
    private void init() {
        try {
            //General Utility Class Setup:
            sp = new SystemProperties();

            //Pipes and Filters for building the histogram.
            this.fnrf = new RecordsFile(fileNameRecordsFile,
                                        "biofilter.records.FileNameRecord");
            rf2gff = new RecordsFile2GFF();
            rf2gff.setSource(this.fnrf);
            histoF = new HistogramFilter();
            histoF.setSource(rf2gff);

            initAnnotation();

        } catch (IOException ex) {
            Logger.getLogger(StartSitePipeline.class.getName()).log(Level.SEVERE,
                    "Can not use SystemProperties in StartSitesPipeline", ex);
        }
    }

    /**
     * (Re-)creates the annotation stream: reads the GFF annotation file and feeds it
     * through the gap-filling filter. Called once from {@link #init()} and again for
     * every histogram in {@link #execPipeline()} because the filters are one-shot streams.
     */
    private void initAnnotation() {
        try {
            //Pipes and Filters for getting annotation data.
            annGFF = new RecordsFile(this.annotationFile, "biofilter.records.GFFRecord");
            cdsAndGaps = new FillGapFilter();
            cdsAndGaps.setSource(annGFF);
        } catch (InvalidRecordException ex) {
            Logger.getLogger(StartSitePipeline.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /**
     * Parses the command line and initializes the pipeline filters.
     * Recognized options: -h/--help, -i/--infile (required), -o/--outfile,
     * -a/--annotation, -g/--genome, -m/--mapper, -c/--coverage, -x/--gapsize.
     * Prints usage and exits when --help is given or --infile is missing.
     *
     * @param args raw command-line arguments
     * @throws org.apache.commons.cli.ParseException if the arguments cannot be parsed
     */
    public void setup(String[] args) throws org.apache.commons.cli.ParseException {
        options = new Options();
        //Setup Options
        //Boolean Options
        Option help = new Option("h", "help", false, "print help message");

        //Param Options
        Option infile = new Option("i", "infile", true, "This file contains paths to raw read files");
        Option outfile = new Option("o", "outfile", true, "GFF formated output for start site prediction results");
        Option anno = new Option("a", "annotation", true, "GFF formated input file representing the location of genes");
        Option genomeFA = new Option("g", "genome", true, "Fasta formated copy of the genome");
        Option mapper = new Option("m", "mapper", true, "Mapper Used e.g. GSMapper454, bowtie, blast (Default GFF records)");
        Option coverage = new Option("c", "coverage", true, "Genes with average coverage below this value are flagged as insufficent data (default 0.5)");
        Option gapSize = new Option("x", "gapsize", true, "Genes with with gaps larger than this are flagged as insufficent data (default 35bp)");

        //Add command line options to influence behavior
        options.addOption(help);
        options.addOption(infile);
        options.addOption(outfile);
        options.addOption(anno);
        options.addOption(genomeFA);
        options.addOption(mapper);
        options.addOption(coverage);
        options.addOption(gapSize);

        //Echo the command line (space-separated so it is readable / copy-pasteable).
        System.out.println("Input Command: ");
        StringBuilder cmdLine = new StringBuilder();
        for (int i = 0; i < args.length; i++) {
            if (i > 0) {
                cmdLine.append(' ');
            }
            cmdLine.append(args[i]);
        }
        System.out.println(cmdLine.toString());

        CommandLineParser parser = new GnuParser();
        CommandLine cmd = parser.parse(options, args);

        //Logic to handle standard command line options such as help.
        if (cmd.hasOption("help") || !cmd.hasOption("infile")) {
            // automatically generate the help statement
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("SeqGenOpts", options);
            // NOTE(review): exits 0 even when the required --infile is missing, so
            // callers cannot distinguish this from success via the exit status.
            System.exit(0);
        }
        //Set the path to the file containing the locations of the read files.
        if (cmd.hasOption("infile")) {
            this.fileNameRecordsFile = cmd.getOptionValue("infile");
            Logger.getLogger(StartSitePipeline.class.getName()).log(Level.INFO, "Input Paths File (references to reads): " + this.fileNameRecordsFile, this.fileNameRecordsFile);
        }
        //Set the path of the GFF output file.
        if (cmd.hasOption("outfile")) {
            this.outputFile = cmd.getOptionValue("outfile");
            Logger.getLogger(StartSitePipeline.class.getName()).log(Level.INFO, "Output File: " + this.outputFile, this.outputFile);
        }
        //Set the path of the GFF annotation file.
        if (cmd.hasOption("annotation")) {
            this.annotationFile = cmd.getOptionValue("annotation");
            Logger.getLogger(StartSitePipeline.class.getName()).log(Level.INFO, "Annotation File: " + this.annotationFile, this.annotationFile);
        }
        if (cmd.hasOption("coverage")) {
            // FIX: use the static Double.parseDouble instead of an instance call on a
            // deprecated boxing constructor (new Double("0")).
            this.geneCoverage = Double.parseDouble(cmd.getOptionValue("coverage"));
        }
        Logger.getLogger(StartSitePipeline.class.getName()).log(Level.INFO, "Gene Coverage Value: " + this.geneCoverage, this.geneCoverage);
        if (cmd.hasOption("gapsize")) {
            this.allowableGapSize = Integer.parseInt(cmd.getOptionValue("gapsize"));
        }
        // FIX: this previously logged "Gene Coverage Value" + geneCoverage (copy-paste bug).
        Logger.getLogger(StartSitePipeline.class.getName()).log(Level.INFO, "Allowable Gap Size: " + this.allowableGapSize, this.allowableGapSize);

        init();

        //Select the record type used to parse the mapped reads; default is GFF.
        if (cmd.hasOption('m')) {
            String mapperName = cmd.getOptionValue('m');
            if (mapperName.equalsIgnoreCase("GSMapper454")) {
                rf2gff.setMappedPositionType("biofilter.records.Mapper454Record");
            } else if (mapperName.equalsIgnoreCase("bowtie")) {
                rf2gff.setMappedPositionType("biofilter.records.BowTieRecord");
            } else if (mapperName.equalsIgnoreCase("blast")) {
                rf2gff.setMappedPositionType("biofilter.records.BlastTabRecord");
            } else if (mapperName.equalsIgnoreCase("GFF")) {
                rf2gff.setMappedPositionType("biofilter.records.GFFRecord");
            }
        } else {
            rf2gff.setMappedPositionType("biofilter.records.GFFRecord");
        }
    }

    /**
     * Demonstration entry point: runs the pipeline against a hard-coded
     * Clostridium thermocellum data set.
     */
    public static void main(String[] args) {
        try {
            String[] arguments = new String[13];
            arguments[0] = "StartSitePipeline";
            arguments[1] = "-m";
            arguments[2] = "GSMapper454";
            arguments[3] = "-c";
            arguments[4] = "3";
            arguments[5] = "-i";
            arguments[6] = "/auto/transcriptomics/runfiles/Clostridium_thermocellum_ATCC_27405/ALLSMASHED.run";
            arguments[7] = "-o";
            arguments[8] = "/auto/transcriptomics/runfiles/startSites.gff";
            arguments[9] = "-a";
            arguments[10] = "/auto/transcriptomics/genomes/Clostridium_thermocellum_ATCC_27405/NC_009012CDS.gff";
            arguments[11] = "-x";
            arguments[12] = "50";
            System.out.println("Starting Start Site Prediction Pipeline: ");
            StartSitePipeline ssp = new StartSitePipeline();
            ssp.setup(arguments);
            ssp.execPipeline();
        } catch (Exception e) {
            // FIX: log the full exception; e.getMessage() alone can be null and
            // loses the stack trace.
            Logger.getLogger(StartSitePipeline.class.getName()).log(Level.SEVERE, "Pipeline failed", e);
        }
    }

    /**
     * Runs the pipeline: for each histogram produced from the raw reads, re-creates
     * the annotation stream, scores every annotated region against the histogram,
     * and serializes the scored regions to
     * {@code ./test/testdata/ScoredRegionRecordExample.data}.
     *
     * @return always {@code null}; kept for backward compatibility with callers
     *         that ignore the result (see {@link #main(String[])})
     * @throws Exception if any filter in the pipeline fails
     */
    public Vector<Record> execPipeline() throws Exception {

        while (!histoF.eof()) {
            Logger.getLogger(StartSitePipeline.class.getName()).log(Level.INFO, "Constructing Profile... ");
            HistogramRecord hr = (HistogramRecord) histoF.getRecord();

            //Pipes and filters for the annotation stream (one-shot, so rebuilt per histogram).
            initAnnotation();

            //Pipes and filters for scoring regions
            Logger.getLogger(StartSitePipeline.class.getName()).log(Level.INFO, "Scoring Regions in the Annotation... ");
            srf = new ScoreRegionsFMHistoFilter(hr);
            srf.setRequiredCoverage(this.geneCoverage);
            srf.setMinReadLength(this.allowableGapSize);
            srf.setSource(cdsAndGaps);

            //Serialize the scored regions. FIX: the streams are now closed even when a
            //filter throws (previously they leaked).
            FileOutputStream f_out = new FileOutputStream("./test/testdata/ScoredRegionRecordExample.data");
            ObjectOutputStream obj_out = new ObjectOutputStream(f_out);
            try {
                while (!srf.eof()) {
                    ScoredRegionRecord srr = (ScoredRegionRecord) srf.getRecord();
                    System.err.println(srr.get());
                    obj_out.writeObject(srr);
                }
            } finally {
                obj_out.close(); // also closes f_out
            }

/* Disabled: operon construction from the scored regions (writes GFF to this.outputFile).
            ScoredRegions2OperonFilter sr2oper = new ScoredRegions2OperonFilter();
            sr2oper.setSource(srf);
            Logger.getLogger(StartSitePipeline.class.getName()).log(Level.INFO, "Constructing Operons... ");
            try{
                BufferedWriter out = new BufferedWriter(new FileWriter(this.outputFile));
                while(!sr2oper.eof()){
                    OperonRecord o = (OperonRecord) sr2oper.getRecord();
                    if(o != null){
                        out.write(o.get());
                        out.write("\n");
                    }else {
                        System.err.println("Coverage is TOO low to generate Operons!");
                    }
                }
                out.close();
            }catch (IOException e) {
                System.err.println(e);
            }
*/
            Logger.getLogger(StartSitePipeline.class.getName()).log(Level.INFO, "Finished, Thanks for Using!\n ");

        }//end while histoF.eof()

        return null;
    }

}
