/*
 *  Copyright (C) 2010 Martin Haulrich <mwh.isv@cbs.dk> and Matthias Buch-Kromann <mbk.isv@cbs.dk>
 *
 *  This file is part of the IncrementalParser package.
 *
 *  The IncrementalParser program is free software: you can redistribute it and/or modify
 *  it under the terms of the GNU Lesser General Public License as published by
 *  the Free Software Foundation, either version 3 of the License, or
 *  (at your option) any later version.
 *
 *  This program is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *  GNU Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public License
 *  along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
package org.osdtsystem.incparser.trainers;

import org.osdtsystem.incparser.loss.Loss;
import java.io.IOException;
import org.osdtsystem.incparser.data.ConllIterator;
import org.osdtsystem.incparser.data.CONLLSentence;
import org.osdtsystem.incparser.features.FeatureVector;
import org.osdtsystem.incparser.learners.Learner;
import org.osdtsystem.incparser.features.WeightVector;
import org.osdtsystem.incparser.graphs.ConllAdapter;
import org.osdtsystem.incparser.logging.ProgressLine;
import org.osdtsystem.incparser.logging.Log;
import org.osdtsystem.incparser.parsers.Parser;

/**
 *
 * @author Martin Haulrich and Matthias Buch-Kromann
 */
/**
 * Trainer that drives online learning over CoNLL-format treebank data.
 *
 * <p>Training proceeds in three stages: (1) one pass over the corpus to build the
 * feature alphabet (using {@code parser2} if present, else {@code parser1}),
 * (2) {@code iterations1} training iterations with {@code parser1}, and (3) the
 * remaining iterations with {@code parser2}. The learner's weights are averaged
 * at the end and returned.
 *
 * @author Martin Haulrich and Matthias Buch-Kromann
 */
public class ConllTrainer implements Trainer {
    final Learner learner;
    final Parser parser1, parser2;
    final Loss loss;
    // Number of sentences in the corpus; counted during the alphabet pass and
    // reused to size the progress line and the averaging update factor.
    int sentences = 0;

    /**
     * Creates a trainer over the given learner/parser/loss combination.
     *
     * @param learner online learner whose weight vector is updated during training
     * @param parser1 parser used for the first {@code iterations1} iterations
     * @param parser2 parser used for the remaining iterations; may be {@code null},
     *                in which case {@code parser1} also builds the alphabet
     * @param loss    loss function comparing gold and system graphs
     */
    public ConllTrainer(Learner learner, Parser parser1, Parser parser2, Loss loss) {
        this.learner = learner;
        this.parser1 = parser1;
        this.parser2 = parser2;
        this.loss = loss;
    }

    /**
     * Runs the full training procedure and returns the averaged weight vector.
     *
     * @param sentenceIterator iterator over the training sentences; reset before each pass
     * @param iterations       total number of training iterations
     * @param iterations1      iterations run with {@code parser1}; the rest use {@code parser2}
     * @return the averaged weight vector produced by the learner
     * @throws IOException if reading the training data fails
     */
    public WeightVector train(ConllIterator sentenceIterator, int iterations, int iterations1)
            throws IOException {
        int iterations2 = iterations - iterations1;

        // Record starting time
        Log.pushStartTime();

        // Create symbols (feature alphabet). parser2's feature space is preferred
        // because it is the parser used in the final training stage.
        Parser alphabetParser = parser2 != null ? parser2 : parser1;
        ConllAdapter graph = alphabetParser.newGraph(learner.weights());
        Log.info("\nCreate alphabet with parser" + (parser2 != null ? "2" : "1")
                + " (prints dot every 100 sentences)");
        Log.pushStartTime();
        trainStageCreateAlphabet(alphabetParser, graph, sentenceIterator);
        // Reclaim alphabet-construction garbage so the heap figure below is meaningful.
        System.gc();
        Log.info("Finished alphabet creation in " + Log.secondsElapsed() + " seconds with "
                + alphabetParser.edgeTypes() + " etypes, "
                + alphabetParser.strings() + " strings, "
                + alphabetParser.extractionFeatures() + " extfeatures ("
                + Log.memoryUsed() + " MB heap)\n");
        Log.popStartTime();

        // Train with parser1
        if (iterations1 > 0) {
            Log.info("Training with parser1 (" + iterations1 + " iterations, "
                    + sentences + " sentences): "
                    + parser1.getClass().getSimpleName());
            Log.pushStartTime();
            trainStageParse(parser1, graph, sentenceIterator, iterations1);
            Log.info("Finished parser1 training (" + Log.secondsElapsed() + " seconds, "
                + Log.memoryUsed() + " MB heap)\n");
            Log.popStartTime();
        }

        // Train with parser2
        if (iterations2 > 0) {
            Log.info("Train with parser2 (" + iterations2 + " iterations, " +
                    sentences + " sentences): "
                    + parser2.getClass().getSimpleName());
            Log.pushStartTime();
            trainStageParse(parser2, graph, sentenceIterator, iterations2);
            Log.info("Finished parser2 training in " + Log.secondsElapsed() + " seconds ("
                + Log.memoryUsed() + " MB heap)\n");
            Log.popStartTime();
        }

        // Calculate averaged weight vector
        learner.averageWeights();
        WeightVector weights = learner.weights();
        Log.info("Created weight vector with " + weights.features() + " features");

        // Log training time
        Log.info("Finished training in " + Log.secondsElapsed() + " seconds");
        return weights;
    }

    /**
     * Pass 1: streams every sentence through the gold graph once so that the
     * parser registers all feature symbols, then freezes the alphabet.
     * Side effect: sets {@link #sentences} to the corpus size.
     *
     * @param parser           parser whose alphabet is being populated
     * @param goldGraph        reusable graph the sentences are written into
     * @param sentenceIterator training data; reset before the pass
     * @throws IOException if reading the training data fails
     */
    void trainStageCreateAlphabet(final Parser parser, final ConllAdapter goldGraph,
            final ConllIterator sentenceIterator) throws IOException {
        sentences = 0;
        sentenceIterator.reset();
        while (sentenceIterator.hasNext()) {
            goldGraph.clear();
            sentenceIterator.next().writeToGraph(goldGraph);
            // featureVector() is called for its side effect of interning features.
            goldGraph.featureVector();
            sentences++;
            if (sentences % 100 == 0)
                Log.infoW(".");
        }
        // Freeze the alphabet: no new features may be added during training.
        parser.stopGrowth();
        Log.infoW("\n");
    }

    /**
     * Pass 2/3: runs {@code iterations} online-training iterations with the given
     * parser, updating the learner after every sentence.
     *
     * @param parser           parser used to produce the system analysis
     * @param gold             reusable graph holding the gold-standard analysis
     * @param sentenceIterator training data; reset before each iteration
     * @param iterations       number of passes over the corpus
     * @throws IOException if reading the training data fails
     */
    void trainStageParse(final Parser parser, final ConllAdapter gold,
            final ConllIterator sentenceIterator,
            final int iterations)
            throws IOException {
        for (int iteration = 0; iteration < iterations; iteration++) {
            // Initialize training
            // BUGFIX: this statement had been accidentally merged into the comment
            // above, so openIteration() was never called while closeIteration()
            // below still ran, unbalancing the progress handler.
            Log.phandler.openIteration("Training iteration " + iteration);
            double totalLoss = 0;
            long words = 0;
            sentenceIterator.reset();
            WeightVector weights = learner.weights();
            ConllAdapter system = parser.newGraph(weights);

            // Create progress line (sentences was counted in the alphabet pass)
            ProgressLine progress = new ProgressLine(sentences, 10);

            // Iterate over sentences
            Log.pushStartTime();
            int sent = 0;
            while (sentenceIterator.hasNext()) {
                // Read next sentence and write it to gold and system graph
                CONLLSentence sentence = sentenceIterator.next();
                Log.phandler.openSentence(sentence.toTokenString());
                gold.clear();
                system.clear();
                sentence.writeToGraph(gold);
                sentence.writeToGraph(system);
                // The system graph keeps only the tokens: edges and gold
                // annotation are stripped so the parser must predict them.
                system.clearEdges();
                system.clearGold();
                // size() - 1: presumably excludes the artificial root token — TODO confirm.
                words += sentence.size() - 1;

                // Parse system graph
                system = parser.parse(learner.weights(), system);

                // Calculate loss and feature vectors
                double currentLoss = loss.loss(gold, system);
                totalLoss += currentLoss;
                FeatureVector goldFV = gold.featureVector();
                FeatureVector systemFV = system.featureVector();

                // Update weight for averaged perceptron/MIRA-style averaging.
                // NOTE(review): this decay differs by a small constant from the
                // standard "remaining updates + 1" factor (Collins 2002); verify
                // the "- (sent + 2) - 1" term is intentional.
                double updateFactor = sentences * (iterations - iteration) - (sent + 2) - 1;
                learner.update(goldFV, systemFV, currentLoss, updateFactor);

                progress.progress(++sent);
                Log.phandler.closeSentence();
            }
            Log.info("   finished iteration: " + (iteration + 1) + "/"
                    + (iterations) + " [" + Log.secondsElapsed() + " seconds,  "
                    + totalLoss / words + " avg.loss/word, "
                    + Log.memoryUsed() + " MB heap]");
            Log.popStartTime();
            Log.phandler.closeIteration();
        }
    }
}
