/*
 * Trainer.java
 *
 * Created on August 10, 2006, 5:55 PM
 *
 */

package galronnlp.perceptron;

import galronnlp.io.XMLGrammarReader;
import galronnlp.util.ProductionGenerator;
import galronnlp.util.Symbol;
import galronnlp.util.SymbolGenerator;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.Iterator;

/**
 *
 * This class implements the parameter estimation methods for the perceptron parser.
 *
 * @author Daniel A. Galron
 */
public class Trainer {
    
    // Gold-standard parse trees used as training data.
    Treebank treebank;
    // Shared factories so equal productions/symbols are canonicalized across
    // the treebank, grammar, and parser.
    ProductionGenerator pg = new ProductionGenerator();
    SymbolGenerator sg     = new SymbolGenerator();
    // Presumably the start/stop sentinel symbols of the parser's representation;
    // not referenced elsewhere in this file — TODO confirm against Parser/Treebank.
    Symbol startSymbol  = sg.create("S!");
    Symbol endSymbol    = sg.create("/S");
    Symbol endSymbolPOS = sg.create("STOP");
    // The grammar being induced; g.parameters holds the model weights used by the parser.
    Grammar g = new Grammar(pg, sg);
    Parser parser;
    // Current (raw) perceptron weight vector, updated after each parsing mistake.
    FeatureVector parameters = new FeatureVector();
    // Running sum of the weight vector after every update; divided by
    // (treebank size * epochs) at the end of training to produce the averaged
    // parameters. NOTE: despite its name, this is the accumulator FOR averaging.
    FeatureVector nonAveragedParameters = new FeatureVector();
    // File the grammar is bootstrapped from (if it exists) and written to.
    File grammarFile = null;
    // Per-sentence cache of gold prefixes and parser stacks; only allocated
    // when training in "efficient" mode (see the constructor).
    Cache cache;

    boolean DEBUG = false;
    // When true, use early-update parsing as in Collins & Roark.
    boolean EarlyUpdate = false;
    // When true, use the cache-based "efficient" training variant.
    boolean RepeatedHypotheses = false;
    
    /**
     * Creates a new instance of Trainer.
     *
     * Builds the treebank, optionally bootstraps the grammar from an existing
     * grammar file (asking the user on stdin for confirmation), allocates the
     * parse cache when training in efficient mode, and constructs the parser.
     *
     * @param treeBankRoot    root directory of the treebank
     * @param full            if true use the full treebank, otherwise the WSJ sample
     * @param grammarFileName file the grammar is bootstrapped from / written to
     * @param gamma           beam parameter passed through to the parser
     * @param earlyUpdate     if true, use early-update parsing during training
     * @param efficient       if true, use the cache-based training variant
     */
    public Trainer(String treeBankRoot, boolean full, String grammarFileName, double gamma, boolean earlyUpdate, boolean efficient) {
        this.EarlyUpdate = earlyUpdate;
        this.RepeatedHypotheses = efficient;
        if(full)
            treebank = new Treebank(treeBankRoot, sg);
        else
            treebank = new Treebank(treeBankRoot, Treebank.SAMPLE, Treebank.WSJ, sg);
        treebank.build();
        grammarFile = new File(grammarFileName);
        if(grammarFile.exists()) {
            System.err.println("Grammar File already exists...");
            System.err.println("Use grammar file to bootstrap? [yes]");
            // Deliberately not closed: closing the reader would close System.in
            // for the remainder of the JVM.
            BufferedReader is = new BufferedReader(new InputStreamReader(System.in));
            String ans = "";
            try {
                String line = is.readLine();
                // readLine() returns null at end-of-stream; the original code
                // dereferenced it unconditionally and threw an NPE. Treat EOF
                // as an empty answer, which defaults to "yes" below.
                if(line != null)
                    ans = line;
            } catch (IOException ex) {
                ex.printStackTrace();
            }
            if(ans.length() == 0 || ans.equalsIgnoreCase("yes") || ans.equalsIgnoreCase("y")) {
                XMLGrammarReader reader = new XMLGrammarReader(grammarFileName, pg, sg);
                this.g = reader.getGrammar();
            } else if(ans.equalsIgnoreCase("no") || ans.equalsIgnoreCase("n")) {
                System.err.println("Aborting");
                System.exit(2);
            } else {
                System.err.println("Unknown answer. Aborting anyway.");
                System.exit(2);
            }
        }
        if(efficient)
            cache = new Cache(treebank.size());
        parser = new Parser(g, gamma);
    }
    
    /**
     * Command-line entry point.
     *
     * Parses the options, validates that all required ones were supplied,
     * constructs a {@link Trainer}, and runs either the efficient (cached)
     * or the basic perceptron training loop.
     *
     * Required: -tb &lt;treeBankRoot&gt; -s &lt;full|sample&gt; -gr &lt;grammarFileName&gt;
     * -t &lt;T&gt; -ga &lt;gamma&gt;. Optional: --efficient &lt;T2&gt;, --early.
     */
    public static void main(String[] args) {
        String treebankRoot = "";
        boolean full = true;
        String grammarFileName = "";
        double gamma = 0.0;
        int T = 0;
        int T2 = 0;
        boolean early = false;
        boolean efficient = false;
        // Presence flags for the required arguments. The original code set
        // these but never checked them, so a missing required option silently
        // trained with default values; they are now validated after the loop.
        boolean tb = false;
        boolean s = false;
        boolean gr = false;
        boolean ga = false;
        boolean t = false;
        for(int i = 0; i < args.length; i++) {
            if(args[i].equalsIgnoreCase("-tb")) {
                treebankRoot = optionValue(args, ++i);
                tb = true;
            } else if(args[i].equalsIgnoreCase("-s")) {
                full = optionValue(args, ++i).equalsIgnoreCase("full");
                s = true;
            } else if(args[i].equalsIgnoreCase("-gr")) {
                grammarFileName = optionValue(args, ++i);
                gr = true;
            } else if(args[i].equalsIgnoreCase("-ga")) {
                gamma = Double.parseDouble(optionValue(args, ++i));
                ga = true;
            } else if(args[i].equalsIgnoreCase("-t")) {
                T = Integer.parseInt(optionValue(args, ++i));
                t = true;
            } else if(args[i].equalsIgnoreCase("--efficient")) {
                efficient = true;
                T2 = Integer.parseInt(optionValue(args, ++i));
            } else if(args[i].equalsIgnoreCase("--early")) {
                early = true;
            } else {
                System.err.println("Error: Unknown command line option " + args[i]);
                usageAndExit();
            }
        }
        // All five required options must have been seen.
        if(!(tb && s && gr && ga && t))
            usageAndExit();
        Trainer trainer = new Trainer(treebankRoot, full, grammarFileName, gamma, early, efficient);
        if(efficient)
            trainer.EfficientPerceptronTraining(T, T2);
        else
            trainer.BasicPerceptronTraining(T);
    }

    /** Prints the usage message to stderr and terminates with exit status 2. */
    private static void usageAndExit() {
        System.err.println("Usage: galronnlp.perceptron.Trainer -tb <treeBankRoot> -s <full|sample> -gr <grammarFileName> -t <T> -ga <gamma> [--efficient <T2>] [--early]");
        System.exit(2);
    }

    /**
     * Returns the value argument at index {@code i}, or exits with the usage
     * message if the preceding option was the last token on the command line
     * (the original code would have thrown ArrayIndexOutOfBoundsException).
     */
    private static String optionValue(String[] args, int i) {
        if(i >= args.length)
            usageAndExit();
        return args[i];
    }
    
    /**
     * Adds every tree in the treebank to the grammar, printing a progress
     * dot to stderr for every tenth tree, and (when DEBUG is set) dumping
     * grammar statistics afterwards.
     */
    public void retrieveGrammar() {
        System.err.println("Treebank size: " + treebank.size());
        int idx = 0;
        // Re-query the size each iteration in case building the grammar is
        // interleaved with treebank growth.
        while(idx < treebank.size()) {
            Node current = treebank.get(idx);
            current.addToGrammar(g);
            if(idx % 10 == 0) {
                System.err.print(".");
            }
            idx++;
        }
        System.err.print("\n");
        if(DEBUG) {
            // Sanity-check output: chain and triple counts, then the grammar itself.
            System.err.println(g.chains.size());
            System.err.println(g.allowableTriples.size());
            System.out.println(g);
        }
    }
    
    /**
     * Cache-based ("efficient") variant of perceptron training, in the style
     * of Collins &amp; Roark.
     *
     * Step 1 makes one full pass over the treebank, parsing every sentence,
     * applying a perceptron update on each mistake, and storing the
     * (possibly prefix-truncated) gold tree and the parser's stack in the
     * cache. Step 2 then runs T1 epochs that re-score the cached hypotheses
     * instead of re-parsing from scratch, only falling back to a full
     * re-parse when the cached stack holds no complete parse. Finally the
     * averaged parameters are installed in the grammar and written to disk.
     *
     * @param T1 number of epochs for the cached re-scoring phase (step 2)
     * @param T2 per-sentence update budget: a sentence is skipped in step 2
     *           once cache.count(j) reaches T2
     */
    public void EfficientPerceptronTraining(int T1, int T2) {
        // Step 1: initialize the Cache
        System.out.println("Initializing cache");
        for(Iterator<Node> i = treebank.iterator(); i.hasNext();) {
            Node tree = i.next();
            // NOTE(review): indexOf presumably does a linear scan of the
            // treebank for each tree, making this pass quadratic overall;
            // a running counter would be O(n) — confirm against Treebank.
            int index = treebank.indexOf(tree);
            if(index % 10 == 0)
                System.err.println("i = " + index);
            tree.fireFeatures(g.rules);
            //tree.addToGrammar(g);
            // Decode: best parse under the current parameters (optionally
            // with early update against the gold tree).
            Node zi;
            if(EarlyUpdate)
                zi = parser.earlyUpdateParse(tree.getSentence(), index, tree);
            else
                zi = parser.parse(tree.getSentence(), index);
            // If the parser only returned a partial parse, get the prefix of
            // the gold standard parse
            boolean full = true;
            if(zi.getSentence().size() != tree.getSentence().size()) {
                tree = tree.getPrefix(zi.getSentence().size());
                full = false;
            }
            // If the derived tree is not equal to the gold-standard parse,
            // then modify the parameter vector
            if(!tree.equals(zi)) {
                if(DEBUG) {
                    System.out.println("\n\n\n\n--------------------------");
                    System.out.println(tree.subtreeFeatures);
                    //System.out.println(tree);
                    System.out.println("--------------------------");
                    System.out.println(zi.subtreeFeatures);
                    //System.out.println(zi);
                }
                // alpha = alpha + phi(sentence,tree) - phi(sentence, z_i)
                parameters = (parameters.add(tree.subtreeFeatures)).sub(zi.subtreeFeatures);
                System.err.println("modifying parameters!");
                // average parameters as described in Collins & Roark
                // To use unaveraged parameters, comment out the following two lines, and
                // uncomment the next
                // NOTE: This kind of diverges from the training algorithm described
                // in the Collins & Roark paper, in that they only average the
                // parameters after inducing them - that is, during training,
                // they don't use averaged parameters, only when testing.

                nonAveragedParameters = nonAveragedParameters.add(parameters);
                //g.parameters = nonAveragedParameters.div(treebank.size() * T1);
                g.parameters = parameters;
                if(DEBUG) {
                    System.out.println("==========================");
                    System.out.println(g.parameters.compress());
                    System.out.println("--------------------------\n\n\n\n");
                    System.exit(2);
                }
                // Only mispredicted sentences enter the cache; correctly
                // parsed ones never get a cache entry and are skipped by
                // step 2 below (cache.count / cache.isFull behavior for
                // missing entries is defined in Cache — TODO confirm).
                cache.add(index, tree, parser.stack, full);
            }
        }
        // STEP 2: T1 epochs of re-scoring the cached hypotheses.
        for(int t = 0; t < T1; t++) {
            for(int j = 0; j < treebank.size(); j++) {
                // Skip sentences that have already used up their T2 updates.
                if(cache.count(j) < T2) {
                    // This section is needed if the parser stack stored at j
                    // does not contain any complete parses. In that case,
                    // we should completely reparse the sentence, in hopes
                    // of getting a stack with full parses
                    if(!cache.isFull(j)) {
                        Node tree = treebank.get(j);
                        tree.fireFeatures(g.rules);
                        Node zi;
                        if(EarlyUpdate)
                            zi = parser.earlyUpdateParse(tree.getSentence(), j, tree);
                        else
                            zi = parser.parse(tree.getSentence(), j);
                        // If the parser only returned a partial parse, get the prefix of
                        // the gold standard parse
                        boolean full = true;
                        // NOTE(review): this compares against the CACHED tree's
                        // length, whereas the parallel code in step 1 compares
                        // against the freshly loaded gold tree. If the cached
                        // entry is already a prefix, a full re-parse may be
                        // misclassified as full/partial here — confirm intent.
                        if(zi.getSentence().size() != cache.get(j).getSentence().size()) {
                            tree = tree.getPrefix(zi.getSentence().size());
                            full = false;
                        }
                        cache.add(j, tree, parser.stack, full, cache.count(j) + 1);
                    }
                    // Re-score: gold tree and best cached hypothesis for j.
                    Node gj = cache.get(j);
                    Node z  = cache.getBestParse(j);
                    // If the parser only returned a partial parse, get the prefix of
                    // the gold standard parse
                    if(z.getSentence().size() != gj.getSentence().size())
                        gj = gj.getPrefix(z.getSentence().size());
                    // If the derived tree is not equal to the gold-standard parse,
                    // then modify the parameter vector
                    if(!gj.equals(z)) {
                        if(DEBUG) {
                            System.out.println("\n\n\n\n--------------------------");
                            System.out.println(gj.subtreeFeatures);
                            //System.out.println(gj);
                            System.out.println("--------------------------");
                            System.out.println(z.subtreeFeatures);
                            //System.out.println(z);
                        }
                        // alpha = alpha + phi(sentence,tree) - phi(sentence, z_i)
                        parameters = (parameters.add(gj.subtreeFeatures)).sub(z.subtreeFeatures);
                        System.err.println("modifying parameters!");
                        // average parameters as described in Collins & Roark
                        // To use unaveraged parameters, comment out the following two lines, and
                        // uncomment the next
                        // NOTE: This kind of diverges from the training algorithm described
                        // in the Collins & Roark paper, in that they only average the
                        // parameters after inducing them - that is, during training,
                        // they don't use averaged parameters, only when testing.
                        
                        nonAveragedParameters = nonAveragedParameters.add(parameters);
                        //g.parameters = nonAveragedParameters.div(treebank.size() * T1);
                        g.parameters = parameters;
                        if(DEBUG) {
                            System.out.println("==========================");
                            System.out.println(g.parameters.compress());
                            System.out.println("--------------------------\n\n\n\n");
                        }
                        // Record that sentence j consumed one of its T2 updates.
                        cache.update(j);
                    }
                }
            }
        }
        // Install the averaged parameters (running sum / number of updates).
        // Comment out the next line if you don't want to use the averaged
        // parameters
        g.parameters = nonAveragedParameters.div(treebank.size() * T1);
        g.writeXML(this.grammarFile);
    }
    
    /**
     * Runs T epochs of the basic perceptron training algorithm of
     * Collins &amp; Roark over the entire treebank, then installs the
     * averaged parameters in the grammar and writes the grammar to
     * the grammar file.
     *
     * @param T the number of passes (epochs) to make over the treebank
     */
    public void BasicPerceptronTraining(int T) {
        for(int epoch = 0; epoch < T; epoch++) {
            for(Iterator<Node> it = treebank.iterator(); it.hasNext();) {
                Node gold = it.next();
                int idx = treebank.indexOf(gold);
                // Progress report every tenth sentence.
                if(idx % 10 == 0)
                    System.err.println("i = " + idx);
                gold.fireFeatures(g.rules);
                // The chains and allowable triples only need to be added to
                // the grammar once, on the first epoch.
                if(epoch == 0) {
                    gold.addToGrammar(g);
                }
                // Decode: find the best-scoring parse under the current
                // parameters (optionally with early update).
                Node best;
                if(EarlyUpdate)
                    best = parser.earlyUpdateParse(gold.getSentence(), idx, gold);
                else
                    best = parser.parse(gold.getSentence(), idx);
                // A partial parse is compared against the matching prefix of
                // the gold-standard tree.
                if(best.getSentence().size() != gold.getSentence().size())
                    gold = gold.getPrefix(best.getSentence().size());
                // Standard perceptron update whenever the decoder is wrong.
                if(!gold.equals(best)) {
                    if(DEBUG) {
                        System.out.println("\n\n\n\n--------------------------");
                        System.out.println(gold.subtreeFeatures);
                        System.out.println("--------------------------");
                        System.out.println(best.subtreeFeatures);
                    }
                    // alpha = alpha + phi(sentence, gold) - phi(sentence, best)
                    parameters = (parameters.add(gold.subtreeFeatures)).sub(best.subtreeFeatures);
                    System.err.println("modifying parameters!");
                    // Accumulate the running sum used for parameter averaging
                    // (Collins & Roark). Decoding during training uses the
                    // raw, unaveraged parameters.
                    nonAveragedParameters = nonAveragedParameters.add(parameters);
                    g.parameters = parameters;
                    if(DEBUG) {
                        System.out.println("==========================");
                        System.out.println(g.parameters.compress());
                        System.out.println("--------------------------\n\n\n\n");
                        System.exit(2);
                    }
                }
            }
        }
        // Replace the working parameters with their average over all updates
        // before writing the grammar out.
        g.parameters = nonAveragedParameters.div(treebank.size() * T);
        g.writeXML(this.grammarFile);
    }
}
