using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using LMDecoder;

namespace rnnLMConsole
{
    class Program
    {
        /// <summary>
        /// Returns the index of the first occurrence of <paramref name="str"/> in
        /// <paramref name="args"/>, or -1 when it is not present.
        /// </summary>
        static int argPos(string str, string[] args)
        {
            return Array.IndexOf(args, str);
        }

        /// <summary>
        /// Returns the value that follows option <paramref name="str"/> on the
        /// command line, or null when the option is absent or has no value
        /// (i.e. it is the last token). Guards against the IndexOutOfRangeException
        /// that the previous inline "args[i + 1]" lookups threw for a trailing flag.
        /// </summary>
        static string argValue(string str, string[] args)
        {
            int i = Array.IndexOf(args, str);
            return (i >= 0 && i + 1 < args.Length) ? args[i + 1] : null;
        }

        /// <summary>Prints the command-line reference for the tool.</summary>
        static void Usage()
        {
            Console.WriteLine("Recurrent neural network based language modeling toolkit v 0.3d");

            Console.WriteLine("Options:");

            //
            Console.WriteLine("Parameters for training phase:");

            Console.WriteLine("-train <file>");
            Console.WriteLine("\tUse text data from <file> to train rnnlm model");

            Console.WriteLine("-class <int>");
            Console.WriteLine("\tWill use specified amount of classes to decompose vocabulary; default is 100");

            Console.WriteLine("-old-classes");
            Console.WriteLine("\tThis will use old algorithm to compute classes, which results in slower models but can be a bit more precise");

            Console.WriteLine("-rnnlm <file>");
            Console.WriteLine("\tUse <file> to store rnnlm model");

            Console.WriteLine("-valid <file>");
            Console.WriteLine("\tUse <file> as validation data");

            Console.WriteLine("-alpha <float>");
            Console.WriteLine("\tSet starting learning rate; default is 0.1");

            Console.WriteLine("-beta <float>");
            Console.WriteLine("\tSet L2 regularization parameter; default is 1e-7");

            Console.WriteLine("-hidden <int>");
            Console.WriteLine("\tSet size of hidden layer; default is 30");

            Console.WriteLine("-compression <int>");
            Console.WriteLine("\tSet size of compression layer; default is 0 (not used)");

            Console.WriteLine("-direct <int>");
            Console.WriteLine("\tSets size of the hash for direct connections with n-gram features in millions; default is 0");

            Console.WriteLine("-direct-order <int>");
            Console.WriteLine("\tSets the n-gram order for direct connections; default is 3");

            Console.WriteLine("-bptt <int>");
            Console.WriteLine("\tSet amount of steps to propagate error back in time; default is 0 (equal to simple RNN)");

            Console.WriteLine("-bptt-block <int>");
            Console.WriteLine("\tSpecifies amount of time steps after which the error is backpropagated through time in block mode (default 10, update at each time step = 1)");

            Console.WriteLine("-one-iter");
            Console.WriteLine("\tWill cause training to perform exactly one iteration over training data (useful for adapting final models on different data etc.)");

            Console.WriteLine("-save-step <int>");
            Console.WriteLine("\tModel will be saved during training after processing specified amount of sentences");

            Console.WriteLine("-min-improvement <float>");
            Console.WriteLine("\tSet minimal relative entropy improvement for training convergence; default is 1.003");

            Console.WriteLine("-gradient-cutoff <float>");
            Console.WriteLine("\tSet maximal absolute gradient value (to improve training stability, use lower values; default is 15, to turn off use 0)");

            //

            Console.WriteLine("Parameters for testing phase:");

            Console.WriteLine("-rnnlm <file>");
            Console.WriteLine("\tRead rnnlm model from <file>");

            Console.WriteLine("-test");
            Console.WriteLine("\tFlag for running the tool with test mode");

            Console.WriteLine("-lambda <float>");
            Console.WriteLine("\tSet parameter for linear interpolation of rnnlm and other lm; default weight of rnnlm is 0.75");

            Console.WriteLine("-dynamic <float>");
            Console.WriteLine("\tSet learning rate for dynamic model updates during testing phase; default is 0 (static model)");

            //

            Console.WriteLine("Additional parameters:");

            Console.WriteLine("-gen <int>");
            Console.WriteLine("\tGenerate specified amount of words given distribution from current model");

            Console.WriteLine("Examples:");
            Console.WriteLine("RnnLMConsole.exe -train train.txt -valid valid.txt -rnnlm model -hidden 100 -class 100 -bptt 4 -bptt-block 10 -direct-order 3 -direct 2 -save-step 10000");
            Console.WriteLine("RnnLMConsole -rnnlm model -test [console]");
            Console.WriteLine("");
        }

        /// <summary>
        /// Entry point. Parses the command line, then runs training (when -train
        /// is given) and/or testing (when both a test source and a model file are
        /// given). Numeric options are parsed with the invariant culture so that
        /// values such as "0.1" work regardless of the machine's locale.
        /// </summary>
        static void Main(string[] args)
        {
            if (args.Length == 0)
            {
                Usage();
                return;
            }

            // Defaults; keep these in sync with the values advertised by Usage().
            int train_mode = 0;
            int alpha_set = 0;
            int class_size = 100;
            int old_classes = 0;
            double lambda = 0.75;               // was 0.75f; no reason to round through float
            float gradient_cutoff = 15;
            float dynamic = 0;
            float starting_alpha = 0.1f;
            float regularization = 0.0000001f;
            float min_improvement = 1.003f;
            int hidden_size = 30;
            int compression_size = 0;
            long direct = 0;
            int direct_order = 3;
            int bptt = 0;
            int bptt_block = 10;
            int one_iter = 0;
            int save_step = 0;

            string train_file = "";
            string test_file = "";
            string valid_file = "";
            string rnnlm_file = "";

            const int MAX_NGRAM_ORDER = 20;

            string val;

            //set bptt (stored internally as steps + 1; 0/1 means simple RNN)
            val = argValue("-bptt", args);
            if (val != null)
            {
                bptt = int.Parse(val, CultureInfo.InvariantCulture) + 1;
                if (bptt < 1) bptt = 1;
                Console.WriteLine("BPTT: {0}", bptt - 1);
            }

            //set bptt block
            val = argValue("-bptt-block", args);
            if (val != null)
            {
                bptt_block = int.Parse(val, CultureInfo.InvariantCulture);
                if (bptt_block < 1) bptt_block = 1;
                Console.WriteLine("BPTT block: {0}", bptt_block);
            }

            //set direct connections (command-line value is in millions of hash entries)
            val = argValue("-direct", args);
            if (val != null)
            {
                direct = long.Parse(val, CultureInfo.InvariantCulture) * 1000000;
                if (direct < 0)
                {
                    direct = 0;
                }
                Console.WriteLine("Direct connections: {0}M", direct / 1000000);
            }

            //set order of direct connections (capped at MAX_NGRAM_ORDER)
            val = argValue("-direct-order", args);
            if (val != null)
            {
                direct_order = int.Parse(val, CultureInfo.InvariantCulture);
                if (direct_order > MAX_NGRAM_ORDER)
                {
                    direct_order = MAX_NGRAM_ORDER;
                }
                Console.WriteLine("Order of direct connections: {0}", direct_order);
            }

            //set class size parameter
            val = argValue("-class", args);
            if (val != null)
            {
                class_size = int.Parse(val, CultureInfo.InvariantCulture);
                Console.WriteLine("class size: {0}", class_size);
            }

            //use old (slower but slightly more precise) class computation algorithm;
            //documented in Usage() but previously never parsed
            if (argPos("-old-classes", args) >= 0)
            {
                old_classes = 1;
                Console.WriteLine("Old classes: enabled");
            }

            //set hidden layer size
            val = argValue("-hidden", args);
            if (val != null)
            {
                hidden_size = int.Parse(val, CultureInfo.InvariantCulture);
                Console.WriteLine("Hidden layer size: {0}", hidden_size);
            }

            //set compression layer size; documented in Usage() but previously never parsed
            val = argValue("-compression", args);
            if (val != null)
            {
                compression_size = int.Parse(val, CultureInfo.InvariantCulture);
                Console.WriteLine("Compression layer size: {0}", compression_size);
            }

            //set starting learning rate; documented in Usage() but previously never parsed
            val = argValue("-alpha", args);
            if (val != null)
            {
                starting_alpha = float.Parse(val, CultureInfo.InvariantCulture);
                alpha_set = 1;
                Console.WriteLine("Starting learning rate: {0}", starting_alpha);
            }

            //set L2 regularization; documented in Usage() but previously never parsed
            val = argValue("-beta", args);
            if (val != null)
            {
                regularization = float.Parse(val, CultureInfo.InvariantCulture);
                Console.WriteLine("Regularization: {0}", regularization);
            }

            //set minimal relative entropy improvement for convergence;
            //documented in Usage() but previously never parsed
            val = argValue("-min-improvement", args);
            if (val != null)
            {
                min_improvement = float.Parse(val, CultureInfo.InvariantCulture);
                Console.WriteLine("Min improvement: {0}", min_improvement);
            }

            //set maximal absolute gradient value; documented in Usage() but previously never parsed
            val = argValue("-gradient-cutoff", args);
            if (val != null)
            {
                gradient_cutoff = float.Parse(val, CultureInfo.InvariantCulture);
                Console.WriteLine("Gradient cutoff: {0}", gradient_cutoff);
            }

            //perform exactly one training iteration; documented in Usage() but previously never parsed
            if (argPos("-one-iter", args) >= 0)
            {
                one_iter = 1;
                Console.WriteLine("One iteration: enabled");
            }

            //search for train file
            val = argValue("-train", args);
            if (val != null)
            {
                train_file = val;
                Console.WriteLine("train file: {0}", train_file);

                train_mode = 1;
            }

            //search for validation file
            val = argValue("-valid", args);
            if (val != null)
            {
                valid_file = val;
                Console.WriteLine("valid file: {0}", valid_file);
            }

            //search for rnnlm file (model to write during training / read during testing)
            val = argValue("-rnnlm", args);
            if (val != null)
            {
                rnnlm_file = val;
                Console.WriteLine("rnnlm file: {0}", rnnlm_file);
            }

            //set lambda
            val = argValue("-lambda", args);
            if (val != null)
            {
                lambda = double.Parse(val, CultureInfo.InvariantCulture);
                Console.WriteLine("Lambda (interpolation coefficient between rnnlm and other lm): {0}", lambda);
            }

            //set learning rate for dynamic model updates during testing;
            //documented in Usage() but previously never parsed
            val = argValue("-dynamic", args);
            if (val != null)
            {
                dynamic = float.Parse(val, CultureInfo.InvariantCulture);
                Console.WriteLine("Dynamic learning rate: {0}", dynamic);
            }

            //NOTE(review): -gen is documented in Usage() but no generation API is
            //visible from here, so the option is currently accepted and ignored.

            //search for test file; a bare "-test" (or "-test" followed by another
            //option) reads sentences from the console instead of a file
            int i = argPos("-test", args);
            if (i >= 0)
            {
                string next = (i + 1 < args.Length) ? args[i + 1] : null;
                if (string.IsNullOrEmpty(next) || next.StartsWith("-"))
                {
                    test_file = "console";
                }
                else
                {
                    test_file = next;
                }
                Console.WriteLine("test file: {0}", test_file);
            }

            //set model checkpoint interval (0 = no periodic saving; anything else
            //is clamped to at least 10000 sentences)
            val = argValue("-save-step", args);
            if (val != null)
            {
                save_step = int.Parse(val, CultureInfo.InvariantCulture);

                if ((save_step != 0) && (save_step < 10000)) save_step = 10000;
                Console.WriteLine("Model will be saved after each # sentences: {0}", save_step);
            }

            //training phase
            if (train_mode != 0)
            {
                RNNEncoder encoder = new RNNEncoder();

                encoder.setOneIter(one_iter);
                encoder.setClassSize(class_size);
                encoder.setOldClasses(old_classes);
                encoder.setLearningRate(starting_alpha);
                encoder.setGradientCutoff(gradient_cutoff);
                encoder.setRegularization(regularization);
                encoder.setMinImprovement(min_improvement);
                encoder.setHiddenLayerSize(hidden_size);
                encoder.setCompressionLayerSize(compression_size);
                encoder.setDirectSize(direct);
                encoder.setDirectOrder(direct_order);
                encoder.setBPTT(bptt);
                encoder.setBPTTBlock(bptt_block);
                encoder.setSaveStep(save_step);

                encoder.alpha_set = alpha_set;

                encoder.trainNet(train_file, valid_file, rnnlm_file);
            }

            //testing phase: score each tab-separated input line (first field is the
            //sentence) and emit "text\tlogProb\toovs\tperplexity"
            if (test_file.Length > 0 && rnnlm_file.Length > 0)
            {
                RNNDecoder decoder = new RNNDecoder();

                decoder.setLambda(lambda);
                decoder.setRegularization(regularization);
                decoder.setDynamic(dynamic);
                decoder.LoadLM(rnnlm_file);

                StreamReader sr = null;
                StreamWriter sw = null;

                try
                {
                    if (test_file != "console")
                    {
                        //Open test file with input text
                        sr = new StreamReader(test_file);

                        //Derive the output file name by inserting "_rnnlm" before the
                        //extension; append it when the file has no extension (the old
                        //code threw ArgumentOutOfRangeException in that case).
                        int pos = test_file.LastIndexOf('.');
                        string strOutputFile = (pos >= 0)
                            ? test_file.Substring(0, pos) + "_rnnlm" + test_file.Substring(pos)
                            : test_file + "_rnnlm";
                        sw = new StreamWriter(strOutputFile);
                    }

                    while (true)
                    {
                        string strLine = (sr != null) ? sr.ReadLine() : Console.ReadLine();

                        //stop on end-of-stream or on the first empty line
                        if (string.IsNullOrEmpty(strLine))
                        {
                            break;
                        }

                        string strText = strLine.Split('\t')[0];

                        RnnLMResult LMRst = decoder.GetSentProb(strText);

                        string strResult = string.Format("{0}\t{1}\t{2}\t{3}", strText, LMRst.logProb, LMRst.oovs, LMRst.perplexity);
                        if (sw != null)
                        {
                            sw.WriteLine(strResult);
                        }
                        else
                        {
                            Console.WriteLine(strResult);
                        }
                    }
                }
                finally
                {
                    //close the streams even if scoring throws (the old code leaked them)
                    if (sr != null)
                    {
                        sr.Close();
                    }
                    if (sw != null)
                    {
                        sw.Close();
                    }
                }
            }
        }
    }
}
