﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.IO;

namespace LMDecoder
{
    public class RNNEncoder : RnnLM
    {

        // Converts a space-delimited token string into vocabulary ids.
        // Out-of-vocabulary tokens map to -1, and the sentence-end id
        // (vocab_hash["</s>"]) is always appended at the end.
        List<int> GetFeatureList(string str)
        {
            List<int> featureList = new List<int>();

            foreach (string token in str.Split(' '))
            {
                // TryGetValue avoids the double hash lookup of
                // ContainsKey followed by the indexer.
                int id;
                if (vocab_hash.TryGetValue(token, out id))
                {
                    featureList.Add(id);
                }
                else
                {
                    featureList.Add(-1);    // OOV token
                }
            }

            featureList.Add(vocab_hash["</s>"]);

            return featureList;
        }

        // Buffer of tokens still pending from the most recently read line.
        Queue<string> strQ = new Queue<string>();

        // Returns the vocabulary id of the next word in the stream, or -1
        // for an out-of-vocabulary word or end of file. Only the first
        // tab-separated column of each line is used, and "</s>" is
        // appended after every line's tokens.
        int readWordIndex(StreamReader fin)
        {
            if (strQ.Count == 0)
            {
                //Queue is empty, read more from file
                string strLine = fin.ReadLine();
                if (strLine == null)
                {
                    return -1;  // end of stream
                }

                string[] key = strLine.Split('\t');

                foreach (string item in key[0].Split(' '))
                {
                    if (item.Length > 0)    // skip empty tokens from repeated spaces
                    {
                        strQ.Enqueue(item);
                    }
                }
                strQ.Enqueue("</s>");
            }

            string word = strQ.Dequeue();

            // TryGetValue avoids the double hash lookup of
            // ContainsKey followed by the indexer.
            int id;
            if (vocab_hash.TryGetValue(word, out id))
            {
                return id;
            }
            return -1;  // OOV
        }

        void saveNet(string strModelFile)       //will save the whole network structure                                                        
        {
            // The using statement guarantees the writer is flushed and
            // closed even if an exception is thrown mid-serialization;
            // the original leaked the handle on any write failure.
            // NOTE(review): numbers are written with the current culture's
            // formatting; restoreNet must parse with the same culture — confirm.
            using (StreamWriter fo = new StreamWriter(strModelFile))
            {
                int a, b;

                // --- header: training progress and hyper-parameters ---
                fo.WriteLine("last probability of validation data: {0}", llogp);

                fo.WriteLine("current position in training data: {0}", train_cur_pos);
                fo.WriteLine("current probability of training data: {0}", logp);
                fo.WriteLine("save after processing # sentences: {0}", save_step);

                fo.WriteLine("input layer size: {0}", layer0_size);
                fo.WriteLine("hidden layer size: {0}", layer1_size);
                fo.WriteLine("compression layer size: {0}", layerc_size);
                fo.WriteLine("output layer size: {0}", layer2_size);

                fo.WriteLine("direct connections: {0}", direct_size);
                fo.WriteLine("direct order: {0}", direct_order);

                fo.WriteLine("bptt: {0}", bptt);
                fo.WriteLine("bptt block: {0}", bptt_block);

                fo.WriteLine("vocabulary size: {0}", vocab_size);
                fo.WriteLine("class size: {0}", class_size);

                fo.WriteLine("old classes: {0}", old_classes);

                fo.WriteLine("starting learning rate: {0}", starting_alpha);
                fo.WriteLine("current learning rate: {0}", alpha);
                fo.WriteLine("learning rate decrease: {0}", alpha_divide);

                // --- vocabulary: index, count, surface form, class id ---
                fo.WriteLine();
                fo.WriteLine("Vocabulary:");
                for (a = 0; a < vocab_size; a++)
                {
                    fo.WriteLine("{0}\t{1}\t{2}\t{3}", a, vocab[a].cn, vocab[a].word, vocab[a].class_index);
                }

                // --- hidden layer activations (network state) ---
                fo.WriteLine();
                fo.WriteLine("Hidden layer activation:");
                for (a = 0; a < layer1_size; a++)
                {
                    fo.WriteLine("{0}", neu1[a].ac);
                }

                // --- weight matrices, one scalar per line ---
                fo.WriteLine();
                fo.WriteLine("Weights 0->1:");
                for (b = 0; b < layer1_size; b++)
                {
                    for (a = 0; a < layer0_size; a++)
                    {
                        fo.WriteLine("{0}", syn0[a + b * layer0_size].weight);
                    }
                }

                if (layerc_size > 0)
                {
                    // Variant with a compression layer between hidden and output.
                    fo.WriteLine();
                    fo.WriteLine("Weights 1->c:");
                    for (b = 0; b < layerc_size; b++)
                    {
                        for (a = 0; a < layer1_size; a++)
                        {
                            fo.WriteLine("{0}", syn1[a + b * layer1_size].weight);
                        }
                    }

                    fo.WriteLine();
                    fo.WriteLine("Weights c->2:");
                    for (b = 0; b < layer2_size; b++)
                    {
                        for (a = 0; a < layerc_size; a++)
                        {
                            fo.WriteLine("{0}", sync[a + b * layerc_size].weight);
                        }
                    }
                }
                else
                {
                    // No compression layer: hidden connects straight to output.
                    fo.WriteLine();
                    fo.WriteLine("Weights 1->2:");
                    for (b = 0; b < layer2_size; b++)
                    {
                        for (a = 0; a < layer1_size; a++)
                        {
                            fo.WriteLine("{0}", syn1[a + b * layer1_size].weight);
                        }
                    }
                }

                // --- direct (n-gram hash) connection weights ---
                fo.WriteLine();
                fo.WriteLine("Direct connections:");
                for (long aa = 0; aa < direct_size; aa++)
                {
                    fo.WriteLine("{0}", syn_d[aa]);
                }
            }
        }

        // Appends a new word to the vocabulary, registers its index in
        // vocab_hash, and returns that index. Assumes the word is not
        // already present (caller checks first).
        int addWordToVocab(string word)
        {
            vocab_word entry = new vocab_word(word);
            entry.word = word;
            entry.cn = 0;

            int index = vocab_size;
            vocab.Add(entry);
            vocab_size = index + 1;
            vocab_hash.Add(word, index);

            return index;
        }

        // Builds the vocabulary from a training corpus of lines formatted
        // "token list [\t frequency]". Returns the total (frequency-weighted)
        // word count. Assumes the vocabulary is empty on entry.
        long learnVocabFromTrainFile(string strTrainCorpus)    //assumes that vocabulary is empty
        {
            long totalWordNum = 0;

            //</s>'s id should be 0
            addWordToVocab("</s>");
            long max_cn = 0;

            // The using statement closes the reader even if parsing throws;
            // the original leaked the handle on int.Parse failure.
            using (StreamReader fin = new StreamReader(strTrainCorpus, Encoding.UTF8))
            {
                string strLine = null;
                while ((strLine = fin.ReadLine()) != null)
                {
                    string[] col = strLine.Split('\t');
                    col[0] = col[0] + " </s>";
                    string[] items = col[0].Split();
                    int freq = 1;
                    if (col.Length > 1)
                    {
                        freq = int.Parse(col[1]);
                    }

                    foreach (string item in items)
                    {
                        if (item.Length == 0)
                        {
                            continue;   // empty token from repeated whitespace
                        }

                        totalWordNum += freq;

                        // TryGetValue avoids a second hash lookup after
                        // ContainsKey; addWordToVocab returns the new index.
                        int idx;
                        if (vocab_hash.TryGetValue(item, out idx) == false)
                        {
                            idx = addWordToVocab(item);
                        }
                        vocab[idx].cn += freq;

                        if (vocab[idx].cn > max_cn)
                        {
                            max_cn = vocab[idx].cn;
                        }
                    }
                }
            }

            // Bump </s>'s count above the maximum so it stays at index 0
            // after sorting (presumably Sort orders by count — verify
            // vocab_word's CompareTo).
            if (vocab[0].cn <= max_cn)
            {
                vocab[0].cn = max_cn + 1;
            }

            // Sort and rebuild the word -> index lookup in place; the
            // original's copy into a second list added nothing (same
            // references, same order).
            vocab.Sort();
            vocab_hash.Clear();
            vocab_size = 0;
            foreach (vocab_word item in vocab)
            {
                vocab_hash.Add(item.word, vocab_size);
                vocab_size++;
            }
            Console.WriteLine("Vocab size: {0}", vocab_size);

            return totalWordNum;
        }


        // Trains the class-factored RNN LM on strTrainCorpus. Unless
        // one_iter == 1, each pass is followed by an entropy evaluation on
        // strValidCorpus; weights are rolled back when validation
        // log-probability drops, the learning rate is halved once
        // improvement falls below min_improvement, and training stops on
        // the second such stall. Training resumes from strModelFile (and
        // from line train_cur_pos) when a saved model exists.
        public void trainNet(string strTrainCorpus, string strValidCorpus, string strModelFile)
        {
            int iter = 0;
            int a, b, wordcn;
            int word, last_word;
            Console.WriteLine("Starting training using file {0}", strTrainCorpus);
            starting_alpha = alpha;

            if (File.Exists(strModelFile) == true)
            {
                // Resume: model file carries vocab, weights and position.
                Console.WriteLine("Restoring network from file to continue training...");
                restoreNet(strModelFile);
                Console.WriteLine("Re-train at {0} line.", train_cur_pos);
            }
            else
            {
                // Fresh start: build vocab, then allocate/initialize the net.
                Console.Write("Loading vocab from corpus...");
                learnVocabFromTrainFile(strTrainCorpus);
                initNet();
                iter = 0;
                Console.WriteLine("Done.");
            }

            if (class_size > vocab_size)
            {
                Console.WriteLine("WARNING: number of classes exceeds vocabulary size!");
            }

            while (true) // for each iteration
            {
                Console.WriteLine("Iter: {0}\tAlpha: {1:F}", iter, alpha);

                //TRAINING PHASE
                netFlush();

                StreamReader sr = new StreamReader(strTrainCorpus, Encoding.UTF8);
                string strLine = null;
                if (train_cur_pos > 0)
                {
                    //Skip first train_cur_pos lines (already trained before the
                    //last checkpoint was written)
                    for (int i = 0; i < train_cur_pos; i++)
                    {
                        sr.ReadLine();
                    }
                }

                int termCnt = 0;                // words processed this pass
                int queryCnt = train_cur_pos;   // sentences (lines) processed
                logp = 0;

                while ((strLine = sr.ReadLine()) != null)
                {
                    queryCnt++;
                    // Periodic checkpoint so a crash can resume mid-corpus.
                    if ((save_step > 0) && ((queryCnt % save_step) == 0))
                    {
                        train_cur_pos = queryCnt;
                        saveNet(strModelFile);
                    }

                    string[] keys = strLine.Split('\t'); //Format: token list \t frequency
                    List<int> wordIdList = GetFeatureList(keys[0]);
                    // Insert id 0 at the front as the sentence-start context.
                    wordIdList.Insert(0, 0);
                    int freq = int.Parse(keys[1]);

                    // Repeat the sentence freq times (corpus stores counts).
                    while (freq > 0)
                    {
                        freq--;
                        //Process the sentence: predict each word from the previous one
                        for (int z = 1; z < wordIdList.Count; z++)
                        {
                            word = wordIdList[z];
                            last_word = wordIdList[z - 1];
                            termCnt++;
                            if ((termCnt % 10000) == 0)
                            {
                                Console.WriteLine("Iter: {0}\tAlpha: {1:F}\tEntropy: {2:F}\tLine: {3}", iter, alpha, -logp / Math.Log10(2) / termCnt, queryCnt);
                            }

                            computeNet(last_word, word);      //compute probability distribution

                            // word == -1 is OOV; it is excluded from logp.
                            // P(word) = P(class) * P(word | class).
                            if (word != -1) logp += Math.Log10(neu2[vocab[word].class_index + vocab_size].ac * neu2[word].ac);

                            // logp != logp is a NaN check.
                            // NOTE(review): if this fires while word == -1 the
                            // diagnostic indexes neu2[-1] and throws — confirm.
                            if ((logp != logp) || (double.IsInfinity(logp) == true))
                            {
                                Console.WriteLine("Numerical error {0} {1} {2}", word, neu2[word].ac, neu2[vocab[word].class_index + vocab_size].ac);
                                return;
                            }

                            //
                            if (bptt > 0)
                            {
                                //shift memory needed for bptt to next time step
                                for (a = bptt + bptt_block - 1; a > 0; a--)
                                {
                                    bptt_history[a] = bptt_history[a - 1];
                                }
                                bptt_history[0] = last_word;

                                // Shift stored hidden activations/errors one step back.
                                for (a = bptt + bptt_block - 1; a > 0; a--)
                                {
                                    for (b = 0; b < layer1_size; b++)
                                    {
                                        bptt_hidden[a * layer1_size + b].ac = bptt_hidden[(a - 1) * layer1_size + b].ac;
                                        bptt_hidden[a * layer1_size + b].er = bptt_hidden[(a - 1) * layer1_size + b].er;
                                    }
                                }
                            }
                            //
                            learnNet(last_word, word, termCnt);

                            // Recurrent step: hidden activations become next input state.
                            copyHiddenLayerToInput();

                            if (last_word != -1) neu0[last_word].ac = 0;  //delete previous activation

                            // Shift the n-gram history used by direct connections.
                            for (a = MAX_NGRAM_ORDER - 1; a > 0; a--)
                            {
                                history[a] = history[a - 1];
                            }
                            history[0] = word;
                        }
                        netReset();     // clear recurrent state between sentences
                    }
                }
                sr.Close();

                if (one_iter == 1)
                {	//no validation data are needed and network is always saved with modified weights
                    Console.WriteLine();
                    logp = 0;
                    saveNet(strModelFile);
                    break;
                }

                //VALIDATION PHASE: forward passes only, no weight updates
                netFlush();

                StreamReader fi = new StreamReader(strValidCorpus);

                last_word = 0;
                logp = 0;
                wordcn = 0;
                while (true)
                {
                    word = readWordIndex(fi);     //read next word
                    computeNet(last_word, word);      //compute probability distribution
                    // NOTE(review): computeNet runs before the EOF break, so the
                    // sentinel word at end of stream is computed but not scored.
                    if (fi.EndOfStream == true) break;        //end of file: report LOGP, PPL

                    if (word != -1)
                    {
                        logp += Math.Log10(neu2[vocab[word].class_index + vocab_size].ac * neu2[word].ac);
                        wordcn++;
                    }

                    copyHiddenLayerToInput();

                    if (last_word != -1) neu0[last_word].ac = 0;  //delete previous activation

                    last_word = word;

                    for (a = MAX_NGRAM_ORDER - 1; a > 0; a--) history[a] = history[a - 1];
                    history[0] = last_word;

                    // word 0 is </s>: sentence boundary, reset recurrent state.
                    if (word == 0)
                    {
                        netReset();
                        last_word = 0;
                    }
                }
                fi.Close();

                Console.WriteLine("VALID entropy: {0:F}", -logp / Math.Log10(2) / wordcn);

                train_cur_pos = 0;

                // Validation got worse: roll back to the previous weights.
                if (logp < llogp)
                    restoreWeights();
                else
                    saveWeights();

                // Improvement below threshold: first time halve alpha
                // (alpha_divide flag), second time save and stop.
                if (logp * min_improvement < llogp)
                {
                    if (alpha_divide == 0) alpha_divide = 1;
                    else
                    {
                        saveNet(strModelFile);
                        break;
                    }
                }

                if (alpha_divide != 0) alpha /= 2;

                llogp = logp;
                logp = 0;
                iter++;
                saveNet(strModelFile);
            }
        }

    }
}
