﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.IO;
using LDASharp;

namespace GibbsLDASharp.LDA
{
    public class Model
    {
        #region	Class Variables

        // Suffixes appended to the model name to form each persisted file's name.
        // NOTE(review): these are static but (re)assigned per instance in
        // SetDefaultValues, so they are shared by every Model — confirm intended.
        public static String tassignSuffix;	//suffix for the topic-assignment file
        public static String thetaSuffix;		//suffix for the theta (document-topic distribution) file
        public static String phiSuffix;		//suffix for the phi (topic-word distribution) file
        public static String othersSuffix; 	//suffix for the file containing other parameters
        public static String twordsSuffix;		//suffix for the file containing top words per topic

        #endregion
        
        #region	Model Parameters and Variables
        
        public String wordMapFile; 		//file that contains the word-to-id map
        public String trainlogFile; 	//training log file	

        public String dir;              //directory holding data and model files
        public String dfile;            //training data file name
        public String modelName;        //base name used for all saved model files
        public int modelStatus; 		//see Constants class for status of model
        public LDADataset data;			// link to a dataset

        public int M; //dataset size (i.e., number of docs)
        public int V; //vocabulary size
        public int K; //number of topics
        public double alpha, beta; //LDA hyperparameters (document-topic / topic-word smoothing)
        public int niters; //number of Gibbs sampling iterations
        public int liter; //the iteration at which the model was saved	
        public int savestep; //saving period
        public int twords; //print out top words per each topic
        public int withrawdata; // NOTE(review): set from nowhere visible in this file — confirm usage

        // Estimated/inferred parameters
        public double[,] theta; //theta: document - topic distributions, size M x K
        public double[,] phi; // phi: topic-word distributions, size K x V

        // Temp variables while sampling
        public List<int>[] z; //topic assignments for words, size M x doc.size()
        public int[,] nw; //nw[i][j]: number of instances of word/term i assigned to topic j, size V x K
        public int[,] nd; //nd[i][j]: number of words in document i assigned to topic j, size M x K
        public int[] nwsum; //nwsum[j]: total number of words assigned to topic j, size K
        public int[] ndsum; //ndsum[i]: total number of words in document i, size M

        // temp buffer of length K allocated by the Init* methods for sampling
        public double[] p;

        #endregion

        
        #region	Constructors
        
        /// <summary>
        /// Create a model initialized with the library's default parameters.
        /// </summary>
        public Model()
        {
            SetDefaultValues();
        }

        /// <summary>
        /// Set default values for file names, suffixes, hyperparameters
        /// (K=100, alpha=50/K, beta=0.1, niters=2000) and null out all
        /// count/distribution arrays.
        /// NOTE(review): twords, savestep and withrawdata are not reset here —
        /// confirm whether that is intentional.
        /// </summary>
        public void SetDefaultValues()
        {
            wordMapFile = "wordmap.txt";
            trainlogFile = "trainlog.txt";
            tassignSuffix = ".tassign";
            thetaSuffix = ".theta";
            phiSuffix = ".phi";
            othersSuffix = ".others";
            twordsSuffix = ".twords";

            dir = "./";
            dfile = "trndocs.dat";
            modelName = "model-final";
            modelStatus = Constants.MODEL_STATUS_UNKNOWN;

            M = 0;
            V = 0;
            K = 100;
            alpha = 50.0 / K;   // common heuristic: alpha scales inversely with K
            beta = 0.1;
            niters = 2000;
            liter = 0;

            z = null;
            nw = null;
            nd = null;
            nwsum = null;
            ndsum = null;
            theta = null;
            phi = null;
        }
        #endregion

        
        #region	I/O Methods

        /// <summary>
        /// Read the "&lt;model&gt;.others" parameter file (written by
        /// <see cref="SaveModelOthers"/>) and restore alpha, beta, K, liter, V and M.
        /// </summary>
        /// <param name="otherFile">path of the ".others" file</param>
        /// <returns>true on success, false if the file could not be read or parsed</returns>
        protected bool ReadOthersFile(string otherFile)
        {
            try
            {
                // using-statement guarantees the reader is closed even if parsing throws
                using (StreamReader reader = new StreamReader(otherFile))
                {
                    string line;
                    while ((line = reader.ReadLine()) != null)
                    {
                        // BUGFIX: SaveModelOthers writes "name=value", but this method
                        // only split on tab/CR/LF, so no line ever yielded two tokens.
                        // Split on '=' as well as whitespace and drop empty tokens.
                        string[] tknr = line.Split(new char[] { '=', ' ', '\t', '\r', '\n' },
                                                   StringSplitOptions.RemoveEmptyEntries);

                        if (tknr.Length != 2)
                            continue;

                        string optstr = tknr[0];
                        string optval = tknr[1];

                        if (optstr.Equals("alpha", StringComparison.CurrentCultureIgnoreCase))
                        {
                            // NOTE(review): Double.Parse is culture-sensitive; files written
                            // under a different locale may not round-trip — consider
                            // CultureInfo.InvariantCulture for both writer and reader.
                            alpha = Double.Parse(optval);
                        }
                        else if (optstr.Equals("beta", StringComparison.CurrentCultureIgnoreCase))
                        {
                            beta = Double.Parse(optval);
                        }
                        else if (optstr.Equals("ntopics", StringComparison.CurrentCultureIgnoreCase))
                        {
                            K = int.Parse(optval);
                        }
                        else if (optstr.Equals("liter", StringComparison.CurrentCultureIgnoreCase)
                              || optstr.Equals("liters", StringComparison.CurrentCultureIgnoreCase))
                        {
                            // accept both spellings: older SaveModelOthers wrote "liters"
                            liter = int.Parse(optval);
                        }
                        else if (optstr.Equals("nwords", StringComparison.CurrentCultureIgnoreCase))
                        {
                            V = int.Parse(optval);
                        }
                        else if (optstr.Equals("ndocs", StringComparison.CurrentCultureIgnoreCase))
                        {
                            M = int.Parse(optval);
                        }
                        // unknown keys are silently ignored
                    }
                }
            }
            catch (Exception e)
            {
                Console.WriteLine("Error while reading other file:" + e.Message);
                Console.WriteLine(e.StackTrace);
                return false;
            }
            return true;
        }

        /// <summary>
        /// Read the word-topic assignment (".tassign") file written by
        /// <see cref="SaveModelTAssign"/> and rebuild both the dataset documents
        /// and the topic-assignment matrix z. M and V must already be set
        /// (normally by <see cref="ReadOthersFile"/>).
        /// </summary>
        /// <param name="tassignFile">path of the ".tassign" file</param>
        /// <returns>true on success, false otherwise</returns>
        protected bool ReadTAssignFile(string tassignFile)
        {
            try
            {
                // using-statement guarantees the reader is closed even on early return
                using (StreamReader reader = new StreamReader(tassignFile, Encoding.UTF8))
                {
                    z = new List<int>[M];
                    data = new LDADataset(M);
                    data.V = V;
                    for (int i = 0; i < M; i++)
                    {
                        string line = reader.ReadLine();

                        // BUGFIX: SaveModelTAssign separates "word:topic" pairs with
                        // spaces, but this method split only on tab/CR/LF; also drop
                        // the empty token produced by the trailing separator.
                        string[] tknr = line.Split(new char[] { ' ', '\t', '\r', '\n' },
                                                   StringSplitOptions.RemoveEmptyEntries);

                        List<int> words = new List<int>();
                        List<int> topics = new List<int>();

                        for (int j = 0; j < tknr.Length; j++)
                        {
                            // BUGFIX: was tknr[0] — every iteration re-parsed the
                            // first pair instead of walking the line.
                            string token = tknr[j];

                            string[] tknr2 = token.Split(new char[] { ':' });
                            if (tknr2.Length != 2)
                            {
                                Console.WriteLine("Invalid word-topic assignment line");
                                return false;
                            }

                            words.Add(int.Parse(tknr2[0]));
                            topics.Add(int.Parse(tknr2[1]));
                        }//end for each topic assignment

                        //allocate and add new document to the corpus
                        Document doc = new Document(words);
                        data.SetDoc(doc, i);

                        //assign values for z
                        z[i] = new List<int>(topics);
                    }//end for each doc
                }
            }
            catch (Exception e)
            {
                Console.WriteLine("Error while loading model: " + e.Message);
                Console.WriteLine(e.StackTrace);
                return false;
            }
            return true;
        }

        /// <summary>
        /// Load a saved model: parameters (.others), topic assignments (.tassign)
        /// and the word-to-id dictionary.
        /// </summary>
        /// <returns>true if every piece loaded successfully</returns>
        public bool LoadModel()
        {
            // BUGFIX: the separator ternary was accidentally nested twice on the
            // first path; compute it once and reuse it for all three files.
            string sep = string.IsNullOrEmpty(dir) ? "" : @"\";

            if (!ReadOthersFile(dir + sep + modelName + othersSuffix))
                return false;

            if (!ReadTAssignFile(dir + sep + modelName + tassignSuffix))
                return false;

            // read dictionary
            LDADictionary dict = new LDADictionary();
            if (!dict.ReadWordMap(dir + sep + wordMapFile))
                return false;

            data.localDict = dict;

            return true;
        }

        /// <summary>
        /// Save word-topic assignments for this model: one line per document,
        /// space-separated "wordId:topicId" pairs.
        /// </summary>
        /// <param name="filename">output file path</param>
        /// <returns>true on success, false otherwise</returns>
        public bool SaveModelTAssign(string filename)
        {
            try
            {
                // using-statement disposes the writer even if an exception is thrown
                // mid-write (the original leaked the handle on failure)
                using (StreamWriter writer = new StreamWriter(filename))
                {
                    //write docs with topic assignments for words
                    for (int i = 0; i < data.M; i++)
                    {
                        for (int j = 0; j < data.docs[i].length; ++j)
                        {
                            writer.Write(data.docs[i].words[j] + ":" + z[i][j] + " ");
                        }
                        writer.Write("\n");
                    }
                }
            }
            catch (Exception e)
            {
                Console.WriteLine("Error while saving model tassign: " + e.Message);
                Console.WriteLine(e.StackTrace);
                return false;
            }
            return true;
        }

        /// <summary>
        /// Save theta (the M x K document-topic distribution): one row per
        /// document, space-separated probabilities.
        /// </summary>
        /// <param name="filename">output file path</param>
        /// <returns>true on success, false otherwise</returns>
        public bool SaveModelTheta(string filename)
        {
            try
            {
                // using-statement disposes the writer even on failure
                using (StreamWriter writer = new StreamWriter(filename))
                {
                    for (int i = 0; i < M; i++)
                    {
                        for (int j = 0; j < K; j++)
                        {
                            // NOTE(review): double formatting is culture-sensitive;
                            // consider InvariantCulture for a machine-readable file.
                            writer.Write(theta[i, j] + " ");
                        }
                        writer.Write("\n");
                    }
                }
            }
            catch (Exception e)
            {
                Console.WriteLine("Error while saving topic distribution file for this model: " + e.Message);
                Console.WriteLine(e.StackTrace);
                return false;
            }
            return true;
        }

        /// <summary>
        /// Save phi (the K x V topic-word distribution): one row per topic,
        /// space-separated probabilities.
        /// </summary>
        /// <param name="filename">output file path</param>
        /// <returns>true on success, false otherwise</returns>
        public bool SaveModelPhi(string filename)
        {
            try
            {
                // using-statement disposes the writer even on failure
                using (StreamWriter writer = new StreamWriter(filename))
                {
                    for (int i = 0; i < K; i++)
                    {
                        for (int j = 0; j < V; j++)
                        {
                            writer.Write(phi[i, j] + " ");
                        }
                        writer.Write("\n");
                    }
                }
            }
            catch (Exception e)
            {
                Console.WriteLine("Error while saving word-topic distribution:" + e.Message);
                Console.WriteLine(e.StackTrace);
                return false;
            }
            return true;
        }

        /// <summary>
        /// Save the remaining model parameters as "name=value" lines, in the
        /// format expected back by <see cref="ReadOthersFile"/>.
        /// </summary>
        /// <param name="filename">output file path</param>
        /// <returns>true on success, false otherwise</returns>
        public bool SaveModelOthers(string filename)
        {
            try
            {
                // using-statement disposes the writer even on failure
                using (StreamWriter writer = new StreamWriter(filename))
                {
                    // NOTE(review): alpha/beta formatting is culture-sensitive;
                    // consider InvariantCulture for round-tripping across locales.
                    writer.WriteLine("alpha=" + alpha);
                    writer.WriteLine("beta=" + beta);
                    writer.WriteLine("ntopics=" + K);
                    writer.WriteLine("ndocs=" + M);
                    writer.WriteLine("nwords=" + V);
                    // BUGFIX: was "liters=", which ReadOthersFile ("liter") never matched
                    writer.WriteLine("liter=" + liter);
                }
            }
            catch (Exception e)
            {
                Console.WriteLine("Error while saving model others:" + e.Message);
                Console.WriteLine(e.StackTrace);
                return false;
            }
            return true;
        }

        /// <summary>
        /// Save the top <see cref="twords"/> most probable words for each topic.
        /// </summary>
        /// <param name="filename">output file path</param>
        /// <returns>true on success, false otherwise</returns>
        public bool SaveModelTwords(string filename)
        {
            try
            {
                // using-statement disposes the writer even if an exception is
                // thrown mid-write (the original leaked the handle on failure)
                using (StreamWriter writer = new StreamWriter(filename, false, Encoding.UTF8))
                {
                    // cannot print more words than the vocabulary holds
                    if (twords > V)
                    {
                        twords = V;
                    }

                    for (int k = 0; k < K; k++)
                    {
                        // collect (wordId, probability) pairs for topic k
                        List<Pair> wordsProbsList = new List<Pair>();
                        for (int w = 0; w < V; w++)
                        {
                            wordsProbsList.Add(new Pair(w, phi[k, w], false));
                        }//end foreach word

                        //print topic
                        writer.Write("Topic " + k + "th:\n");
                        // NOTE(review): assumes Pair's sort order puts the most
                        // probable words first — confirm in the Pair class.
                        wordsProbsList.Sort();

                        for (int i = 0; i < twords; i++)
                        {
                            if (data.localDict.Contains((int)wordsProbsList[i].first))
                            {
                                string word = data.localDict.GetWord((int)wordsProbsList[i].first);

                                writer.WriteLine("\t" + word + " " + wordsProbsList[i].second);
                            }
                        }
                    } //end foreach topic
                }
            }
            catch (Exception e)
            {
                Console.WriteLine("Error while saving model twords: " + e.Message);
                Console.WriteLine(e.StackTrace);
                return false;
            }
            return true;
        }

        /// <summary>
        /// Save a complete model snapshot: topic assignments, parameters, theta,
        /// phi and (when twords &gt; 0) the top words per topic.
        /// </summary>
        /// <param name="modelName">base name shared by all produced files</param>
        /// <returns>true only if every file was saved successfully</returns>
        public bool SaveModel(string modelName)
        {
            // build the common "<dir>\<modelName>" prefix once instead of
            // repeating the separator ternary for every file
            string prefix = dir + (string.IsNullOrEmpty(dir) ? "" : @"\") + modelName;

            if (!SaveModelTAssign(prefix + tassignSuffix))
            {
                return false;
            }

            if (!SaveModelOthers(prefix + othersSuffix))
            {
                return false;
            }

            if (!SaveModelTheta(prefix + thetaSuffix))
            {
                return false;
            }

            if (!SaveModelPhi(prefix + phiSuffix))
            {
                return false;
            }

            if (twords > 0)
            {
                if (!SaveModelTwords(prefix + twordsSuffix))
                    return false;
            }
            return true;
        }
        #endregion
        
        #region	Init Methods
        
        /// <summary>
        /// Initialize the options shared by all model modes (model name, K,
        /// alpha, beta, niters, dir, dfile, twords, word map file) from the
        /// command line options.
        /// </summary>
        /// <param name="option">parsed command line options; may be null</param>
        /// <returns>true if option was non-null and applied</returns>
        protected bool Init(LDACmdOption option)
        {
            if (option == null)
                return false;

            modelName = option.modelName;
            K = option.K;

            alpha = option.alpha;
            if (alpha < 0.0)
                alpha = 50.0 / K;   // fall back to the common 50/K heuristic

            if (option.beta >= 0)
                beta = option.beta;

            niters = option.niters;

            dir = option.dir;
            // normalize: strip a single trailing path separator so later
            // concatenation with @"\" cannot produce a double separator
            // (generalized to also accept a forward slash)
            if (dir.EndsWith(@"\") || dir.EndsWith("/"))
                dir = dir.Substring(0, dir.Length - 1);

            dfile = option.dfile;
            twords = option.twords;
            wordMapFile = option.wordMapFileName;

            return true;
        }

        /// <summary>
        /// Init parameters for estimation: read the training data, allocate the
        /// count arrays and randomly initialize topic assignments.
        /// </summary>
        /// <param name="option">parsed command line options</param>
        /// <returns>true if success and false otherwise</returns>
        public bool InitNewModel(LDACmdOption option)
        {
            if (!Init(option))
                return false;

            p = new double[K];

            data = LDADataset.ReadDataSet(dir + (string.IsNullOrEmpty(dir) ? "" : @"\") + dfile);
            if (data == null)
            {
                Console.WriteLine("Fail to read training data!");
                return false;
            }

            //+ allocate memory and assign values for variables
            M = data.M;
            V = data.V;
            // NOTE(review): this overwrites the trailing-separator-stripped dir
            // set by Init with the raw option value — confirm intended.
            dir = option.dir;
            savestep = option.savestep;

            Random rand = new Random();

            // K: from command line or default value
            // alpha, beta: from command line or default values
            // niters, savestep: from command line or default values

            // count arrays; C# zero-initializes new arrays, so the original
            // explicit clearing loops were redundant and have been removed
            nw = new int[V, K];
            nd = new int[M, K];
            nwsum = new int[K];
            ndsum = new int[M];

            z = new List<int>[M];
            for (int m = 0; m < data.M; m++)
            {
                int N = data.docs[m].length;
                z[m] = new List<int>(N);    // presize: N topics will be added

                // randomly assign an initial topic to every word occurrence
                for (int n = 0; n < N; n++)
                {
                    int topic = (int)Math.Floor(rand.NextDouble() * K);
                    z[m].Add(topic);

                    // number of instances of word assigned to topic j
                    nw[data.docs[m].words[n], topic] += 1;
                    // number of words in document i assigned to topic j
                    nd[m, topic] += 1;
                    // total number of words assigned to topic j
                    nwsum[topic] += 1;
                }
                // total number of words in document i
                ndsum[m] = N;
            }

            theta = new double[M, K];
            phi = new double[K, V];

            return true;
        }


        /// <summary>
        /// Init parameters for inference on an already-loaded dataset, reusing
        /// the trained model's K, alpha and beta.
        /// </summary>
        /// <param name="option">parsed command line options</param>
        /// <param name="newData">DataSet for which we do inference</param>
        /// <param name="trnModel">the trained model</param>
        /// <returns>true if success and false otherwise</returns>
        public bool InitNewModel(LDACmdOption option, LDADataset newData, Model trnModel)
        {
            if (!Init(option))
                return false;

            // inference reuses the trained model's topology and hyperparameters
            K = trnModel.K;
            alpha = trnModel.alpha;
            beta = trnModel.beta;

            p = new double[K];
            Console.WriteLine("K:" + K);

            data = newData;

            Random rand = new Random();

            //+ allocate memory and assign values for variables
            M = data.M;
            V = data.V;
            // NOTE(review): this overwrites the trailing-separator-stripped dir
            // set by Init with the raw option value — confirm intended.
            dir = option.dir;
            savestep = option.savestep;
            Console.WriteLine("M:" + M);
            Console.WriteLine("V:" + V);

            // count arrays; C# zero-initializes new arrays, so the original
            // explicit clearing loops were redundant and have been removed
            nw = new int[V, K];
            nd = new int[M, K];
            nwsum = new int[K];
            ndsum = new int[M];

            z = new List<int>[M];
            for (int m = 0; m < data.M; m++)
            {
                int N = data.docs[m].length;
                z[m] = new List<int>(N);    // presize: N topics will be added

                // randomly assign an initial topic to every word occurrence
                for (int n = 0; n < N; n++)
                {
                    int topic = (int)Math.Floor(rand.NextDouble() * K);
                    z[m].Add(topic);

                    // number of instances of word assigned to topic j
                    nw[data.docs[m].words[n], topic] += 1;
                    // number of words in document i assigned to topic j
                    nd[m, topic] += 1;
                    // total number of words assigned to topic j
                    nwsum[topic] += 1;
                }
                // total number of words in document i
                ndsum[m] = N;
            }

            theta = new double[M, K];
            phi = new double[K, V];

            return true;
        }

        /// <summary>
        /// Init parameters for inference, reading the new dataset from file with
        /// the trained model's dictionary, then delegating to the dataset overload.
        /// </summary>
        /// <param name="option">parsed command line options</param>
        /// <param name="trnModel">trained model supplying the dictionary</param>
        /// <returns>true if success and false otherwise</returns>
        public bool InitNewModel(LDACmdOption option, Model trnModel)
        {
            if (!Init(option))
                return false;

            string separator = string.IsNullOrEmpty(dir) ? "" : @"\";
            string dataPath = dir + separator + dfile;

            LDADataset inferenceData = LDADataset.ReadDataSet(dataPath, trnModel.data.localDict);
            if (inferenceData == null)
            {
                Console.WriteLine("Fail to read dataset!");
                return false;
            }

            return InitNewModel(option, inferenceData, trnModel);
        }


        /// <summary>
        /// Init parameters for continued estimation or later inference: load a
        /// previously saved model and rebuild the count arrays (nw, nd, nwsum,
        /// ndsum) from its topic assignments.
        /// </summary>
        /// <param name="option">parsed command line options</param>
        /// <returns>true if success and false otherwise</returns>
        public bool InitEstimatedModel(LDACmdOption option)
        {
            if (!Init(option))
                return false;

            p = new double[K];

            // load model, i.e., read z and trndata
            if (!LoadModel())
            {
                Console.WriteLine("Fail to load word-topic assignment file of the model!\n");
                return false;
            }

            Console.WriteLine("Model loaded:");
            Console.WriteLine("\talpha:" + alpha);
            Console.WriteLine("\tbeta:" + beta);
            Console.WriteLine("\tM:" + M);
            Console.WriteLine("\tV:" + V);

            // count arrays; C# zero-initializes new arrays, so the original
            // explicit clearing loops were redundant and have been removed
            nw = new int[V, K];
            nd = new int[M, K];
            nwsum = new int[K];
            ndsum = new int[M];

            for (int m = 0; m < data.M; m++)
            {
                int N = data.docs[m].length;

                // rebuild nw, nd, nwsum from the loaded topic assignments
                for (int n = 0; n < N; n++)
                {
                    int w = data.docs[m].words[n];
                    int topic = z[m][n];    // redundant (int) cast removed: z holds ints

                    // number of instances of word i assigned to topic j
                    nw[w, topic] += 1;
                    // number of words in document i assigned to topic j
                    nd[m, topic] += 1;
                    // total number of words assigned to topic j
                    nwsum[topic] += 1;
                }
                // total number of words in document i
                ndsum[m] = N;
            }

            theta = new double[M, K];
            phi = new double[K, V];
            // NOTE(review): this overwrites the trailing-separator-stripped dir
            // set by Init with the raw option value — confirm intended.
            dir = option.dir;
            savestep = option.savestep;

            return true;
        }
        #endregion

    }
}
