﻿using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace GibbsLDASharp.LDA
{
    /// <summary>
    /// Estimates an LDA model via collapsed Gibbs sampling, either from
    /// scratch (est) or by continuing a previously estimated model (estc).
    /// </summary>
    public class Estimator
    {
        // output model being trained
        protected Model trnModel;

        // parsed command-line options controlling estimation
        LDACmdOption option;

        // Single PRNG shared by all sampling calls. The original code created
        // a new Random inside Sampling() on every call; time-seeded instances
        // constructed in quick succession yield identical/correlated sequences
        // and badly skew the sampler.
        private readonly Random rand = new Random();

        /// <summary>
        /// Initialize for est or estc, according to option param.
        /// </summary>
        /// <param name="option">Parsed command-line options.</param>
        /// <returns>true on success; false if model initialization failed.</returns>
        public bool Initialize(LDACmdOption option)
        {
            this.option = option;
            trnModel = new Model();

            if (option.est)
            {
                if (!trnModel.InitNewModel(option))
                    return false;
                // Path.Combine picks the platform separator and copes with an
                // empty directory, replacing the hand-rolled "\" concatenation.
                trnModel.data.localDict.WriteWordMap(Path.Combine(option.dir ?? string.Empty, option.wordMapFileName));
            }
            else if (option.estc)
            {
                if (!trnModel.InitEstimatedModel(option))
                    return false;
            }

            return true;
        }

        /// <summary>
        /// Run the Gibbs sampling loop, periodically saving intermediate
        /// models every option.savestep iterations, then compute the final
        /// theta/phi distributions and save "model-final".
        /// </summary>
        public void Estimate()
        {
            Console.WriteLine("Sampling " + trnModel.niters + " iteration!");

            int lastIter = trnModel.liter;
            // NOTE(review): the "<" bound performs niters - 1 new iterations.
            // This mirrors the JGibbLDA port this code derives from and is
            // preserved for compatibility with its saved-model numbering.
            for (trnModel.liter = lastIter + 1; trnModel.liter < trnModel.niters + lastIter; trnModel.liter++)
            {
                Console.WriteLine("Iteration " + trnModel.liter + " ...");

                // resample z_i = z[m][n] for every word of every document
                for (int m = 0; m < trnModel.M; m++)
                {
                    for (int n = 0; n < trnModel.data.docs[m].length; n++)
                    {
                        // sample from p(z_i | z_-i, w)
                        int topic = Sampling(m, n);
                        trnModel.z[m][n] = topic;
                    }// end for each word
                }// end for each document

                if (option.savestep > 0 && trnModel.liter % option.savestep == 0)
                {
                    Console.WriteLine("Saving the model at iteration " + trnModel.liter + " ...");
                    ComputeTheta();
                    ComputePhi();
                    trnModel.SaveModel("model-" + Conversion.ZeroPad(trnModel.liter, 5));
                }
            }// end iterations

            Console.WriteLine("Gibbs sampling completed!\n");
            Console.WriteLine("Saving the final model!\n");
            ComputeTheta();
            ComputePhi();
            trnModel.liter--; // loop exits one past the last completed iteration
            trnModel.SaveModel("model-final");
        }

        /// <summary>
        /// Draw a new topic assignment for one word via collapsed Gibbs
        /// sampling: remove the current assignment from the count matrices,
        /// sample a topic from the full conditional p(z_i | z_-i, w), and
        /// add the new assignment back.
        /// </summary>
        /// <param name="m">document number</param>
        /// <param name="n">word number</param>
        /// <returns>The newly sampled topic index in [0, K).</returns>
        private int Sampling(int m, int n)
        {
            // remove z_i from the count variables
            int topic = trnModel.z[m][n];
            int w = trnModel.data.docs[m].words[n];

            trnModel.nw[w, topic] -= 1;
            trnModel.nd[m, topic] -= 1;
            trnModel.nwsum[topic] -= 1;
            trnModel.ndsum[m] -= 1;

            // hoisted smoothing terms, invariant over k
            double Vbeta = trnModel.V * trnModel.beta;
            double Kalpha = trnModel.K * trnModel.alpha;

            // do multinomial sampling via the cumulative method
            for (int k = 0; k < trnModel.K; k++)
            {
                trnModel.p[k] = (trnModel.nw[w, k] + trnModel.beta) / (trnModel.nwsum[k] + Vbeta) *
                        (trnModel.nd[m, k] + trnModel.alpha) / (trnModel.ndsum[m] + Kalpha);
            }

            // cumulate multinomial parameters
            for (int k = 1; k < trnModel.K; k++)
            {
                trnModel.p[k] += trnModel.p[k - 1];
            }

            // scaled sample because of unnormalized p[]; uses the shared
            // field PRNG instead of a freshly seeded Random per call
            double u = rand.NextDouble() * trnModel.p[trnModel.K - 1];

            for (topic = 0; topic < trnModel.K; topic++)
            {
                if (trnModel.p[topic] > u) // sample topic w.r.t distribution p
                    break;
            }

            // add newly estimated z_i to count variables
            trnModel.nw[w, topic] += 1;
            trnModel.nd[m, topic] += 1;
            trnModel.nwsum[topic] += 1;
            trnModel.ndsum[m] += 1;

            return topic;
        }

        /// <summary>
        /// Recompute the document-topic distribution theta[m, k] from the
        /// current counts, with Dirichlet smoothing by alpha.
        /// </summary>
        private void ComputeTheta()
        {
            for (int m = 0; m < trnModel.M; m++)
            {
                for (int k = 0; k < trnModel.K; k++)
                {
                    trnModel.theta[m, k] = (trnModel.nd[m, k] + trnModel.alpha) / (trnModel.ndsum[m] + trnModel.K * trnModel.alpha);
                }
            }
        }

        /// <summary>
        /// Recompute the topic-word distribution phi[k, w] from the current
        /// counts, with Dirichlet smoothing by beta.
        /// </summary>
        private void ComputePhi()
        {
            for (int k = 0; k < trnModel.K; k++)
            {
                for (int w = 0; w < trnModel.V; w++)
                {
                    trnModel.phi[k, w] = (trnModel.nw[w, k] + trnModel.beta) / (trnModel.nwsum[k] + trnModel.V * trnModel.beta);
                }
            }
        }
    }
}
