﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.IO;
using System.Threading;
using System.Threading.Tasks;

namespace CRFEncoder
{
    /// <summary>
    /// Trains a linear-chain CRF model: builds the feature index from a template and a
    /// training corpus, optimizes the feature weights with L-BFGS (L2 regularization)
    /// across multiple threads, and writes the resulting model to disk.
    /// </summary>
    public class Encoder
    {
        // Feature weight vector; shared with the feature index and updated in-place by L-BFGS.
        BigArray<float> alpha;

        public Encoder()
        {

        }

        /// <summary>
        /// Runs the full training pipeline: loads the template and corpus, builds and prunes
        /// the feature index, runs iterative optimization and saves the model file.
        /// </summary>
        /// <param name="templfile">Feature template file path.</param>
        /// <param name="trainfile">Training corpus file path.</param>
        /// <param name="modelfile">Output model file path.</param>
        /// <param name="maxitr">Maximum number of training iterations.</param>
        /// <param name="freq">Minimum feature frequency; rarer features are pruned.</param>
        /// <param name="eta">Convergence threshold on the relative objective change (must be &gt; 0).</param>
        /// <param name="C">L2 regularization strength (must be &gt;= 0).</param>
        /// <param name="thread_num">Number of worker threads (must be &gt; 0).</param>
        /// <returns>true on success; false on invalid arguments or any stage failure.</returns>
        public bool learn(string templfile,
                    string trainfile,
                    string modelfile,
                    int maxitr,
                    int freq,
                    float eta,
                    float C,
                    int thread_num)
        {
            // Validate hyper-parameters up-front; the negated comparisons also reject NaN.
            // (Fix: the original only printed the message and then trained anyway.)
            if (!(eta > 0.0))
            {
                Console.WriteLine("eta must be > 0.0");
                return false;
            }

            // NOTE(review): C == 0 passes this check but runCRF divides by C in the
            // regularizer (alpha[k] / C), producing Inf/NaN — confirm whether C must be > 0.
            if (!(C >= 0.0))
            {
                Console.WriteLine("C must be >= 0.0");
                return false;
            }

            if (!(thread_num > 0))
            {
                Console.WriteLine("thread must be > 0");
                return false;
            }

            EncoderFeatureIndex feature_index = new EncoderFeatureIndex(thread_num);
            List<TaggerImpl> x = new List<TaggerImpl>();
            feature_index.open(templfile, trainfile);

            Console.WriteLine("Reading training data: ");
            int line = 0;
            StreamReader ifs = new StreamReader(trainfile);
            try
            {
                while (ifs.EndOfStream == false)
                {
                    TaggerImpl _x = new TaggerImpl();
                    _x.open(ref feature_index);
                    if (_x.read(ref ifs) == false || _x.shrink() == false)
                    {
                        // Skip unreadable sentences instead of risking adding a broken record.
                        Console.WriteLine("Load a training sentence failed.");
                        continue;
                    }
                    if (_x.xsize_ == 0)
                    {
                        //Empty record, and skip it
                        continue;
                    }
                    x.Add(_x);
                    if (++line % 10000 == 0)
                    {
                        Console.WriteLine("{0}...", line);
                    }
                }
            }
            finally
            {
                // Ensure the reader is released even if loading throws. ('using' is not an
                // option here because ifs is passed by ref to TaggerImpl.read.)
                ifs.Close();
            }
            Console.WriteLine();

            // Prune features occurring fewer than 'freq' times, then persist the model header.
            feature_index.shrink(freq);
            if (!feature_index.saveHeader(modelfile))
            {
                Console.WriteLine("Save model file header failed!");
                return false;
            }
            // The lexical dictionary is no longer needed; drop the reference and reclaim the
            // (potentially very large) memory before allocating the weight vector.
            feature_index.dic_ = null;
            GC.Collect();
            Console.WriteLine("Loading training data done!");

            alpha = new BigArray<float>(feature_index.size(), 1);
            feature_index.set_alpha(ref alpha);

            Console.WriteLine("Number of sentences: " + x.Count);
            Console.WriteLine("Number of features:  " + feature_index.size());
            Console.WriteLine("Number of thread(s): " + thread_num);
            Console.WriteLine("Freq:                " + freq);
            Console.WriteLine("eta:                 " + eta);
            Console.WriteLine("C:                   " + C);

            if (!runCRF(ref x, ref feature_index, maxitr, C, eta, thread_num))
            {
                Console.WriteLine("CRF_L2 execute error");
                return false;
            }
            if (!feature_index.saveBody(modelfile))
            {
                Console.WriteLine("Save model body failed!");
                return false;
            }
            Console.WriteLine("\nDone!");

            return true;
        }

        // Guards the cross-partition accumulation of the objective inside runCRF's Parallel.For.
        static object ll = new object();

        /// <summary>
        /// Iterative CRF training loop: each iteration computes the objective and gradient in
        /// parallel worker threads, merges the per-thread results, adds the L2 penalty, and
        /// performs one L-BFGS update of the weight vector.
        /// </summary>
        /// <returns>true on normal termination; false if the L-BFGS optimizer reports an error.</returns>
        bool runCRF(ref List<TaggerImpl> x,
            ref EncoderFeatureIndex feature_index,
            int maxitr,
            float C,
            float eta,
            int thread_num)
        {
            float old_obj = 1e+37f;
            int converge = 0;
            LBFGS lbfgs = new LBFGS();
            List<CRFEncoderThread> processList = new List<CRFEncoderThread>();

            // One worker per thread; worker i processes sentences i, i+thread_num, i+2*thread_num, ...
            for (int i = 0; i < thread_num; i++)
            {
                CRFEncoderThread worker = new CRFEncoderThread();
                worker.start_i = i;
                worker.size = x.Count;
                worker.thread_num = thread_num;
                worker.x = x;
                worker.expected = new BigArray<float>(feature_index.size(), 1);
                worker.lbfgs = lbfgs;
                worker.Init();
                processList.Add(worker);
            }

            // Total number of tokens, used for the per-token error rate in the report.
            int all = 0;
            for (int i = 0; i < x.Count; ++i)
            {
                all += x[i].xsize_;
            }

            //Iterative training
            lbfgs.expected = new BigArray<float>(feature_index.size(), 1);
            DateTime startDT = DateTime.Now;

            for (int itr = 0; itr < maxitr; ++itr)
            {
                // Reset the per-iteration accumulators.
                lbfgs.obj = 0.0f;
                lbfgs.err = 0;
                lbfgs.zeroone = 0;
                lbfgs.expected.Clear();

                // Launch one gradient-computation thread per worker.
                List<Thread> threadList = new List<Thread>();
                for (int z = 0; z < thread_num; z++)
                {
                    Thread thread = new Thread(new ThreadStart(processList[z].Run));
                    thread.Start();
                    threadList.Add(thread);
                }

                // Per-label confusion counts (merr) and label frequencies (yfreq),
                // merged from every worker after it finishes.
                int[,] merr = new int[feature_index.y_.Count, feature_index.y_.Count];
                int[] yfreq = new int[feature_index.y_.Count];
                for (int i = 0; i < thread_num; ++i)
                {
                    // Join before reading the worker's results so they are fully written.
                    threadList[i].Join();
                    lbfgs.obj += processList[i].obj;
                    lbfgs.err += processList[i].err;
                    lbfgs.zeroone += processList[i].zeroone;

                    //Calculate error
                    for (int j = 0; j < feature_index.y_.Count; j++)
                    {
                        yfreq[j] += processList[i].yfreq[j];
                        for (int k = 0; k < feature_index.y_.Count; k++)
                        {
                            merr[j, k] += processList[i].merr[j, k];
                        }
                    }
                }

                // Merge per-thread gradients and add the L2 penalty; the penalty's
                // contribution to the objective is summed via the thread-local subtotal.
                // NOTE(review): feature indexes run 1..fsize here (1-based) — matches how
                // alpha/expected are sized as BigArray(feature_index.size(), 1); confirm.
                int fsize = feature_index.size();
                Parallel.For<float>(1, fsize + 1, () => 0, (k, loop, subtotal) =>
               {
                   subtotal += (alpha[k] * alpha[k] / (2.0f * C));
                   lbfgs.expected[k] += alpha[k] / C;
                   for (int i = 0; i < thread_num; i++)
                   {
                       lbfgs.expected[k] += processList[i].expected[k];
                   }

                   return subtotal;
               },
               (subtotal) => // Accumulator
               {
                   // Each partition touches distinct k, but all partitions add into obj.
                   lock (ll)
                   {
                       lbfgs.obj += subtotal;
                   }
               }
               );

                // Relative change of the objective; forced to 1.0 on the first iteration.
                float diff = (itr == 0 ? 1.0f : Math.Abs(old_obj - lbfgs.obj) / old_obj);
                old_obj = lbfgs.obj;

                ShowEvaluation(x, feature_index, lbfgs, all, itr, merr, yfreq, diff, startDT);
                if (diff < eta)
                {
                    converge++;
                }
                else
                {
                    converge = 0;
                }
                // Stop after 3 consecutive converged iterations (3 is ad-hoc).
                // (Fix: the original also tested 'itr > maxitr', which can never be true
                // inside this loop and was dead code.)
                if (converge == 3)
                {
                    break;
                }

                int iret = lbfgs.optimize(feature_index.size(), ref alpha, C);
                if (iret <= 0)
                {
                    return false;
                }
            }

            return true;
        }

        /// <summary>
        /// Prints the per-iteration evaluation report: for each label its frequency, total
        /// error rate and the most-confused target labels, followed by the overall token
        /// error, sentence error, objective diff and timing.
        /// </summary>
        private static void ShowEvaluation(List<TaggerImpl> x, EncoderFeatureIndex feature_index, LBFGS lbfgs, int all, int itr, int[,] merr, int[] yfreq, float diff, DateTime startDT)
        {
            TimeSpan ts = DateTime.Now - startDT;
            for (int i = 0; i < feature_index.y_.Count; i++)
            {
                int total_merr = 0;
                // Error rate -> labels confused at that rate, sorted so we can report worst-first.
                SortedDictionary<double, List<string>> sdict = new SortedDictionary<double, List<string>>();
                for (int j = 0; j < feature_index.y_.Count; j++)
                {
                    total_merr += merr[i, j];
                    // NOTE(review): yfreq[i] == 0 yields NaN/Infinity here — assumed not to
                    // occur for labels present in the training data; confirm.
                    double v = (double)merr[i, j] / (double)yfreq[i];
                    if (v > 0.0001)
                    {
                        if (sdict.ContainsKey(v) == false)
                        {
                            sdict.Add(v, new List<string>());
                        }
                        sdict[v].Add(feature_index.y_[j]);
                    }
                }
                double vet = (double)total_merr / (double)yfreq[i];
                vet = vet * 100.0;

                Console.ForegroundColor = ConsoleColor.Green;
                Console.Write("{0} ", feature_index.y_[i]);
                Console.ResetColor();
                Console.Write("[FR={0}, TE=", yfreq[i]);
                Console.ForegroundColor = ConsoleColor.Yellow;
                Console.Write("{0:0.00}%", vet);
                Console.ResetColor();
                Console.WriteLine("]");

                // Show the confused labels (highest error rate first) on a single ~80-column line.
                int n = 0;
                foreach (KeyValuePair<double, List<string>> pair in sdict.Reverse())
                {
                    foreach (string item in pair.Value)
                    {
                        n += item.Length + 1 + 7;
                        if (n > 80)
                        {
                            //only show data in one line, more data in tail will not be show.
                            break;
                        }
                        Console.Write("{0}:", item);
                        Console.ForegroundColor = ConsoleColor.Red;
                        Console.Write("{0:0.00}% ", pair.Key * 100);
                        Console.ResetColor();
                    }
                    if (n > 80)
                    {
                        break;
                    }
                }
                Console.WriteLine();
            }

            Console.WriteLine("iter={0} terr={1:0.000000} serr={2:0.000000} diff={3} fsize={4}", itr, 1.0 * lbfgs.err / all, 1.0 * lbfgs.zeroone / x.Count, diff, feature_index.size());
            Console.WriteLine("Time span: {0}, Aver. time span per iter: {1}", ts, new TimeSpan(0, 0, (int)(ts.TotalSeconds / (itr + 1))));
        }
    }
}
