﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.IO;

namespace ListNetRanker
{
    /// <summary>
    /// Trains and evaluates a ListNet listwise learning-to-rank model via
    /// gradient descent on the listwise cross-entropy loss.
    /// </summary>
    public class Trainer
    {
        /// <summary>
        /// Evaluates <paramref name="model"/> over every sample produced by
        /// <paramref name="reader"/> and prints the top-1 error rate: the
        /// fraction of samples whose highest-scored document is not the
        /// document with the highest labelled relevance.
        /// </summary>
        /// <param name="model">Trained model used to score each document's features.</param>
        /// <param name="reader">Sample source; it is reset before evaluation.</param>
        public static void Test(ListNetModule model, DataReader reader)
        {
            reader.reset();
            int total_sample = 0;
            int error_sample = 0;

            Sample sample = null;
            while ((sample = reader.getNextSample()) != null)
            {
                total_sample++;

                // Apply the model's own feature normalizer so scoring sees the
                // same feature scale that training used.
                sample = model.GetNormalizer().normalize(sample);
                List<Document> doclist = sample.documents;
                List<IndexScore> rankedIndexScoreList = new List<IndexScore>();
                List<IndexScore> rawIndexScoreList = new List<IndexScore>();

                int pos = 0;
                foreach (Document item in doclist)
                {
                    double score = model.GetRankScore(item.features);
                    rankedIndexScoreList.Add(new IndexScore(pos, score));
                    rawIndexScoreList.Add(new IndexScore(pos, item.relevance));
                    pos++;
                }
                rankedIndexScoreList.Sort();
                rawIndexScoreList.Sort();

                // Error: the model's best document differs from the document
                // with the highest ground-truth relevance.
                if (rankedIndexScoreList[0].index != rawIndexScoreList[0].index)
                {
                    error_sample++;
                }
            }

            // Guard against an empty corpus: 0/0 would print NaN.
            double errorRatePercent = total_sample > 0
                ? (double)error_sample / total_sample * 100.0
                : 0.0;
            Console.WriteLine("Ranked top 1 Sample error rate: {0}/{1} [ {2}% ]", error_sample, total_sample, errorRatePercent);
        }


        /// <summary>
        /// Trains a ListNet model from the given corpus file using gradient
        /// descent. For each sample, minimizes the cross entropy between the
        /// target top-1 probabilities (softmax over labelled relevance) and
        /// the model's top-1 probabilities (softmax over predicted scores).
        /// </summary>
        /// <param name="strTrainingCorpus">Path to the training corpus file.</param>
        /// <returns>The trained model, bundled with its feature normalizer.</returns>
        /// <exception cref="InvalidOperationException">The corpus contains no samples.</exception>
        public static ListNetModule train(string strTrainingCorpus)
        {
            Console.WriteLine("Load training corpus and normalize features...");
            DataReader reader = new DataReader();
            reader.LoadSamplesFromFile(strTrainingCorpus);
            Normalizer nor = Normalizer.CreateNormalizer(reader);
            reader.reset();

            // Fail fast with a clear message instead of a NullReferenceException
            // when the corpus is empty.
            Sample firstSample = reader.getNextSample();
            if (firstSample == null)
            {
                throw new InvalidOperationException("Training corpus contains no samples: " + strTrainingCorpus);
            }
            int featureSize = firstSample.documents[0].features.Count;

            // Weights start at zero; C# zero-initializes new arrays.
            double[] weights = new double[featureSize];

            //Begin to training
            double[] oldweights = new double[weights.Length];
            for (int i = 0; i < Parameters.getEpochNum(); i++)
            {
                reader.reset();
                Sample sample = null;
                weights.CopyTo(oldweights, 0);

                while ((sample = reader.getNextSample()) != null)
                {
                    sample = nor.normalize(sample);
                    List<Document> doclist = sample.documents;

                    // Precompute exp terms once per sample. The scores Z are
                    // fixed for this sample (weights only change after they are
                    // computed), so exp(Z) and its sum are loop-invariant with
                    // respect to the per-feature loop below.
                    double[] ExpZList = new double[doclist.Count];
                    double[] ExpYList = new double[doclist.Count];
                    double ExpYSum = 0.0;
                    double ExpZSum = 0.0;
                    for (int doc = 0; doc < doclist.Count; doc++)
                    {
                        double z = DotMultiply.dotMutply(weights, doclist[doc].features);
                        ExpZList[doc] = Math.Exp(z);
                        ExpZSum += ExpZList[doc];
                        ExpYList[doc] = Math.Exp(doclist[doc].relevance);
                        ExpYSum += ExpYList[doc];
                    }

                    // Gradient of the listwise cross entropy for feature v:
                    //   dL/dw_v = -sum_j P_y(j)*x_jv + sum_j P_z(j)*x_jv
                    // where P_y = softmax(relevance) and P_z = softmax(score).
                    for (int v = 0; v < featureSize; v++)
                    {
                        double targetTerm = 0.0; // sum_j exp(y_j) * x_jv
                        double modelTerm = 0.0;  // sum_j exp(z_j) * x_jv
                        for (int doc = 0; doc < doclist.Count; doc++)
                        {
                            targetTerm += ExpYList[doc] * doclist[doc].features[v];
                            modelTerm += ExpZList[doc] * doclist[doc].features[v];
                        }

                        double deltaW = -targetTerm / ExpYSum + modelTerm / ExpZSum;
                        weights[v] -= Parameters.getStep() * deltaW;
                    }
                }

                // Report convergence as the squared L2 distance between the
                // weight vectors before and after this epoch.
                double sum = 0.0;
                for (int v = 0; v < weights.Length; v++)
                {
                    sum += Math.Pow(oldweights[v] - weights[v], 2);
                }
                Console.WriteLine("Finish training " + (i + 1) + "/" + Parameters.getEpochNum() + " Variance:" + sum);

                Console.WriteLine("Weight List: ");
                for (int v = 0; v < weights.Length; v++)
                {
                    Console.WriteLine("weight[{0}] = {1} (min:{2}, max:{3})", v, weights[v], nor.maxminList[v].getMin(), nor.maxminList[v].getMax());
                }

                // Evaluate on the training corpus after each epoch to track progress.
                Test(ListNetModule.CreateInstance(weights, nor), reader);
            }
            return ListNetModule.CreateInstance(weights, nor);
        }
    }
}
