﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Cloo;
using System.Diagnostics.Contracts;
using System.Diagnostics;
using System.Runtime.InteropServices;

namespace SimpleCLML.LogisticRegression
{
    /// <summary>
    /// Trains a binary logistic regression model from a set of labeled examples,
    /// delegating the actual optimization to an <c>ILogisticRegressionSolver</c>.
    /// </summary>
    public sealed class LogisticRegressionTrainer
    {
        private readonly List<TrainingExample> examples;

        // Number of model parameters: raw feature count plus one bias term.
        private readonly int featureCount;
        private readonly int exampleCount;

        /// <summary>
        /// Captures the training set and derives the model dimensions.
        /// </summary>
        /// <param name="inputExamples">
        /// Non-empty sequence of training examples. All examples are assumed to have
        /// the same feature vector length as the first one.
        /// </param>
        /// <exception cref="ArgumentNullException"><paramref name="inputExamples"/> is null.</exception>
        /// <exception cref="ArgumentException"><paramref name="inputExamples"/> is empty.</exception>
        public LogisticRegressionTrainer(IEnumerable<TrainingExample> inputExamples)
        {
            if (inputExamples == null)
                throw new ArgumentNullException(nameof(inputExamples));

            // Materialize exactly once. The previous version called Any() and then
            // ToList(), enumerating the sequence twice — wasteful, and broken for
            // one-shot enumerables such as iterator methods or data readers.
            examples = inputExamples.ToList();
            if (examples.Count == 0)
                throw new ArgumentException("inputExamples is empty", nameof(inputExamples));

            featureCount = examples[0].x.Length + 1; // +1 for the bias feature
            exampleCount = examples.Count;
        }

        /// <summary>
        /// Packs the training data into solver-aligned matrices, runs the solver,
        /// and returns the fitted model.
        /// </summary>
        /// <param name="maxIterations">Upper bound on solver iterations.</param>
        /// <param name="errorThreshold">Convergence tolerance passed to the solver.</param>
        /// <param name="learningRate">Step size; only used by the gradient-descent
        /// solver — the current L-BFGS solver ignores it.</param>
        /// <returns>A trained <see cref="LogisticRegression"/> model with
        /// <c>featureCount</c> parameters (bias first).</returns>
        public LogisticRegression Train(int maxIterations = 1280, float errorThreshold = 1.0e-6f, float learningRate = 0.5f)
        {
            //using (ILogisticRegressionSolver gd = new LogisticRegressionGradientDescent(maxIterations, errorThreshold, learningRate))
            using (ILogisticRegressionSolver gd = new LogisticRegressionLBFGS(maxIterations, errorThreshold, 256))
            {
                // Round both dimensions up to the solver's alignment requirements;
                // the padding rows/columns stay zero-filled.
                int exampleAlignment = gd.ExampleAlignmentRequirement;
                int featureAlignment = gd.FeatureAlignmentRequirement;
                int alignedFeatureCount = featureCount % featureAlignment == 0 ? featureCount : featureCount + featureAlignment - featureCount % featureAlignment;
                int alignedExampleCount = exampleCount % exampleAlignment == 0 ? exampleCount : exampleCount + exampleAlignment - exampleCount % exampleAlignment;

                /* matrices in column-major order */
                float[] X = new float[alignedFeatureCount * alignedExampleCount];
                float[] Y = new float[alignedExampleCount];

                for (int i = 0; i < examples.Count; ++i)
                {
                    var example = examples[i];
                    Y[i] = example.y ? 1.0f : 0.0f;
                    X[i] = 1.0f; // bias column (feature 0) is all ones
                    for (int j = 1; j < featureCount; j++)
                    {
                        // Column-major: element (row i, column j) lives at i + j * rows.
                        X[i + j * alignedExampleCount] = example.x[j - 1];
                    }
                }

                gd.Init(X, Y, exampleCount, null);
                // Drop our references so the (potentially large) buffers are
                // collectible once the solver has copied them to the device.
                X = null;
                Y = null;
                float[] theta = gd.Run();
                Array.Resize<float>(ref theta, featureCount); // cut alignment padding
                return new LogisticRegression(theta);
            }
        }
    }
}
