#pragma warning disable 108
using System;
using System.Runtime.InteropServices;
using System.Collections.Generic;
using Cephei;
using Cephei.Core;
using Cephei.Core.Generic;
using Microsoft.FSharp.Core;
using Cephei.QL.Math;
namespace Cephei.QL.Math.Optimization
{
    /// <summary> 
	/// Non-linear least-squares solver interface. Using a given optimization algorithm (default is conjugate gradient),  \f[ min \{ r(x) : x \in R^n \} \f]  where \f$ r(x) = |f(x)|^2 \f$ is the Euclidean norm of \f$ f(x) \f$ for some vector-valued function \f$ f \f$ from \f$ R^n \f$ to \f$ R^m \f$, \f[ f = (f_1, ..., f_m) \f] with \f$ f_i(x) = b_i - \phi(x,t_i) \f$ where \f$ b \f$ is the vector of target data and \f$ \phi \f$ is a scalar function.  Assuming the differentiability of \f$ f \f$, the gradient of \f$ r \f$ is defined by \f[ grad r(x) = f'(x)^t.f(x) \f]
	/// </summary>
    [Guid ("E36438CF-5B3A-4282-A69C-7553E6F3AC00"),ComVisible(true)]
	public interface INonLinearLeastSquare 
	{
		///////////////////////////////////////////////////////////////
        // Methods
        //
        /// <summary> 
		/// Status flag reported by the optimizer once <see cref="Perform"/> has run.
		/// NOTE(review): the meaning of individual flag values is defined by the
		/// underlying implementation, which is not visible here — confirm before relying on it.
		/// </summary>
		 Int32 ExitFlag {get;}
        /// <summary> 
		/// Number of iterations the optimizer performed — presumably populated by
		/// <see cref="Perform"/>; verify against the implementation.
		/// </summary>
		 Int32 IterationsNumber {get;}
        /// <summary> 
		/// Last recorded value of the cost function \f$ r(x) \f$ — presumably at the
		/// final iterate of <see cref="Perform"/>; verify against the implementation.
		/// </summary>
		 Double LastValue {get;}
        /// <summary> 
		/// Solves the given least-square problem and returns the optimized
		/// parameter vector.
		/// </summary>
		/// <param name="lsProblem">The least-square problem to minimize.</param>
		/// <returns>The array of optimized parameters.</returns>
		 Cephei.QL.Math.IArray Perform(Cephei.QL.Math.Optimization.ILeastSquareProblem lsProblem);
        /// <summary> 
		/// Norm of the residual vector \f$ f(x) \f$ at the solution — presumably the
		/// Euclidean norm per the class summary; verify against the implementation.
		/// </summary>
		 Double ResidualNorm {get;}
        /// <summary> 
		/// The solution vector found by the most recent call to <see cref="Perform"/>.
		/// </summary>
		 Cephei.QL.Math.IArray Results {get;}
        /// <summary> 
		/// Sets the initial guess for the parameter vector used by <see cref="Perform"/>.
		/// </summary>
		/// <param name="initialValue">Starting point for the optimization.</param>
		/// <returns>An <see cref="INonLinearLeastSquare"/> — presumably the same
		/// instance, allowing fluent chaining; confirm against the implementation.</returns>
		 INonLinearLeastSquare SetInitialValue(Cephei.QL.Math.IArray initialValue);
    }   

    /// <summary> 
	/// Factory for <see cref="INonLinearLeastSquare"/> instances. Using a given optimization algorithm (default is conjugate gradient),  \f[ min \{ r(x) : x \in R^n \} \f]  where \f$ r(x) = |f(x)|^2 \f$ is the Euclidean norm of \f$ f(x) \f$ for some vector-valued function \f$ f \f$ from \f$ R^n \f$ to \f$ R^m \f$, \f[ f = (f_1, ..., f_m) \f] with \f$ f_i(x) = b_i - \phi(x,t_i) \f$ where \f$ b \f$ is the vector of target data and \f$ \phi \f$ is a scalar function.  Assuming the differentiability of \f$ f \f$, the gradient of \f$ r \f$ is defined by \f[ grad r(x) = f'(x)^t.f(x) \f]
	/// </summary>
   	[ComVisible(true)]
    public interface INonLinearLeastSquare_Factory 
    {
        ///////////////////////////////////////////////////////////////
        // Factory methods
        //
        /// <summary> 
		/// Creates a non-linear least-squares solver with an explicit
		/// optimization method.
		/// </summary>
		/// <param name="c">Constraint applied to the parameter vector.</param>
		/// <param name="accuracy">Target accuracy for the optimization.</param>
		/// <param name="maxiter">Maximum number of iterations.</param>
		/// <param name="om">The optimization method to use.</param>
		/// <returns>A configured <see cref="INonLinearLeastSquare"/> instance.</returns>
	    INonLinearLeastSquare Create (Cephei.QL.Math.Optimization.IConstraint c, Double accuracy, UInt64 maxiter, Cephei.QL.Math.Optimization.IOptimizationMethod om);
        /// <summary> 
		/// Creates a non-linear least-squares solver with optional accuracy and
		/// iteration limit. NOTE(review): defaults for omitted options (and the
		/// default optimization method — presumably conjugate gradient, per the
		/// summary above) are set by the implementation; confirm before relying on them.
		/// </summary>
		/// <param name="c">Constraint applied to the parameter vector.</param>
		/// <param name="accuracy">Optional target accuracy; implementation default when None.</param>
		/// <param name="maxiter">Optional maximum number of iterations; implementation default when None.</param>
		/// <returns>A configured <see cref="INonLinearLeastSquare"/> instance.</returns>
	    INonLinearLeastSquare Create (Cephei.QL.Math.Optimization.IConstraint c, Microsoft.FSharp.Core.FSharpOption<Double> accuracy, Microsoft.FSharp.Core.FSharpOption<UInt64> maxiter);
    }
}

