﻿// Learn more about F# at http://fsharp.net

#r @"C:\Program Files (x86)\Sho 2.0 for .NET 4\packages\Optimizer\Microsoft.Solver.Foundation.dll"
open Microsoft.SolverFoundation.Common
open Microsoft.SolverFoundation.Solvers
open Microsoft.SolverFoundation.Services
open System


module TestLBFGS = 
    // Sanity check for the compact quasi-Newton (L-BFGS) solver:
    // minimize the Rosenbrock function
    //     f(x, y) = (1 - x)^2 + 100 * (y - x^2)^2
    // whose unique minimum is f(1, 1) = 0.
    let solverParams = new CompactQuasiNewtonSolverParams()
    let solver = new CompactQuasiNewtonSolver()

    // Add the two decision variables. The evaluators below index the
    // current point as values.[1] and values.[2], i.e. the vids handed
    // out here are assumed to be 1 and 2 — TODO confirm against the
    // solver's vid-assignment contract.
    let _, vidVaribaleX = solver.AddVariable(null)
    let _, vidVaribaleY = solver.AddVariable(null)

    // Add a row and register it as the goal (third argument true = minimize).
    // AddGoal returns an IGoal we don't need; discard it explicitly
    // (LogReg.makeSolver does the same).
    let _, vidRow = solver.AddRow(null)
    solver.AddGoal(vidRow, 0, true) |> ignore

    // Objective evaluator: f at the point currently held in `values`.
    let OriginalRosenbrockFunction = 
        let f (model:INonlinearModel)  (rowVid:int)  (values:ValuesByIndex)  (newValues:bool) = 
            Math.Pow(1. - values.[1], 2.) + 100. * (Math.Pow(values.[2] - (values.[1] * values.[1]), 2.))   
        new System.Func<INonlinearModel, int, ValuesByIndex, bool, float> (f)

    // Analytic gradient of f, written into `gradient`:
    //   df/dx = -2(1 - x) - 400 x (y - x^2)
    //   df/dy = 200 (y - x^2)
    let OriginalRosenbrockGradient = 
        let f (model:INonlinearModel) (rowVid:int) (values:ValuesByIndex) (newValues:bool) (gradient:ValuesByIndex) = 
            gradient.[1] <- -2. * (1. - values.[1]) - 400. * values.[1] * (values.[2] - (values.[1] * values.[1]))
            gradient.[2] <- 200. * (values.[2] - (values.[1] * values.[1]))
        new System.Action<INonlinearModel, int, ValuesByIndex, bool, ValuesByIndex> (f)

    solver.FunctionEvaluator <- OriginalRosenbrockFunction
    solver.GradientEvaluator <- OriginalRosenbrockGradient
    // Solve returns the solution object; discard it — the solver's own
    // ToString below reports the result.
    solver.Solve(solverParams) |> ignore

    Console.WriteLine(solver.ToString())


module Net = 
    open System.Net

    /// Synchronously downloads the body of [url] and returns it as a string.
    /// The response, its stream and the reader are all bound with `use` so
    /// they are disposed even if reading fails (the original leaked the
    /// WebResponse by binding it with `let`).
    let fetchUrlSimple (url:string) = 
        let req = WebRequest.Create(url) 
        use response = req.GetResponse()
        use stream = response.GetResponseStream()
        use streamreader = new System.IO.StreamReader(stream)
        streamreader.ReadToEnd()
     
// A labeled dataset for binary classification: features.[i] is the feature
// vector of instance i and labels.[i] its label (the LogReg code below
// treats labels as +1 / -1).
type dataset = 
    { features: float array array; // (instance = float array) array
      mutable labels: int array; // one label per instance; same length as features
    }
    with
    // Number of instances in the dataset.
    member x.NumSamples = x.features.Length
    
module LogReg = 
    /// L2 regularization strength; mutable so callers can tune it before
    /// calling makeSolver.
    let mutable lambda = 0.1

    /// Dot product of feature vector [x] with the solver's current point [g].
    /// Solver variable ids start at 1, hence the i+1 offset into g.
    let dotProduct (x:float array) (g:ValuesByIndex) =
        let mutable dot = 0.0
        for i=0 to x.Length-1 do
            dot <- dot + x.[i] * g.[i+1]
        dot

    /// sigma(y * w.x) for label y in {-1, +1}, with w read from the solver's
    /// ValuesByIndex — the probability the model assigns to the given label.
    // NOTE: exp can overflow/underflow for |y * w.x| large; the original code
    // had a clamp at +-30 commented out because it interfered with the L(w)
    // calculation. Left unclamped, matching the original behavior.
    let sigmoid (x:float array) (y:int) (g:ValuesByIndex) = 
        let z = (float y) * dotProduct x g
        1.0 / (1.0 + exp (- z))

    /// Same as sigmoid, but with the weight vector as a plain float array
    /// (0-based, no vid offset). Used for evaluating the trained model.
    let sigmoid2 (x:float array) (y:int) (g:float array) = 
        let mutable dot = 0.0
        for i=0 to x.Length-1 do
            dot <- dot + x.[i] * g.[i]
        1.0 / (1.0 + exp (- (float y) * dot))

    /// Objective evaluator: the L2-regularized log-likelihood
    ///   L(w) = -(lambda/2) ||w||^2 + sum_i log sigma(y_i * w.x_i)
    /// This is MAXIMIZED by the solver (see makeSolver's AddGoal).
    let logregValue(ds:dataset) = 
        let dim = ds.features.[0].Length

        let f (model:INonlinearModel)  (rowVid:int)  (values:ValuesByIndex)  (newValues:bool) = 
            // Regularization term: -(lambda/2) * ||w||^2 (vids are 1-based).
            let mutable L = 0.0
            for i=1 to dim do
                L <- L + values.[i]*values.[i]
            L <- - (L * lambda / 2.0)

            // Data term: sum over instances of log P(label_i | x_i, w).
            for i=0 to ds.features.Length-1 do
                L <- L + log (sigmoid ds.features.[i] ds.labels.[i] values)

            // printfn "L = %.10f" L
            L

        new System.Func<INonlinearModel, int, ValuesByIndex, bool, float> (f)

    /// Gradient evaluator for logregValue:
    ///   dL/dw_j = -lambda * w_j + sum_i (1 - sigma(y_i w.x_i)) * y_i * x_ij
    let logregGradient(ds:dataset) = 
        let dim = ds.features.[0].Length
        let f (model:INonlinearModel) (rowVid:int) (values:ValuesByIndex) (newValues:bool) (gradient:ValuesByIndex) = 
            // Regularization part.
            for j=1 to dim do 
                gradient.[j] <- -lambda * values.[j]

            // Data part: each instance contributes coef * x_i to the gradient.
            for i=0 to ds.features.Length-1 do
                let coef = (1.0 - sigmoid ds.features.[i] ds.labels.[i] values) * (float ds.labels.[i])
                for j=1 to dim do 
                    gradient.[j] <- gradient.[j] + coef * ds.features.[i].[j-1]

        new System.Action<INonlinearModel, int, ValuesByIndex, bool, ValuesByIndex> (f)

    /// Trains a logistic-regression model on [ds] with L-BFGS and returns the
    /// optimized weight vector w (0-based float array of length dim).
    let makeSolver(ds:dataset) = 
        // Set the solver parameters.
        let solverParams = new CompactQuasiNewtonSolverParams()
        //solverParams.IterationLimit <- 11
        //solverParams.Tolerance <- 1e-4
        // The L-BFGS memory parameter m (default 17).
        solverParams.IterationsToRemember <- 10

        let solver = new CompactQuasiNewtonSolver()

        // Add one variable per feature dimension (vids 1..dim).
        let dim = ds.features.[0].Length
        for i=1 to dim do 
            solver.AddVariable(null) |> ignore

        // Add a row and set it as the goal (false = maximize the likelihood).
        let _, vidRow = solver.AddRow(null)
        solver.AddGoal(vidRow, 0, false) |> ignore

        // Set the function evaluator and gradient evaluator.
        solver.FunctionEvaluator <- logregValue(ds)
        solver.GradientEvaluator <- logregGradient(ds)

        // Solve!
        solver.Solve(solverParams) |> ignore

        // Copy the optimized point (w) out of the solver (vids are 1-based,
        // the returned array is 0-based).
        let w = Array.create dim 0.0
        for i=1 to dim do
            w.[i-1] <- solver.GetValue(i).ToDouble()
        w


module TestLogReg = 
    // End-to-end test: fetch the UCI Iris dataset, keep the two classes
    // versicolor (-1) and virginica (+1), train, and report accuracy.
    let iris = 
        let page = Net.fetchUrlSimple @"http://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data"
        let lines = page.Split([|'\n'|], StringSplitOptions.RemoveEmptyEntries)
        // Map species name to a label; setosa gets the sentinel 2 and is
        // filtered out below so the problem stays binary.
        let getIrisId (s:string) =     
            if s = "Iris-setosa" then
                2
            elif s = "Iris-versicolor" then
                -1
            else
                1

        let instances = 
            lines
            |> Array.map (fun line -> 
                let s = line.Split ','
                // Prepend a constant 1.0 feature as the bias/intercept term.
                let features = [| "1.0"; s.[0]; s.[1]; s.[2]; s.[3] |] |> Array.map float
                let label = getIrisId s.[4]
                features, label
                )
            |> Array.filter (fun (_, label) -> label <> 2)
        
        let F, L = Array.unzip instances
        {
            features = F;
            labels = L;
        }

    let sol = LogReg.makeSolver(iris)
    let nWrongs = ref 0
    for i=0 to iris.features.Length-1 do
        // sigmoid2 gives the probability of the TRUE label, so > 0.5 means
        // the instance is classified correctly.
        let prob = LogReg.sigmoid2 iris.features.[i] iris.labels.[i] sol
        // BUG FIX: `else incr nWrongs; "wrong"` parsed as
        // `(if ... else incr nWrongs); "wrong"` (a unit/string branch
        // mismatch); the parens scope the increment to the else branch.
        let verdict = if prob > 0.5 then "correct" else (incr nWrongs; "wrong")
        printfn "L = %A sig value = %A label = %A" iris.labels.[i] prob verdict
    // Report the total so the count actually gets used.
    printfn "misclassified: %d of %d" !nWrongs iris.features.Length





