﻿namespace CMakeParsing.Interface
//    CMake Classifier:  Syntax coloring and highlighting Visual Studio Extension
//    Copyright (C) 2011 Steven Velez
//
//    This file is part of CMake Classifier
//
//    CMake Classifier is free software: you can redistribute it and/or modify
//    it under the terms of the GNU Lesser General Public License as published by
//    the Free Software Foundation, either version 3 of the License, or
//    (at your option) any later version.
//
//    This program is distributed in the hope that it will be useful,
//    but WITHOUT ANY WARRANTY; without even the implied warranty of
//    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
//    GNU Lesser General Public License for more details.
//
//    You should have received a copy of the GNU Lesser General Public License
//    along with this program.  If not, see <http://www.gnu.org/licenses/>.

open System.Collections.Generic
open CMakeParsing.ExpressionParser.Ast
open CMakeParsing.ExpressionParser
open Microsoft.FSharp.Text.Lexing

/// An immutable value describing one styled span of text: the index at which
/// the span starts, how many characters it covers, and the name of the style
/// class that should be applied to it.
[<Struct>]
type public StyleToken =
    val startIdx : int
    val length : int
    val styleClass : string
    /// Builds a token covering [st, st + l) styled with class stl.
    new (st: int, l: int, stl: string) =
        { startIdx = st; length = l; styleClass = stl }

/// Internal bookkeeping record pairing a scope-changing lexer result with the
/// character offset at which it was observed.
type private ScopeEntry =
    { offset : int               // character offset of the scope change
      description : ClassResult } // the lexer result that opened/closed the scope

/// Maintains a model of scope-changing tokens (string/parameter scopes) seen
/// so far and converts lexer output into StyleTokens for highlighting.
/// Scope entries are kept in a linked list sorted by ascending offset so the
/// correct tokenizer can be selected for any span start position.
/// NOTE(review): offsets appear to be absolute positions in the whole
/// document (spans pass their own `pos` in) — confirm against callers.
type public SyntaxStyleModel() =
    // Track the token-ish things that change the tokenization scope
    let _scopes = new LinkedList<ScopeEntry>()

    // Make a token function cast to obj callable again.
    // ClassResult carries its continuation tokenizer as a boxed obj; this
    // downcasts it back to a callable lexer function.
    member private this.fnCast ( fn: obj ) =
        fn :?> char LexBuffer->ClassResult

    // Centralize logic for updating and creating result lists.
    // Stores the next tokenizer into outfn, then returns Some StyleToken at
    // (start + position), or None when the lexer reported a negative start
    // (i.e. nothing styleable for this token).
    member private this.updateCalls(start, position, length, className, fn,  
                                    outfn: (char LexBuffer->ClassResult) byref) =
        outfn <- this.fnCast fn
        if start >= 0 then
            Some(new StyleToken( start + position, length, className ))
        else
            None

    // Get a sequence of linked list NODES, since treating the linked list
    // as a sequence gives you the values, and those aren't efficiently useful
    // for stuff like remove.
    // The infinite sequence yields null forever once the list is exhausted;
    // consumers must stop at the first null node (see EraseRange).
    member private this.scopeNodesSequence () =
        let current = ref _scopes.First
        Seq.initInfinite (fun _ ->
            let last = !current
            if null <> last then
                current := last.Next
            last )

    // Put a new scope token where it belongs in the list of scopes.
    // Fast-paths the common append case (empty list, or offset past the
    // current last entry); otherwise inserts before the first node whose
    // offset is >= the new entry's, keeping _scopes sorted by offset.
    member private this.PopulateScopes res sOff = 
        let insertRec = { offset = sOff; description = res }
        match _scopes.Last with
        | null -> _scopes.AddLast( insertRec ) |> ignore
        | x when (x.Value.offset < insertRec.offset ) -> _scopes.AddLast( insertRec ) |> ignore
        | _ -> _scopes.AddBefore( this.scopeNodesSequence() |> Seq.find (fun n -> n.Value.offset >= insertRec.offset ), insertRec ) |> ignore

    // Handle a token that opens a scope: record it (when bPopulate is set)
    // at the token's first character, then produce the StyleToken.
    member private this.OpenScope( res, s, pos, length, className, fn, 
                                   outfn: (char LexBuffer->ClassResult) byref, bPopulate) =
        if bPopulate then
            this.PopulateScopes res (s + pos)
        this.updateCalls( s, pos, length, className, fn, &outfn )

    // Handle a token that closes a scope: same recording rule as OpenScope.
    member private this.CloseScope( res, s, pos, length, className, fn, 
                                    outfn: (char LexBuffer->ClassResult) byref, bPopulate) =
        if bPopulate then
            this.PopulateScopes res (s + pos)
        this.updateCalls( s, pos, length, className, fn, &outfn )

    // Handle a close string scope token, since this thing usually has more
    // than just the scope closer: the scope entry is recorded at the LAST
    // character of the token (s + pos + length - 1) instead of the first.
    member private this.CloseStringScope( res, s, pos, length, className, fn, 
                                          outfn: (char LexBuffer->ClassResult) byref, bPopulate) =
        if bPopulate then
            this.PopulateScopes res ( s + pos + length - 1 )
        this.updateCalls( s, pos, length, className, fn, &outfn )

    // Get the tokenizer function from a Class result
    // ( I wonder if there is a more elegant way to do this without full-blown classes )
    // Falls back to the default Lexer.token for results carrying no
    // continuation tokenizer.
    member private this.fnFromClassResult ( res: ScopeEntry ) =
        match res.description with
        | StringBegin( _, _, _, fn ) -> this.fnCast fn
        | StringEnd( _, _, _, fn ) -> this.fnCast fn
        | ParamsBegin( _, _, _, fn ) -> this.fnCast fn
        | ParamsEnd( _, _, _, fn ) -> this.fnCast fn
        | _ -> Lexer.token

    // Use the scopes to find out which tokenizer we should start a span with.
    // Picks the last recorded scope entry strictly before `offset`; when none
    // exists the default Lexer.token applies.
    // NOTE(review): the `length` parameter is unused here — confirm whether
    // it was intended to clip the lookup against the span's end.
    member private this.lookupTokenizer offset length =
        let options = _scopes |> Seq.takeWhile (fun x -> x.offset < offset ) 
        if  Seq.isEmpty options then
            Lexer.token
        else 
            options |> Seq.nth (Seq.length options - 1) |> this.fnFromClassResult

    // Let the model know that some of the old text has gone away.
    // Entries with offsets in [rangeStart, rangeEnd) are removed; entries at
    // or beyond rangeEnd are shifted in place by changeDelta so they track
    // the edited text. Node iteration stays safe because scopeNodesSequence
    // captures each node's Next before the downstream Remove runs.
    member public this.EraseRange( rangeStart: int, rangeEnd: int, changeDelta: int ) =
        this.scopeNodesSequence()
        |> Seq.takeWhile (fun node -> node <> null )
        |> Seq.choose (fun node -> 
            let nodeOffset = node.Value.offset
            // yes... a side effect... so? (offsets are shifted here while the
            // same pass selects the nodes that fall inside the erased range)
            if nodeOffset >= rangeEnd then 
                node.Value <- { node.Value with offset = node.Value.offset + changeDelta }
            if nodeOffset >= rangeStart && nodeOffset < rangeEnd then Some(node) else None )
        |> Seq.iter (fun node -> _scopes.Remove( node ) )
                
    // Provide StyleTokens representing the provided text.
    // pos is the document position of `text`; freshScanLine controls whether
    // newly seen scope tokens get recorded into _scopes.
    member public this.TokensFor( pos: int, text: string, freshScanLine: bool) =
        let lexbuf = LexBuffer<char>.FromString(text)

        // Scan the incoming string, starting with whichever tokenizer the
        // recorded scopes say is active at this position.
        let nextTokenFn = ref (this.lookupTokenizer pos text.Length)

        // First step makes a sequence from the lexbuf that never ends
        Seq.initInfinite( fun _ ->
            if lexbuf.IsPastEndOfStream then
                // No more to parse, so tell the sequence generator reader to stop reading.
                None
            else
                let result = !nextTokenFn lexbuf
                Some(
                    match result with
                        | StringBegin( s, l, c,  fn ) -> 
                            this.OpenScope(result, s, pos, l, c, fn, nextTokenFn, freshScanLine)
                        | StringEnd( s, l, c,  fn ) -> 
                            this.CloseStringScope(result, s, pos, l, c, fn, nextTokenFn, freshScanLine)
                        | ParamsBegin( s, l, c,  fn ) -> 
                            this.OpenScope(result, s, pos, l, c, fn, nextTokenFn, freshScanLine)
                        | ParamsEnd( s, l, c,  fn ) -> 
                            this.CloseScope(result, s, pos, l, c, fn, nextTokenFn, freshScanLine)
                        | NoScope( s, l, c,  fn ) -> 
                            this.updateCalls(s, pos, l, c, fn, nextTokenFn) ) )
        // This step makes it end when the lexbuf is exhausted
        |> Seq.takeWhile (fun inres -> inres.IsSome )
        // This step pulls the result option out of the list control option.
        |> Seq.map( fun inres -> inres.Value )  
        // And this one makes sure that we only pass back actual tokens since they
        // are wrapped in an option (get None when the start position is negative...)
        |> Seq.choose ( fun x -> x )
