﻿using System.Collections.Generic;
using System.Diagnostics;
using lexer.managed;

namespace Language.Parser
{
    [DebuggerDisplay( "{Id} ({Line})[{Position}-{End}]" )]
    internal class Token
    {
        public const uint COMMENT_ID = 1000;

        #region Public Properties

        public uint Id { get; set; }
        public uint Position { get; set; }
        public uint End { get; set; }
        public uint Line { get; set; }
        public uint LineStart { get; set; }

        public string Text { get; set; }

        #endregion Public Properties
    }

    /// <summary>
    /// Lexer callback sink that collects reported tokens, sequences, and
    /// comments into <see cref="Tokens"/>, applying the position and line
    /// offsets supplied via <see cref="Reset"/>.
    /// </summary>
    internal class Listener : lexer.managed.Listener
    {
        #region Private Data

        // Offsets added to every stored character position and line number so
        // tokens from an embedded fragment can be mapped back into the
        // enclosing document's coordinates.
        private uint _positionOffset;
        private uint _lineOffset;

        #endregion Private Data

        #region Public Properties

        /// <summary>
        /// Tokens collected since the last call to <see cref="Reset"/>.
        /// Initialized eagerly so callbacks that fire before the first Reset
        /// do not hit a null reference.
        /// </summary>
        public List<Token> Tokens { get; private set; } = new List<Token>();

        #endregion Public Properties

        #region Public Methods

        /// <summary>
        /// Clears the collected tokens and sets the offsets applied to all
        /// subsequently stored tokens.
        /// </summary>
        /// <param name="positionOffset">Character offset added to token positions.</param>
        /// <param name="lineOffset">Line offset added to token line numbers.</param>
        public void Reset( int positionOffset, int lineOffset )
        {
            Tokens = new List<Token>();
            // NOTE(review): negative offsets wrap around under the unchecked
            // uint cast — callers are expected to pass non-negative values.
            _positionOffset = (uint)positionOffset;
            _lineOffset = (uint)lineOffset;
        }

        #endregion Public Methods

        #region Private Methods

        // Shared implementation for StoreToken/StoreSequence: slices the token
        // text out of the source by byte range, applies the configured offsets
        // to the character/line coordinates, and appends the resulting Token.
        private void Store( State lexerState, uint id, uint startByte,
                            uint startCharacter, uint startLine, uint lineStartCharacter )
        {
            uint length = lexerState.CurrentContext.Byte - startByte;
            string text = lexerState.Source.Substring( (int)startByte, (int)length );

            Token token = new Token()
            {
                Id = id,
                Position = startCharacter + _positionOffset,
                End = lexerState.CurrentContext.Character + _positionOffset,
                Line = startLine + _lineOffset,
                LineStart = lineStartCharacter + _positionOffset,
                Text = text
            };

            Tokens.Add( token );
        }

        // Stores a single token spanning from the lexer's TokenStart to the
        // current context.
        private void StoreToken( State lexerState, uint id )
        {
            Store( lexerState, id,
                   lexerState.TokenStart.Byte,
                   lexerState.TokenStart.Character,
                   lexerState.TokenStart.Line,
                   lexerState.LineStart.Character );
        }

        // Stores a multi-token sequence spanning from the lexer's SequenceStart
        // to the current context.
        private void StoreSequence( State lexerState, uint id )
        {
            Store( lexerState, id,
                   lexerState.SequenceStart.Byte,
                   lexerState.SequenceStart.Character,
                   lexerState.SequenceStart.Line,
                   lexerState.SectionLineStart.Character );
        }

        #endregion Private Methods

        #region lexer.managed.Listener

        // Comments are stored as sequences under the reserved comment id.
        // (Removed dead code that recomputed length/text and discarded them —
        // StoreSequence already does that work.)
        void lexer.managed.Listener.Comment( State lexerState )
        {
            StoreSequence( lexerState, Token.COMMENT_ID );
        }

        // Lexing errors are reported to the debugger only; no token is stored.
        void lexer.managed.Listener.Error( State lexerState )
        {
            uint length = lexerState.CurrentContext.Byte - lexerState.TokenStart.Byte;
            string token = lexerState.Source.Substring( (int)lexerState.TokenStart.Byte, (int)length );

            Debug.Print( $"Error: {token}" );
        }

        void lexer.managed.Listener.Sequence( State lexerState, uint id )
        {
            StoreSequence( lexerState, id );
        }

        void lexer.managed.Listener.Token( State lexerState, uint id )
        {
            StoreToken( lexerState, id );
        }

        #endregion lexer.managed.Listener
    }
}
