﻿using ArduinoAnalyzer.Tokens;
using ArduinoAnalyzer.Tokens.LexicalAnalysis;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace ArduinoAnalyzer.LexicalAnalysis
{
    /// <summary>
    /// One state of the lexer's finite automaton. States are wired together
    /// with <see cref="AddState(System.Func{char,bool}, State)"/> transitions
    /// and optionally marked as accepting via <see cref="SetToken(TokenType)"/>;
    /// <see cref="Resolve"/> then walks the automaton over the input text to
    /// produce a <see cref="Token"/>.
    /// </summary>
    public class State
    {
        #region Private Members

        // Transitions in registration order: the FIRST predicate that matches
        // the current character wins. A List (not a Dictionary) is used here
        // because Dictionary enumeration order is an implementation detail —
        // relying on it for transition priority is a latent bug when
        // predicates overlap (e.g. a specific character vs. a catch-all).
        private readonly List<KeyValuePair<Func<char, bool>, State>> _states =
            new List<KeyValuePair<Func<char, bool>, State>>();

        // Maps the finished lexeme to its token type when this state is
        // accepting; null means this state cannot terminate a token.
        private Func<string, TokenType> _tokenTypeFunc = null;

        #endregion

        #region Public Methods

        /// <summary>
        /// Adds a transition taken when the input character equals
        /// <paramref name="character"/>.
        /// </summary>
        /// <returns>This state, to allow fluent chaining.</returns>
        public State AddState(char character, State state)
        {
            this.AddState(c => c == character, state);
            return this;
        }

        /// <summary>
        /// Adds a transition taken when <paramref name="charFunc"/> returns
        /// true for the current input character. Transitions are tried in the
        /// order they were added; the first match wins.
        /// </summary>
        /// <returns>This state, to allow fluent chaining.</returns>
        public State AddState(Func<char, bool> charFunc, State state)
        {
            _states.Add(new KeyValuePair<Func<char, bool>, State>(charFunc, state));
            return this;
        }

        /// <summary>
        /// Marks this state as accepting, always producing the given token type.
        /// </summary>
        /// <returns>This state, to allow fluent chaining.</returns>
        public State SetToken(TokenType tokenType)
        {
            _tokenTypeFunc = s => tokenType;
            return this;
        }

        /// <summary>
        /// Marks this state as accepting, with the token type computed from
        /// the final lexeme text (e.g. to distinguish keywords from identifiers).
        /// </summary>
        /// <returns>This state, to allow fluent chaining.</returns>
        public State SetToken(Func<string, TokenType> tokenTypeFunc)
        {
            _tokenTypeFunc = tokenTypeFunc;
            return this;
        }

        /// <summary>
        /// Consumes characters from <paramref name="text"/> starting at
        /// <paramref name="position"/>, following transitions until none
        /// matches, then emits a token if the state reached is accepting.
        /// </summary>
        /// <param name="text">The full input being tokenized.</param>
        /// <param name="position">Current index into <paramref name="text"/>; advanced past every consumed character.</param>
        /// <param name="lexeme">Accumulates the characters consumed for the current token.</param>
        /// <returns>The recognized <see cref="Token"/>.</returns>
        /// <exception cref="LexicalErrorException">
        /// Thrown when no transition matches and the current state is not accepting.
        /// </exception>
        public Token Resolve(string text, ref int position, LinkedList<char> lexeme)
        {
            State state;
            char c;

            do
            {
                state = null;
                // '\0' acts as an end-of-input sentinel. NOTE(review): a
                // predicate that matches '\0' will append it to the lexeme and
                // advance position past the end of the text — confirm that is
                // intentional for the grammars built on this class.
                if (position < text.Length)
                    c = text[position];
                else
                    c = '\0';

                // First matching predicate wins (registration order).
                foreach (var item in _states)
                {
                    if (item.Key(c))
                    {
                        lexeme.AddLast(c);
                        position++;
                        state = item.Value;
                        break;
                    }
                }
            }
            // Self-transitions loop here rather than recursing, so long runs
            // (identifiers, numbers, whitespace) do not grow the call stack.
            while (state == this && position < text.Length);

            if (state != null)
            {
                // Moved to a different state (or re-entered this one exactly
                // at end of input): let it finish resolving the token.
                return state.Resolve(text, ref position, lexeme);
            }

            if (_tokenTypeFunc != null)
            {
                // Accepting state with no further transition: emit the token.
                string lexemeString = string.Concat(lexeme);
                TokenType type = _tokenTypeFunc(lexemeString);
                return new Token(type, lexemeString);
            }

            throw new LexicalErrorException("Erro léxico.", position);
        }

        #endregion

        #region Private Methods

        #endregion
    }
}
