﻿//
// <author>David Nohejl</author>
//
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;

namespace WPFTrees.Parser
{
    /// <summary>
    /// Lexikalni analyzator formatu Newick.
    /// </summary>
    /// <summary>
    /// Lexical analyzer (tokenizer) for the Newick tree format.
    /// </summary>
    class Lexer
    {
        /// <summary>
        /// States the lexer can be in while reading input.
        /// </summary>
        enum State
        {
            ReadingToken,
            ReadingLabel,
            ReadingQuotedLabel,
            ReadingBranchLenght, // spelling kept to mirror TokenType.BranchLenght
            ReadingComment
        }

        /// <summary>
        /// Performs lexical analysis of a string containing a tree in Newick format.
        /// </summary>
        /// <param name="input">String containing a tree in Newick format.</param>
        /// <returns>List of tokens in input order.</returns>
        public static List<Token> Tokenize(string input)
        {
            input = input.Trim();
            List<Token> tokens = new List<Token>();
            State state = State.ReadingToken;
            StringBuilder token = new StringBuilder(50);
            int i = 0;

            while (i < input.Length)
            {
                char c = input[i];

                if (state == State.ReadingQuotedLabel)
                {
                    if (c == '\'')
                    {
                        // Newick escapes a quote inside a quoted label by doubling it ('').
                        if (i + 1 < input.Length && input[i + 1] == '\'')
                        {
                            token.Append('\'');
                            i += 2;
                            continue;
                        }
                        tokens.Add(new Token(token.ToString(), TokenType.Label));
                        token.Clear();
                        state = State.ReadingToken;
                    }
                    else
                    {
                        token.Append(c);
                    }
                }
                else if (state == State.ReadingLabel || state == State.ReadingBranchLenght)
                {
                    // Token type to emit when the current buffer is flushed.
                    TokenType currentType = state == State.ReadingBranchLenght
                        ? TokenType.BranchLenght
                        : TokenType.Label;

                    if (char.IsWhiteSpace(c))
                    {
                        // Fix: skip ALL whitespace (the original skipped only ' ' and '\t',
                        // so a newline could end up embedded in a label).
                        i++;
                        continue;
                    }
                    if (c == '(' || c == ')' || c == ',')
                    {
                        // Structural character ends the current token; reprocess it
                        // in the ReadingToken state (deliberately no i++).
                        tokens.Add(new Token(token.ToString(), currentType));
                        token.Clear();
                        state = State.ReadingToken;
                        continue;
                    }
                    if (c == ':')
                    {
                        // Fix: emit the pending buffer with its actual type (the original
                        // always emitted Label, even while reading a branch length).
                        tokens.Add(new Token(token.ToString(), currentType));
                        token.Clear();
                        state = State.ReadingBranchLenght;
                        i++;
                        continue;
                    }
                    if (c == ';')
                    {
                        // Emit the pending token, then let ReadingToken tokenize the
                        // semicolon itself (no i++). Fix: the buffer is NOT re-filled
                        // with ";" here -- that leftover leaked into the next token.
                        tokens.Add(new Token(token.ToString(), currentType));
                        token.Clear();
                        state = State.ReadingToken;
                        continue;
                    }
                    if (c == '[')
                    {
                        // Comment start ends the pending token. Fix: consume the '['
                        // so the comment token does not start with "[[" (now consistent
                        // with a comment met in the ReadingToken state).
                        tokens.Add(new Token(token.ToString(), currentType));
                        token.Clear();
                        state = State.ReadingComment;
                        i++;
                        continue;
                    }
                    if (c == '_')
                    {
                        // Newick convention: underscores in unquoted labels mean spaces.
                        token.Append(' ');
                    }
                    else
                    {
                        token.Append(c);
                    }
                }
                else if (state == State.ReadingComment)
                {
                    if (c == ']')
                    {
                        tokens.Add(new Token(token.ToString(), TokenType.Comment));
                        token.Clear();
                        state = State.ReadingToken;
                    }
                    else
                    {
                        token.Append(c);
                    }
                }
                else // state == State.ReadingToken
                {
                    if (c == '(') { tokens.Add(new Token("(", TokenType.OpeningBracket)); }
                    else if (c == ')') { tokens.Add(new Token(")", TokenType.ClosingBracket)); }
                    else if (c == ';') { tokens.Add(new Token(";", TokenType.Semicolon)); }
                    else if (c == ',') { tokens.Add(new Token(",", TokenType.Comma)); }
                    else if (char.IsWhiteSpace(c)) { /* not significant between tokens */ }
                    else if (c == '\'') { state = State.ReadingQuotedLabel; }
                    else if (c == ':')
                    {
                        token.Clear();
                        state = State.ReadingBranchLenght;
                    }
                    else if (c == '[') { state = State.ReadingComment; }
                    else
                    {
                        // First character of an unquoted label; reprocess it (no i++).
                        state = State.ReadingLabel;
                        continue;
                    }
                }
                i++;
            }

            // Fix: flush a trailing label/branch length when the input does not end
            // with ';' -- the original lexer silently dropped it. An unterminated
            // quoted label is flushed as a Label on a best-effort basis.
            if (token.Length > 0 && state != State.ReadingToken && state != State.ReadingComment)
            {
                tokens.Add(new Token(token.ToString(),
                    state == State.ReadingBranchLenght ? TokenType.BranchLenght : TokenType.Label));
            }

            return tokens;
        }
    }

    /// <summary>
    /// Types of tokens produced by <see cref="Lexer.Tokenize"/>.
    /// </summary>
    enum TokenType
    { 
        /// <summary>Opening parenthesis '(' — start of a subtree.</summary>
        OpeningBracket,
        /// <summary>Closing parenthesis ')' — end of a subtree.</summary>
        ClosingBracket,
        /// <summary>Comma ',' separating sibling subtrees.</summary>
        Comma,
        /// <summary>Node label (quoted or unquoted).</summary>
        Label,
        /// <summary>Branch length following ':'. NOTE: "Lenght" misspelling is kept —
        /// renaming would break existing callers of this enum member.</summary>
        BranchLenght,
        /// <summary>Semicolon ';' terminating a tree.</summary>
        Semicolon,
        /// <summary>Comment text that was enclosed in square brackets.</summary>
        Comment
    }  
}

