﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using NUnit.Framework;
using IronCow.Search;

namespace IronCow.UnitTests
{
    /// <summary>
    /// Unit tests for <see cref="LexicalAnalyzer.Tokenize"/>, covering plain words,
    /// boolean keywords (AND/OR/NOT), parentheses, quoted phrases, and operator syntax
    /// (<c>name:value</c>). Each test compares the string representation of the expected
    /// and actual <see cref="TokenCollection"/>, since token collections are asserted
    /// via <c>ToString()</c> rather than structural equality.
    /// </summary>
    [TestFixture]
    public class TestLexicalAnalyzerTokenization
    {
        /// <summary>The analyzer under test; recreated before each test by <see cref="SetUp"/>.</summary>
        public LexicalAnalyzer Lexer { get; set; }

        [SetUp]
        public void SetUp()
        {
            Lexer = new LexicalAnalyzer();
        }

        [Test]
        public void TestWord()
        {
            // A single bare word tokenizes to a single word token.
            var actual = Lexer.Tokenize("word");
            var expected = new TokenCollection()
            {
                new Token("word")
            };
            Assert.AreEqual(expected.ToString(), actual.ToString());
        }

        [Test]
        public void TestAnd()
        {
            // The AND keyword between two words becomes a BooleanAnd token.
            var actual = Lexer.Tokenize("one AND two");
            var expected = new TokenCollection()
            {
                new Token("one"),
                new Token(TokenType.BooleanAnd),
                new Token("two")
            };
            Assert.AreEqual(expected.ToString(), actual.ToString());
        }

        [Test]
        public void TestOr()
        {
            // The OR keyword between two words becomes a BooleanOr token.
            var actual = Lexer.Tokenize("one OR two");
            var expected = new TokenCollection()
            {
                new Token("one"),
                new Token(TokenType.BooleanOr),
                new Token("two")
            };
            Assert.AreEqual(expected.ToString(), actual.ToString());
        }

        [Test]
        public void TestNot()
        {
            // NOT is tokenized as a unary operator following the boolean AND.
            var actual = Lexer.Tokenize("one AND NOT two");
            var expected = new TokenCollection()
            {
                new Token("one"),
                new Token(TokenType.BooleanAnd),
                new Token(TokenType.UnaryNot),
                new Token("two")
            };
            Assert.AreEqual(expected.ToString(), actual.ToString());
        }

        [Test]
        public void TestParenthesis()
        {
            // Parentheses produce explicit open/close tokens around the grouped terms.
            var actual = Lexer.Tokenize("(one AND two) OR three");
            var expected = new TokenCollection()
            {
                new Token(TokenType.ParenthesisOpen),
                new Token("one"),
                new Token(TokenType.BooleanAnd),
                new Token("two"),
                new Token(TokenType.ParenthesisClose),
                new Token(TokenType.BooleanOr),
                new Token("three")
            };
            Assert.AreEqual(expected.ToString(), actual.ToString());
        }

        [Test]
        public void TestDoubleParenthesis()
        {
            // Nested parentheses produce correctly paired open/close tokens.
            var actual = Lexer.Tokenize("(one AND (two))");
            var expected = new TokenCollection()
            {
                new Token(TokenType.ParenthesisOpen),
                new Token("one"),
                new Token(TokenType.BooleanAnd),
                new Token(TokenType.ParenthesisOpen),
                new Token("two"),
                new Token(TokenType.ParenthesisClose),
                new Token(TokenType.ParenthesisClose)
            };
            // BUG FIX: this assertion was missing, so the test always passed vacuously.
            Assert.AreEqual(expected.ToString(), actual.ToString());
        }

        [Test]
        public void TestQuotes()
        {
            // A quoted phrase becomes a single word token preserving inner whitespace.
            var actual = Lexer.Tokenize("\"one two\" three");
            var expected = new TokenCollection()
            {
                new Token("one two"),
                new Token("three")
            };
            Assert.AreEqual(expected.ToString(), actual.ToString());
        }

        [Test]
        public void TestKeywordsInQuotes()
        {
            // Keywords inside quotes are treated as literal text, not operators.
            var actual = Lexer.Tokenize("\"one AND two\" AND three");
            var expected = new TokenCollection()
            {
                new Token("one AND two"),
                new Token(TokenType.BooleanAnd),
                new Token("three")
            };
            Assert.AreEqual(expected.ToString(), actual.ToString());
        }

        [Test]
        public void TestOperator()
        {
            // "name:value" syntax yields an Operator token followed by its value token.
            var actual = Lexer.Tokenize("dueBefore:today");
            var expected = new TokenCollection()
            {
                new Token(TokenType.Operator, "dueBefore"),
                new Token("today")
            };
            Assert.AreEqual(expected.ToString(), actual.ToString());
        }

        [Test]
        public void TestOperatorWithQuotes()
        {
            // An operator's value may be quoted to include whitespace.
            var actual = Lexer.Tokenize("dueBefore:\"1 week\"");
            var expected = new TokenCollection()
            {
                new Token(TokenType.Operator, "dueBefore"),
                new Token("1 week")
            };
            Assert.AreEqual(expected.ToString(), actual.ToString());
        }

        [Test]
        public void TestComplex1()
        {
            // Combined case: nested parentheses, boolean keywords, and operators together.
            var actual = Lexer.Tokenize("(status:incomplete AND (tag:next OR dueBefore:tomorrow))");
            var expected = new TokenCollection()
            {
                new Token(TokenType.ParenthesisOpen),
                new Token(TokenType.Operator, "status"),
                new Token("incomplete"),
                new Token(TokenType.BooleanAnd),
                new Token(TokenType.ParenthesisOpen),
                new Token(TokenType.Operator, "tag"),
                new Token("next"),
                new Token(TokenType.BooleanOr),
                new Token(TokenType.Operator, "dueBefore"),
                new Token("tomorrow"),
                new Token(TokenType.ParenthesisClose),
                new Token(TokenType.ParenthesisClose)
            };
            Assert.AreEqual(expected.ToString(), actual.ToString());
        }
    }
}
