#include <gtest/gtest.h>

#include <string_view>

#include "book/markdown/md-line-tokenizer.h"


using book::MdLineTokenizer;
using Token = book::MdToken;
using Type = book::MdToken::Type;

namespace
{

// Test helper: owns the character input stream that feeds MdLineTokenizer,
// so a test can construct it from a string literal and pull tokens by
// calling the object like a function.
class Tokenizer
{
public:
    // explicit: prevent accidental implicit string -> Tokenizer conversion.
    // string_view is a cheap view type, so it is taken by value.
    explicit Tokenizer(std::string_view s)
        : stream_(s)
        , tokenizer_(stream_) {}

    // Returns the next token from the wrapped tokenizer; an invalid
    // (falsy) token signals end of input (see the tests below).
    Token operator()() {
        return tokenizer_.next();
    }

private:
    // Declaration order matters: stream_ must be constructed before and
    // destroyed after tokenizer_, which reads from it.
    StrViewCharInputStream stream_;
    MdLineTokenizer tokenizer_;
};

} // namespace anonymous

TEST(MdLineTokenizer, MdLineTokenizer) {
    // A default-constructed token is invalid (falsy) and equal to an
    // explicitly Invalid one; every tokenizer below ends with an invalid
    // token that signals end of input.
    {
        ASSERT_FALSE(Token());
        ASSERT_FALSE(Token(Type::Invalid));
        ASSERT_EQ(Token(), Token(Type::Invalid));
    }
    // Empty input yields no tokens.
    {
        Tokenizer t("");
        ASSERT_FALSE(t());
    }
    // Plain text: consecutive ordinary characters form one Text token.
    {
        Tokenizer t("a");
        ASSERT_EQ(t(), Token(Type::Text, "a"));
        ASSERT_FALSE(t());
    }
    {
        Tokenizer t("abc");
        ASSERT_EQ(t(), Token(Type::Text, "abc"));
        ASSERT_FALSE(t());
    }
    // Star emphasis: 1, 2 and 3 stars map to italic, bold, bold-italic.
    {
        Tokenizer t("*");
        ASSERT_EQ(t(), Token(Type::StarItalic));
        ASSERT_FALSE(t());
    }
    {
        Tokenizer t("**");
        ASSERT_EQ(t(), Token(Type::StarBold));
        ASSERT_FALSE(t());
    }
    {
        Tokenizer t("***");
        ASSERT_EQ(t(), Token(Type::StarBoldItalic));
        ASSERT_FALSE(t());
    }
    // Underline emphasis mirrors the star rules.
    {
        Tokenizer t("_");
        ASSERT_EQ(t(), Token(Type::UnderlineItalic));
        ASSERT_FALSE(t());
    }
    {
        Tokenizer t("__");
        ASSERT_EQ(t(), Token(Type::UnderlineBold));
        ASSERT_FALSE(t());
    }
    {
        Tokenizer t("___");
        ASSERT_EQ(t(), Token(Type::UnderlineBoldItalic));
        ASSERT_FALSE(t());
    }
    // Strike-through requires a double tilde; a lone tilde is plain text.
    {
        Tokenizer t("~~");
        ASSERT_EQ(t(), Token(Type::DeleteLine));
        ASSERT_FALSE(t());
    }
    {
        Tokenizer t("~");
        ASSERT_EQ(t(), Token(Type::Text, "~"));
        ASSERT_FALSE(t());
    }
    // Backticks produce a Code token carrying the enclosed text
    // (possibly empty, even when the closing backtick is missing).
    {
        Tokenizer t("`");
        ASSERT_EQ(t(), Token(Type::Code, ""));
        ASSERT_FALSE(t());
    }
    {
        Tokenizer t("``");
        ASSERT_EQ(t(), Token(Type::Code, ""));
        ASSERT_FALSE(t());
    }
    // Single tildes around text stay three separate tokens, unlike ~~.
    {
        Tokenizer t("~a~");
        ASSERT_EQ(t(), Token(Type::Text, "~"));
        ASSERT_EQ(t(), Token(Type::Text, "a"));
        ASSERT_EQ(t(), Token(Type::Text, "~"));
        ASSERT_FALSE(t());
    }
    {
        Tokenizer t("`a`");
        ASSERT_EQ(t(), Token(Type::Code, "a"));
        ASSERT_FALSE(t());
    }
    // Backslash escapes the following special character into plain text;
    // any unescaped special after it still tokenizes normally.
    {
        Tokenizer t(R"(\*)");
        ASSERT_EQ(t(), Token(Type::Text, "*"));
        ASSERT_FALSE(t());
    }
    {
        Tokenizer t(R"(\**)");
        ASSERT_EQ(t(), Token(Type::Text, "*"));
        ASSERT_EQ(t(), Token(Type::StarItalic));
        ASSERT_FALSE(t());
    }
    // Angle brackets delimit an unnamed hyperlink (<url> form).
    {
        Tokenizer t(R"(<)");
        ASSERT_EQ(t(), Token(Type::UnnamedHrefLeftSide));
        ASSERT_FALSE(t());
    }
    {
        Tokenizer t(R"(>)");
        ASSERT_EQ(t(), Token(Type::UnnamedHrefRightSide));
        ASSERT_FALSE(t());
    }
    {
        Tokenizer t(R"(<link>)");
        ASSERT_EQ(t(), Token(Type::UnnamedHrefLeftSide));
        ASSERT_EQ(t(), Token(Type::Text, "link"));
        // Fixed: compare against a Token like every other assertion here,
        // not a bare Type enumerator.
        ASSERT_EQ(t(), Token(Type::UnnamedHrefRightSide));
        ASSERT_FALSE(t());
    }
    // Hyperlink delimiters embedded in surrounding text.
    {
        Tokenizer t(R"(a<b>c)");
        ASSERT_EQ(t(), Token(Type::Text, "a"));
        ASSERT_EQ(t(), Token(Type::UnnamedHrefLeftSide));
        ASSERT_EQ(t(), Token(Type::Text, "b"));
        ASSERT_EQ(t(), Token(Type::UnnamedHrefRightSide));
        ASSERT_EQ(t(), Token(Type::Text, "c"));
        ASSERT_FALSE(t());
    }
}
