//
//  TestLexer.cpp
//  LightBasic
//
//  Created by Albert on 26/03/2011.
//  Copyright 2011 LightBasic Development Team. All rights reserved.
//

#include <boost/test/unit_test.hpp>
#include "SourceString.h"
#include "Token.h"
#include "TokenInfo.h"
#include "Lexer.h"
#include "StringRef.h"
using namespace lbc;

/**
 * Fetch the next token from the lexer and verify its type and lexeme,
 * and optionally its source location. Location components passed as -1
 * are not checked (the defaults).
 *
 * The token's properties are copied out and the token is deleted BEFORE
 * the checks run: if Boost.Test is configured to throw on a failed
 * check, the original order (delete last) would leak the token.
 */
static inline void ExpectToken(Lexer & lexer, TokenType type, const std::string & lexeme, int line = -1, int col = -1, int len = -1)
{
    Token * token = lexer.GetNext();
    BOOST_REQUIRE(token != NULL);
    // copy everything we assert on, then release the token immediately
    TokenType actualType     = token->GetType();
    std::string actualLexeme = token->GetLexeme().str();
    int actualLine           = token->GetLocation().GetLine();
    int actualColumn         = token->GetLocation().GetColumn();
    int actualLength         = token->GetLocation().GetLength();
    delete token;
    // now assert; no resource is held even if a check throws
    BOOST_CHECK(actualType == type);
    BOOST_CHECK(actualLexeme == lexeme);
    if (line != -1) BOOST_CHECK(actualLine == line);
    if (col != -1)  BOOST_CHECK(actualColumn == col);
    if (len != -1)  BOOST_CHECK(actualLength == len);
}


/// start suite
BOOST_AUTO_TEST_SUITE(Lexer)
    
    /**
     * Inputs that contain no lexable tokens at all — blanks, tabs,
     * line endings, single-line and nested multi-line comments, and
     * line continuations — must each lex straight to end-of-file.
     */
    BOOST_AUTO_TEST_CASE( EmptySource )
    {
        // every entry below is expected to produce only TknEndOfFile
        const char * sources[] = {
            "",
            "   ",
            "\t\t",
            "\n   \n   ",
            "\r\n",
            "   \r   \n  \t  ",
            "'comment string",
            " /' stream \n '/ ",
            "/'somethign",
            "/'/' doubly nested '/'/",
            " \t _ this should be ignored \n_ ignored again",
            0
        };
        // walk the null-terminated list of sources
        for (const char ** src = sources; *src != 0; ++src) {
            lbc::Lexer lexer(*src);
            
            // the very first token must already be end of file
            ExpectToken(lexer, TknEndOfFile, TokenInfo::GetTokenName(TknEndOfFile).str());
        }
    }

    
    /**
     * Test token locations
     *
     * Verifies line / column / length reported for each token across a
     * multi-line source containing comments, nested multi-line comments
     * and mixed line endings. Expectations visible in the data below:
     *  - identifier lexemes come back uppercased ("one" -> "ONE")
     *  - a string literal's lexeme excludes the quotes ("two") but its
     *    length (5) includes them, and its column points at the opening quote
     *  - end-of-statement tokens have length 0 at the newline's column
     *  - lines 4-6 are swallowed by a nested /' ... '/ comment, so the
     *    token after SEVEN is EIGHT on line 7
     */
    BOOST_AUTO_TEST_CASE( TokenLocation )
    {
        const char * source = "one \"two\" three 42 = <= ...\n"
                              "four \t IF a = b THEN \r\n"
                              "five /'/' nested '/'/ six\n"
                              "seven/' trash\n trash /' nested\n'/\nend?'/eight";
        // NOTE(review): binding const refs to the returned strings — assumes
        // GetTokenName(...).str() yields a value whose lifetime is extended
        // (or a stable reference); confirm against StringRef's API.
        const std::string & EndOfStmt = TokenInfo::GetTokenName(TknEndOfStatement).str();
        const std::string & EndOfFile = TokenInfo::GetTokenName(TknEndOfFile).str();
        
        lbc::Lexer lexer(source);
        // line 1
        ExpectToken(lexer, TknIdentifier,       "ONE",      1,  1,   3);
        ExpectToken(lexer, TknStringLiteral,    "two",      1,  5,   5);
        ExpectToken(lexer, TknIdentifier,       "THREE",    1,  11,  5);
        ExpectToken(lexer, TknNumericConstant,  "42",       1,  17,  2);
        ExpectToken(lexer, TknOpAssign,         "=",        1,  20,  1);
        ExpectToken(lexer, TknOpLessEqal,       "<=",       1,  22,  2);
        ExpectToken(lexer, TknOpEllipsis,       "...",      1,  25,  3);
        ExpectToken(lexer, TknEndOfStatement,   EndOfStmt,  1,  28,  0);
        // line 2 (the \t between "four" and IF still advances the column count)
        ExpectToken(lexer, TknIdentifier,       "FOUR",     2,  1,   4);
        ExpectToken(lexer, TknKwIF,             "IF",       2,  8,   2);
        ExpectToken(lexer, TknIdentifier,       "A",        2,  11,  1);
        ExpectToken(lexer, TknOpAssign,         "=",        2,  13,  1);
        ExpectToken(lexer, TknIdentifier,       "B",        2,  15,  1);
        ExpectToken(lexer, TknKwTHEN,           "THEN",     2,  17,  4);
        ExpectToken(lexer, TknEndOfStatement,   EndOfStmt,  2,  22,  0);
        // line 3: the inline nested comment is skipped, SIX follows at col 23
        ExpectToken(lexer, TknIdentifier,       "FIVE",     3,  1,   4);
        ExpectToken(lexer, TknIdentifier,       "SIX",      3,  23,  3);
        ExpectToken(lexer, TknEndOfStatement,   EndOfStmt,  3,  26,  0);
        // line 4: everything after SEVEN up to the closing '/ on line 7 is comment
        ExpectToken(lexer, TknIdentifier,       "SEVEN",    4,  1,   5);
        // line 7: EIGHT is the first token after the comment closes
        ExpectToken(lexer, TknIdentifier,       "EIGHT",    7,  7,   5);
        ExpectToken(lexer, TknEndOfStatement,   EndOfStmt,  7,  12,  0);
        ExpectToken(lexer, TknEndOfFile,        EndOfFile,  7,  12,  0);
    }

    
    /**
     * Multi-line /' ... '/ comments — including nested ones, ones
     * containing line endings, and ones combined with line
     * continuations — must be skipped entirely. Every input below
     * therefore lexes to exactly: A, B, end of statement, end of file.
     */
    BOOST_AUTO_TEST_CASE( MultiLineComments )
    {
        const char * sources[] = {
            "a/''/b",
            "a/' '/b",
            "a /''/ b",
            "a /' '/ b",
            "a /'\n'/ b",
            "a /'\r\n'/b",
            "a/'\r'/b",
            "a/'/''/'/b",
            "a/' / ' ' / '/b",
            "/' \n '/a/' /' '/\n '/b/'",
            "a _\n /' some multiline coment \n on a new line '/ _\n /' \n /' cont '/ \r\n '/ b",
            0
        };
        
        // iterate until the null sentinel
        size_t idx = 0;
        while (sources[idx] != 0) {
            lbc::Lexer lexer(sources[idx]);
            // identifier before the comment
            ExpectToken(lexer, TknIdentifier, "A");
            // identifier after the comment
            ExpectToken(lexer, TknIdentifier, "B");
            // implicit end of statement at the end of input
            ExpectToken(lexer, TknEndOfStatement, TokenInfo::GetTokenName(TknEndOfStatement).str());
            // and finally end of file
            ExpectToken(lexer, TknEndOfFile, TokenInfo::GetTokenName(TknEndOfFile).str());
            ++idx;
        }
    }
    

    /**
     * Test that lexer can recognize tokens defined in TokenInfo.def.h
     *
     * Uses the X-macro pattern: TokenInfo.def.h is included twice, first
     * to build a source string containing every lexable operator, every
     * operator alias and every keyword, then again to assert that the
     * lexer produces the matching token for each, in definition order.
     */
    BOOST_AUTO_TEST_CASE( TokenDefTokens )
    {
        // construct the source: one space-separated entry per definition
        std::string source = "";
        #define OPERATOR(id, lit, lex, ...) if (lex == true) source += " " lit;
        #define OPERATOR_ALIAS(id, op, ...) source += " " #id;
        #define KEYWORD(id, ...) source += " " #id;
        #include "TokenInfo.def.h"
        
        // the lexer
        lbc::Lexer lexer(source);
        
        // lex tokens: redefine the macros to consume the same definitions.
        // NOTE(review): this only compiles cleanly if TokenInfo.def.h
        // #undefs these macros after each inclusion — confirm in the .def.h.
        #define OPERATOR(id, lit, lex, ...) \
            if (lex == true) ExpectToken(lexer, TknOp##id, lit);
        #define OPERATOR_ALIAS(id, op, ...) \
            ExpectToken(lexer, TknOp##op, #id);
        #define KEYWORD(id, ...) \
            ExpectToken(lexer, TknKw##id, #id);
        #include "TokenInfo.def.h"
    }

BOOST_AUTO_TEST_SUITE_END()
/// end suite
