#include "TokenizerTests.hpp"

namespace CPreProc
{
  void TokenizerTests::run_simple_test()
  {
    Tokenizer tokenizer("aaa << \t 1234 \"quoted string\" L\"quoted \\\"string\\\"\"");

    TokenDesc expected_tokens[] = 
    {
      {"aaa",                 TokenType_Identifier,     0},
      {" ",                   TokenType_WhiteSpace,     0},
      {"<<",                  TokenType_Punctuator,     Punct_ShiftLeft},
      {" \t ",                TokenType_WhiteSpace,     0},
      {"1234",                TokenType_PPNumber,       0},
      {" ",                   TokenType_WhiteSpace,     0},
      {"\"quoted string\"",   TokenType_StringLiteral,  StringLiteralType_CharString},
      {" ",                   TokenType_WhiteSpace,     0},
      {"L\"quoted \\\"string\\\"\"", TokenType_StringLiteral, StringLiteralType_WideString},
    };

    test_tokenization(tokenizer,expected_tokens,
      sizeof(expected_tokens) / sizeof(expected_tokens[0]));
  }

  // Drains the tokenizer into a vector of preprocessor tokens.
  // Leading whitespace is skipped first, so the stream starts at the
  // first real token; interior whitespace is still emitted as tokens.
  std::vector<Token> TokenizerTests::tokenize(Tokenizer &tokenizer)
  {
    tokenizer.skip_optional_whitespace();

    std::vector<Token> result;
    while(!tokenizer.end_of_data())
      result.push_back(tokenizer.read_pp_token());

    return result;
  }

  // Convenience wrapper: tokenize the whole input, then compare the
  // resulting stream against the expected token descriptions.
  void TokenizerTests::test_tokenization(Tokenizer &tokenizer,TokenDesc *expected_tokens,int num_expected_tokens)
  {
    test_tokens(tokenize(tokenizer),expected_tokens,num_expected_tokens);
  }

  // Verifies that the produced token stream matches the expected descriptions
  // exactly: same count, and for each token the same text, type and id.
  void TokenizerTests::test_tokens(const std::vector<Token> &tokens,TokenDesc *expected_tokens,int num_expected_tokens)
  {
    // The original loop only checked i < num_expected_tokens per element, so a
    // tokenizer that produced TOO FEW tokens (even zero) passed silently.
    // Require an exact count match up front; this also covers the old
    // "too many tokens" check.
    assert((int)tokens.size() == num_expected_tokens);

    for(int i = 0;i < num_expected_tokens;i++)
    {
      assert(tokens[i].to_string() == expected_tokens[i].text);
      assert(tokens[i].get_token_type() == expected_tokens[i].type);
      assert(tokens[i].get_token_id() == expected_tokens[i].id);
    }
  }

  void TokenizerTests::run_pp_number_tests()
  {
    Tokenizer tokenizer("1234 12.04 0xbaadf00d 135e+100 .001 "
      ".a100 1...2 .1.... 5.tostring() 123f+1");
    
    TokenDesc expected_tokens[] = 
    {
      {"1234",        TokenType_PPNumber,   0},
      {" ",           TokenType_WhiteSpace, 0},
      {"12.04",       TokenType_PPNumber,   0},
      {" ",           TokenType_WhiteSpace, 0},
      {"0xbaadf00d",  TokenType_PPNumber,   0},
      {" ",           TokenType_WhiteSpace, 0},
      {"135e+100",    TokenType_PPNumber,   0},
      {" ",           TokenType_WhiteSpace, 0},
      {".001",        TokenType_PPNumber,   0},
      {" ",           TokenType_WhiteSpace, 0},
      {".",           TokenType_Punctuator, Punct_Dot},
      {"a100",        TokenType_Identifier, 0},
      {" ",           TokenType_WhiteSpace, 0},
      {"1...2",       TokenType_PPNumber,   0},
      {" ",           TokenType_WhiteSpace, 0},
      {".1....",      TokenType_PPNumber,   0},
      {" ",           TokenType_WhiteSpace, 0},
      {"5.tostring",  TokenType_PPNumber,   0},
      {"(",           TokenType_Punctuator, Punct_LeftParenthesis},
      {")",           TokenType_Punctuator, Punct_RightParenthesis},
      {" ",           TokenType_WhiteSpace, 0},
      {"123f",        TokenType_PPNumber,   0},
      {"+",           TokenType_Punctuator, Punct_Plus},
      {"1",           TokenType_PPNumber,   0},
    };

    test_tokenization(tokenizer,expected_tokens,
      sizeof(expected_tokens) / sizeof(expected_tokens[0]));
  }

  void TokenizerTests::run_test_from_standard_spec1()
  {
    Tokenizer tokenizer("0x3<1/a.h>1e2");

    TokenDesc expected_tokens[] = 
    {
      {"0x3",         TokenType_PPNumber,     0},
      {"<",           TokenType_Punctuator,   Punct_LessThan},
      {"1",           TokenType_PPNumber,     0},
      {"/",           TokenType_Punctuator,   Punct_Div},
      {"a",           TokenType_Identifier,   0},
      {".",           TokenType_Punctuator,   Punct_Dot},
      {"h",           TokenType_Identifier,   0},
      {">",           TokenType_Punctuator,   Punct_GreaterThan},
      {"1e2",         TokenType_PPNumber,     0},
    };

    test_tokenization(tokenizer,expected_tokens,
      sizeof(expected_tokens) / sizeof(expected_tokens[0]));
  }

  void TokenizerTests::run_test_from_standard_spec2()
  {
    Tokenizer tokenizer("#define const.member@$");

    TokenDesc expected_tokens[] = 
    {
      {"#",         TokenType_Punctuator,     Punct_Sharp},
      {"define",    TokenType_Identifier,     0},
      {" ",         TokenType_WhiteSpace,     0},
      {"const",     TokenType_Identifier,     0},
      {".",         TokenType_Punctuator,     Punct_Dot},
      {"member",    TokenType_Identifier,     0},
      {"@",         TokenType_Misc,           0},
      {"$",         TokenType_Misc,           0},
    };

    test_tokenization(tokenizer,expected_tokens,
      sizeof(expected_tokens) / sizeof(expected_tokens[0]));
  }
}

