// import com.alibaba.druid.sql.parser.*;

// import java.util.HashMap;
// import java.util.Map;

// import static com.alibaba.druid.sql.parser.LayoutCharacters.EOI;
// import static com.alibaba.druid.sql.parser.Token::IDENTIFIER;

#include "SQLServerLexer.h"
#include "../../../parser/Token.h"
#include "../../../parser/LayoutCharacters.h"
#include "../../../parser/Keywords.h"
#include "../../../../Exception/IllegalStateException.h"
#include "../../../parser/NotAllowCommentException.h"
#include "../../../../utils/BOOL.h"

// Keyword lookup table (keyword text -> Token) shared by all SQLServerLexer
// instances. Defined first in this translation unit so it is constructed
// before init() (triggered by __init below) populates it.
std::shared_ptr<std::unordered_map<std::string, Token_ptr>> SQLServerLexer::map_ = std::make_shared<std::unordered_map<std::string, Token_ptr>>();

// One-shot static initializer: assigning init()'s result to __init forces the
// keyword table to be populated exactly once at static-initialization time,
// before any SQLServerLexer is constructed in this translation unit.
BOOL_ptr SQLServerLexer::__init = SQLServerLexer::init();

// Populates map_ with the default keyword set plus the T-SQL-specific
// keywords. Returns BOOL::TRUE to signal completion (the value itself is
// only used to drive the static initialization above).
BOOL_ptr SQLServerLexer::init()
{
  // Start from the dialect-neutral defaults
  // (Java: map.putAll(Keywords.DEFAULT_KEYWORDS.getKeywords())).
  SQLServerLexer::map_->insert(Keywords::DEFAULT_KEYWORDS->getKeywords()->begin(), Keywords::DEFAULT_KEYWORDS->getKeywords()->end());

  // Add the SQL Server specific keywords.
  SQLServerLexer::map_->insert(std::make_pair("CURSOR", Token_ptr(new Token(&Token::CURSOR))));
  SQLServerLexer::map_->insert(std::make_pair("TOP", Token_ptr(new Token(&Token::TOP))));
  SQLServerLexer::map_->insert(std::make_pair("USE", Token_ptr(new Token(&Token::USE))));
  SQLServerLexer::map_->insert(std::make_pair("WITH", Token_ptr(new Token(&Token::WITH))));
  SQLServerLexer::map_->insert(std::make_pair("PERCENT", Token_ptr(new Token(&Token::PERCENT))));
  SQLServerLexer::map_->insert(std::make_pair("IDENTITY", Token_ptr(new Token(&Token::IDENTITY))));
  SQLServerLexer::map_->insert(std::make_pair("DECLARE", Token_ptr(new Token(&Token::DECLARE))));
  SQLServerLexer::map_->insert(std::make_pair("IF", Token_ptr(new Token(&Token::IF))));
  SQLServerLexer::map_->insert(std::make_pair("ELSE", Token_ptr(new Token(&Token::ELSE))));
  SQLServerLexer::map_->insert(std::make_pair("BEGIN", Token_ptr(new Token(&Token::BEGIN))));
  SQLServerLexer::map_->insert(std::make_pair("END", Token_ptr(new Token(&Token::END))));
  SQLServerLexer::map_->insert(std::make_pair("MERGE", Token_ptr(new Token(&Token::MERGE))));
  SQLServerLexer::map_->insert(std::make_pair("USING", Token_ptr(new Token(&Token::USING))));
  SQLServerLexer::map_->insert(std::make_pair("MATCHED", Token_ptr(new Token(&Token::MATCHED))));

  // BUG FIX: the original flowed off the end of a value-returning function,
  // which is undefined behavior in C++ ([stmt.return]). Return a value.
  return BOOL::TRUE;
}

// T-SQL keyword set wrapping map_, which the __init static initializer
// earlier in this translation unit has already populated.
Keywords_ptr SQLServerLexer::DEFAULT_SQL_SERVER_KEYWORDS = std::make_shared<Keywords>(map_);

// Constructs a lexer over a raw UTF-8 buffer and installs the SQL Server
// keyword table.
// NOTE(review): UTF8ToUnicode(input) returns a temporary whose c_str()
// pointer is only valid until the end of this full-expression (i.e. the base
// constructor call). If Lexer stores the pointer instead of copying the
// characters, it dangles — confirm against Lexer's constructor.
// NOTE(review): inputLength is the UTF-8 byte length, which may differ from
// the converted wide-character length — verify what Lexer expects.
SQLServerLexer::SQLServerLexer(char *input, int inputLength, BOOL_ptr skipComment)
    : Lexer(const_cast<wchar_t *>(encode_util::UTF8ToUnicode(input).c_str()), inputLength, skipComment)
{

  Lexer::keywords = DEFAULT_SQL_SERVER_KEYWORDS;
}

// Constructs a lexer over a UTF-8 std::string (converted to wide characters
// for the base Lexer) and installs the SQL Server keyword table.
SQLServerLexer::SQLServerLexer(string_ptr input)
    : Lexer(encode_util::UTF8ToUnicode(*input))
{
  this->keywords = DEFAULT_SQL_SERVER_KEYWORDS;
}

// Constructs a lexer over a UTF-8 string, installs the SQL Server keyword
// table, and enables each requested parser feature.
SQLServerLexer::SQLServerLexer(string_ptr input, SQLParserFeature_list_ptr features)
    : Lexer(encode_util::UTF8ToUnicode(*input))
{
  Lexer::keywords = DEFAULT_SQL_SERVER_KEYWORDS;
  // Iterate by const reference: copying a SQLParserFeature_ptr (a shared
  // pointer) each iteration would incur an atomic ref-count bump per element.
  for (const SQLParserFeature_ptr &feature : *features)
  {
    config(feature, BOOL::TRUE);
  }
}

// Scans a SQL comment starting at the current character, which must be the
// first character of "/*", "//" or "--". On return, token_ is HINT,
// MULTI_LINE_COMMENT or LINE_COMMENT and stringVal_ holds the comment text.
// Throws NotAllowCommentException (by pointer, matching this file's
// convention) when comments are disallowed and the comment is not "safe".
void SQLServerLexer::scanComment()
{
  // Precondition: current char starts a comment.
  if (ch != L'/' && ch != L'-')
  {
    throw new IllegalStateException();
  }

  mark_ = pos_;   // remember where the comment began
  bufPos = 0;     // length of the comment scanned so far
  scanChar();

  // Block comment: "/* ... */", possibly an optimizer hint "/*! ... */".
  if (ch == L'*')
  {
    scanChar();
    bufPos++;

    // Skip blanks between "/*" and a possible '!' hint marker.
    while (ch == L' ')
    {
      scanChar();
      bufPos++;
    }

    BOOL_ptr isHint_ = BOOL::FALSE;
    int startHintSp = bufPos + 1;
    if (ch == L'!')
    {
      isHint_ = BOOL::TRUE;
      scanChar();
      bufPos++;
    }

    // Consume until the closing "*/".
    // NOTE(review): there is no EOF check in this loop — an unterminated
    // "/*" comment appears to spin past end of input unless charAt/scanChar
    // stop it. Confirm against Lexer's EOI handling.
    for (;;)
    {
      if (ch == L'*' && charAt(pos_ + 1) == L'/')
      {
        bufPos += 2;
        scanChar();
        scanChar();
        break;
      }

      scanChar();
      bufPos++;
    }

    // NOTE(review): isHint_ is a BOOL_ptr (smart pointer); if BOOL_ptr has
    // no value-level operator bool, this tests pointer non-nullness and is
    // always true. Likely meant a value comparison against BOOL::TRUE —
    // confirm BOOL_ptr's semantics.
    if (isHint_)
    {
      // Hint body: text between "/*!" and "*/".
      stringVal_ = subString(mark_ + startHintSp, (bufPos - startHintSp) - 1);
      token_ = Token_ptr(new Token(&Token::HINT));
    }
    else
    {
      stringVal_ = subString(mark_, bufPos);
      token_ = Token_ptr(new Token(&Token::MULTI_LINE_COMMENT));
      commentCount++;
      if (keepComments)
      {
        addComment(stringVal_);
      }
    }

    // NOTE(review): this compares c_str() POINTERS, not string contents —
    // it only works if both names share the same underlying string object.
    // The Java original compared token identity (token != Token.HINT);
    // confirm this address comparison is intentional.
    if (token_->name->c_str() != Token::HINT.name->c_str() &&
        !isAllowComment() &&
        !isSafeComment(stringVal_))
    {
      throw new NotAllowCommentException();
    }

    return;
  }

  // Line comment: "//" or "--", terminated by CR, CRLF, LF or end of input.
  if (ch == L'/' || ch == L'-')
  {
    scanChar();
    bufPos++;

    for (;;)
    {
      if (ch == L'\r')
      {
        if (charAt(pos_ + 1) == L'\n')
        {
          // CRLF: count both characters, leave pos_ on the LF.
          bufPos += 2;
          scanChar();
          break;
        }
        // Lone CR terminates the comment.
        bufPos++;
        break;
      }
      else if (ch == LayoutCharacters::EOI)
      {
        break;
      }

      // NOTE(review): narrow '\n' here vs wide L'\r' above — harmless
      // (integral promotion) but inconsistent with the rest of the file.
      if (ch == '\n')
      {
        scanChar();
        bufPos++;
        break;
      }

      scanChar();
      bufPos++;
    }

    // +1 skips the second character of the "//" / "--" introducer.
    stringVal_ = subString(mark_ + 1, bufPos);
    token_ = Token_ptr(new Token(&Token::LINE_COMMENT));
    commentCount++;
    if (keepComments)
    {
      addComment(stringVal_);
    }
    // Record whether this comment ran to the end of the input.
    endOfComment = isEOF()?BOOL::TRUE:BOOL::FALSE;

    if (!isAllowComment() &&
        (isEOF() ||
         !isSafeComment(stringVal_)))
    {
      throw new NotAllowCommentException();
    }
    return;
  }
}

// Scans a SQL Server bracket-quoted identifier "[name]". On entry ch is '['.
// On success token_ is IDENTIFIER and stringVal_ holds the bracketed text
// (including the delimiters); on EOF before ']' a lex error is reported.
void SQLServerLexer::scanLBracket()
{
  mark_ = pos_;

  // Capacity of the lazily-allocated scratch buffer. BUG FIX: the original
  // compared bufPos against wcslen(buf), which reads a freshly new'd,
  // UNINITIALIZED, non-terminated buffer — undefined behavior. The Java
  // original compares bufPos == buf.length, i.e. the buffer's CAPACITY,
  // so use the allocation size instead.
  static const int BUF_CAPACITY = 32;

  if (buf == nullptr)
  {
    // Value-initialize so the buffer never holds indeterminate contents.
    buf = new wchar_t[BUF_CAPACITY]();
  }

  // NOTE(review): bufPos is not reset here (matches the Java original) —
  // presumably the caller zeroes it before dispatching; confirm in Lexer.
  for (;;)
  {
    if (isEOF())
    {
      // Input ended before the closing ']' — report and bail out.
      Object_list_ptr tmp = std::make_shared<std::list<Object_ptr>>();
      lexError(L"unclosed.str.lit", tmp);
      return;
    }

    ch = charAt(++pos_);

    if (ch == ']')
    {
      scanChar();
      token_ = Token_ptr(new Token(&Token::IDENTIFIER));
      break;
    }

    if (bufPos == BUF_CAPACITY)
    {
      // Local buffer full — delegate to putChar, which presumably appends
      // to a growable buffer (TODO confirm against Lexer::putChar).
      putChar(ch);
    }
    else
    {
      buf[bufPos++] = ch;
    }
  }

  // +2 accounts for the '[' and ']' delimiters.
  stringVal_ = subString(mark_, bufPos + 2);
}
