// import com.alibaba.druid.DbType;
// import com.alibaba.druid.sql.parser.Keywords;
// import com.alibaba.druid.sql.parser.Lexer;
// import com.alibaba.druid.sql.parser.SQLParserFeature;
// import com.alibaba.druid.sql.parser.Token;

// import java.util.HashMap;
// import java.util.Map;

#include "HiveLexer.h"

#include <map>
#include <memory>
#include <string>
#include <unordered_map>
#include <utility>

#include "../../../parser/Keywords.h"
#include "../../../parser/Token.h"
#include "../../../parser/SQLParserFeature.h"
#include "../../../../DbType.h"
#include "../../../../utils/BOOL.h"

std::shared_ptr<std::unordered_map<std::string, Token_ptr>> map = std::make_shared<std::unordered_map<std::string, Token_ptr>>();

BOOL_ptr HiveLexer::__init = HiveLexer::init();

// Populates the file-level `map` with the default SQL keywords plus the
// Hive-specific additions (ported from Druid's HiveLexer), before
// DEFAULT_HIVE_KEYWORDS below captures it. Runs exactly once, driven by the
// static member `__init` above.
// NOTE(review): identifiers starting with a double underscore (`__init`) are
// reserved in C++ — consider renaming in the header.
BOOL_ptr HiveLexer::init()
{
  // Seed with the shared default keyword set. Iterate by const reference so
  // each (std::string, Token_ptr) pair is not copied per iteration.
  for (const auto& entry : *Keywords::DEFAULT_KEYWORDS->getKeywords())
  {
    map->insert(entry); // explicit copy into the Hive-local table
  }

  // Hive-specific keyword additions.
  map->emplace("OF", Token_ptr(new Token(&Token::OF)));
  map->emplace("CONCAT", Token_ptr(new Token(&Token::CONCAT)));
  map->emplace("CONTINUE", Token_ptr(new Token(&Token::CONTINUE)));
  map->emplace("MERGE", Token_ptr(new Token(&Token::MERGE)));
  map->emplace("MATCHED", Token_ptr(new Token(&Token::MATCHED)));
  map->emplace("USING", Token_ptr(new Token(&Token::USING)));

  map->emplace("ROW", Token_ptr(new Token(&Token::ROW)));
  map->emplace("LIMIT", Token_ptr(new Token(&Token::LIMIT)));
  map->emplace("PARTITIONED", Token_ptr(new Token(&Token::PARTITIONED)));
  map->emplace("PARTITION", Token_ptr(new Token(&Token::PARTITION)));
  map->emplace("OVERWRITE", Token_ptr(new Token(&Token::OVERWRITE)));
  //        map.put("SORT", Token::SORT);  // intentionally disabled in the Java original
  map->emplace("IF", Token_ptr(new Token(&Token::IF)));
  map->emplace("TRUE", Token_ptr(new Token(&Token::XTRUE)));
  map->emplace("FALSE", Token_ptr(new Token(&Token::XFALSE)));
  map->emplace("RLIKE", Token_ptr(new Token(&Token::RLIKE)));
  map->emplace("CONSTRAINT", Token_ptr(new Token(&Token::CONSTRAINT)));
  map->emplace("DIV", Token_ptr(new Token(&Token::DIV)));

  // Bug fix: the function is declared to return BOOL_ptr but previously had no
  // return statement — flowing off the end of a value-returning function is
  // undefined behavior in C++. Return TRUE to signal initialization completed.
  return BOOL::TRUE;
}

// Hive keyword set shared by every HiveLexer instance; wraps the `map` that
// init() populated above. make_shared performs a single allocation for the
// control block and the Keywords object.
Keywords_ptr HiveLexer::DEFAULT_HIVE_KEYWORDS = std::make_shared<Keywords>(map);

// Constructs a lexer over a UTF-8 input string, converting it to the internal
// Unicode representation and applying Hive dialect defaults.
HiveLexer::HiveLexer(string_ptr input)
    : Lexer(encode_util::UTF8ToUnicode(*input))
{
  // Hive dialect configuration: comments are skipped during scanning but
  // retained, unicode code points are supported, and the Hive keyword table
  // replaces the base lexer's defaults.
  this->dbType = DbType_ptr(new DbType(&DbType::hive));
  this->skipComment = BOOL::TRUE;
  this->keepComments = BOOL::TRUE;
  this->features |= SQLParserFeature::SupportUnicodeCodePoint.mask;
  Lexer::keywords = DEFAULT_HIVE_KEYWORDS;
}

// Constructs a lexer over a UTF-8 input string with additional parser features
// enabled on top of the Hive dialect defaults.
//
// input:    UTF-8 SQL text; converted to the internal Unicode representation.
// features: list of extra SQLParserFeature flags to enable (each turned on
//           via config(feature, TRUE)).
HiveLexer::HiveLexer(string_ptr input, SQLParserFeature_list_ptr features)
    : Lexer(encode_util::UTF8ToUnicode(*input))
{
  dbType = DbType_ptr(new DbType(&DbType::hive));
  this->skipComment = BOOL::TRUE;
  this->keepComments = BOOL::TRUE;
  Lexer::keywords = DEFAULT_HIVE_KEYWORDS;
  this->features |= SQLParserFeature::SupportUnicodeCodePoint.mask;

  // Iterate by const reference: the original copied a shared_ptr per
  // iteration, paying an atomic refcount increment/decrement each time.
  for (const auto& feature : *features)
  {
    config(feature, BOOL::TRUE);
  }
}

// Overrides the base lexer's string scanning to use the variant implemented by
// scanString2() (presumably Hive-specific quoting rules — see base class).
void HiveLexer::scanString()
{
  scanString2();
}

// Overrides the base lexer's comment scanning to delegate to the Hive comment
// handler provided by the base class.
void HiveLexer::scanComment()
{
  scanHiveComment();
}
