#include "tokenizer.h"
#include <string.h>
#include <vector>
#include <iostream>

namespace hse {
using std::vector;
using std::string;
using std::cout;
using std::endl;

// Out-of-class definition of the static splitter lookup table (required so
// the linker has exactly one instance). Zero-initialized here; the real
// contents are filled in by Init(). (Name keeps the "spilter" spelling from
// the header declaration.)
bool Tokenizer::is_spilter_[256] = {false};

// Builds the 256-entry splitter lookup table: every byte that is NOT an
// ASCII letter is treated as a token separator. Digits are deliberately
// excluded from tokens — see the commented-out range below.
// The loop assigns every entry, so the former memset() was dead code and
// has been removed.
void Tokenizer::Init() {
	for (int i = 0; i < 256; ++i)
		is_spilter_[i] = !(
			//(i >= '0' && i <= '9') ||
			(i >= 'a' && i <= 'z') ||
			(i >= 'A' && i <= 'Z'));
}

// Splits `query` into lowercase tokens on splitter characters (anything
// Init() marked as a non-letter), appending at most `max_token` tokens to
// `tokens`. Terms longer than MAX_TERM_LEN are truncated rather than
// overflowing the stack buffer.
//
// Fixes vs. the previous version:
//  - the last token is no longer pushed twice when the max_token limit is
//    hit mid-string (`l` is now reset before the break, so the tail flush
//    cannot re-emit it);
//  - `token[l++]` is bounds-checked against MAX_TERM_LEN (was a stack
//    buffer overflow for long terms);
//  - query bytes are cast to unsigned char before IsSpliter()/tolower():
//    a negative signed char is an out-of-range table index and is UB for
//    tolower().
void Tokenizer::Tokenize(string& query, vector<string>& tokens, int max_token) {
	LOG_INFO(cout << "tokenize query:" << query << endl);
	if (max_token <= 0) {
		return;
	}
	LOG_INFO(cout << "Get tokens:");
	char token[MAX_TERM_LEN];
	size_t p = 0;
	int l = 0;
	int count = 0;
	while (p < query.size()) {
		unsigned char c = static_cast<unsigned char>(query[p]);
		if (IsSpliter(c)) {
			if (l > 0) {
				tokens.push_back(string(token, 0, l));
				LOG_INFO(cout << tokens.back() << ",");
				count++;
				// Reset BEFORE a possible break so the tail flush below
				// does not emit this token a second time.
				l = 0;
				if (count == max_token) break;
			}
			l = 0;
		} else if (l < MAX_TERM_LEN) {
			// Truncate over-long terms instead of writing past token[].
			token[l++] = tolower(c);
		}
		p++;
	}
	// Flush the final token when the query does not end with a splitter.
	// (count < max_token is guaranteed here: the limit path breaks with l==0.)
	if (l > 0) {
		tokens.push_back(string(token, 0, l));
		LOG_INFO(cout << tokens.back() << ",");
	}
	LOG_INFO(cout << endl);
}

}