#include <ctype.h>
#include <stdlib.h>
#include <string.h>
#include "tokenizer.h"

#define BOOL_OP_COUNT		4
#define KEYWORD_COUNT		17

static const char *BINARY_CONSTANT				= "01";
static const char *NUMBER_CONSTANT				= "0123456789";
static const char *HEX_NUMBER_CONSTANT			= "0123456789abcdefABCDEF";

static const char *OPERATOR_CONSTANT			= "+-*/%|&^~=<>!,.()[]{}:;";
static const char *IDENTIFIER_CONSTANT			= "_abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";
static const char *IDENTIFIER_BODY_CONSTANT		= "0123456789_abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";

static const char *ONE_CHAR_OPERATORS			= "~,()[]{}:;";
static const char *TWO_CHAR_OPERATORS			= "*<>";

static const char *BOOL_OPS[BOOL_OP_COUNT]		=
{
	"or",
	"and",
	"xor",
	"not",
};

static const int BOOL_OPS_LEN[BOOL_OP_COUNT]	=
{
	2,
	3,
	3,
	3,
};

static const char *KEYWORDS[KEYWORD_COUNT]		=
{
	"as",
	"if",
	"for",
	"try",
	"case",
	"else",
	"func",
	"break",
	"raise",
	"while",
	"delete",
	"except",
	"return",
	"switch",
	"default",
	"finally",
	"continue",
};

static const int KEYWORDS_LEN[KEYWORD_COUNT]	=
{
	2,
	2,
	3,
	3,
	4,
	4,
	4,
	5,
	5,
	5,
	6,
	6,
	6,
	6,
	7,
	7,
	8,
};

/* Look at the character under the cursor without consuming it
   (rstring_at is expected to yield 0 past the end of the buffer). */
static inline char peek_char(tokenizer_t *self)
{
	char ch = rstring_at(self->buffer, self->pos);
	return ch;
}

/* Consume and return one character, keeping the (x, y) source location
   up to date.  Returns 0 at end of input without advancing. */
static inline char next_char(tokenizer_t *self)
{
	char ch = rstring_at(self->buffer, self->pos);

	if (ch == 0)
		return 0;

	if (ch == '\n' || ch == '\r')
	{
		/* start of a new line */
		self->y++;
		self->x = 0;

		/* fold a "\r\n" pair into a single line break */
		if (ch == '\r' && rstring_at(self->buffer, self->pos + 1) == '\n')
			self->pos++;
	}

	self->x++;
	self->pos++;
	return ch;
}

/* Consume consecutive whitespace characters, if any.
 * The cast to unsigned char matters: passing a plain char with a negative
 * value to isspace() is undefined behavior (CERT STR37-C). */
static inline void skip_spaces(tokenizer_t *self)
{
	while (isspace((unsigned char)peek_char(self)))
		next_char(self);
}

/* Consume '#'-to-end-of-line comments, together with any whitespace
   between consecutive comments. */
static inline void skip_comments(tokenizer_t *self)
{
	while (peek_char(self) == '#')
	{
		char ch;

		/* eat the '#' and everything up to (and including) the newline */
		do
			ch = next_char(self);
		while (ch != 0 && ch != '\n');

		skip_spaces(self);
	}
}

/* Return 1 when the token text is one of the word-like boolean operators
   ("or", "and", "xor", "not"), 0 otherwise. */
static inline char is_bool_op(rstring_t *token)
{
	int i;

	for (i = 0; i < BOOL_OP_COUNT; i++)
	{
		if (token->length != BOOL_OPS_LEN[i])
			continue;

		if (strcmp(token->content, BOOL_OPS[i]) == 0)
			return 1;
	}

	return 0;
}

/* Return 1 when the token text is a reserved word, 0 otherwise.
   The length is compared first as a cheap pre-filter before strcmp. */
static inline char is_keyword(rstring_t *token)
{
	int i;

	for (i = 0; i < KEYWORD_COUNT; i++)
	{
		if (token->length != KEYWORDS_LEN[i])
			continue;

		if (strcmp(token->content, KEYWORDS[i]) == 0)
			return 1;
	}

	return 0;
}

/* Allocate a fresh token of the given type with a reference count of 1
 * and all value fields cleared.
 * Returns NULL on allocation failure (was: the result of malloc was
 * written through unchecked, which is UB on OOM). */
static inline token_t *token_new(token_type_t type)
{
	token_t *token = malloc(sizeof *token);

	if (!token)
		return NULL;

	token->ref = 1;
	token->token = type;
	token->int_value = 0;
	token->float_value = 0.0;
	token->string_value = NULL;
	return token;
}

/* Return the value of a hex digit, or -1 when `c` is not one. */
static int hex_value(char c)
{
	if (c >= '0' && c <= '9')
		return c - '0';
	if (c >= 'a' && c <= 'f')
		return c - 'a' + 10;
	if (c >= 'A' && c <= 'F')
		return c - 'A' + 10;
	return -1;
}

/*
 * Scan a quoted string literal.  The opening quote (either '"' or '\'')
 * determines the closing quote.  Supported escapes: \0 \t \r \n \xNN,
 * plus \\ \' \" which yield the character itself (previously these three
 * fell into the error branch, making it impossible to escape a quote or
 * a backslash).  Returns NULL with error_code set on EOF or bad escape.
 */
static token_t *scan_string(tokenizer_t *self)
{
	token_t *token = token_new(tk_string);
	token->string_value = rstring_new();

	char start = next_char(self);	/* opening quote */
	char remains = next_char(self);

	while (start != remains)
	{
		switch (remains)
		{
			case 0:		/* unterminated string */
			{
				token_release(&token);
				self->error_code = tke_eof;
				return NULL;
			}

			case '\\':
			{
				remains = next_char(self);

				switch (remains)
				{
					case 0:		/* EOF right after the backslash */
					{
						token_release(&token);
						self->error_code = tke_eof;
						return NULL;
					}

					/* escaped backslash or quote: keep the char as-is */
					case '\\':
					case '\'':
					case '"':
						break;

					case '0':
						remains = 0;
						break;

					case 't':
						remains = '\t';
						break;

					case 'r':
						remains = '\r';
						break;

					case 'n':
						remains = '\n';
						break;

					case 'x':	/* \xNN: exactly two hex digits */
					{
						char msb = next_char(self);
						char lsb = next_char(self);

						if (msb == 0 || lsb == 0)
						{
							token_release(&token);
							self->error_code = tke_eof;
							return NULL;
						}

						int hi = hex_value(msb);
						int lo = hex_value(lsb);

						if (hi < 0 || lo < 0)
						{
							token_release(&token);
							/* report the offending digit
							   (was: incorrectly reported tke_eof) */
							self->error_char = (hi < 0) ? msb : lsb;
							self->error_code = tke_inv_char;
							return NULL;
						}

						remains = (char)((hi << 4) | lo);
						break;
					}

					default:	/* unknown escape sequence */
					{
						token_release(&token);
						self->error_char = remains;
						self->error_code = tke_inv_char;
						return NULL;
					}
				}

				break;
			}
		}

		rstring_append(token->string_value, remains);
		remains = next_char(self);
	}

	return token;
}

/*
 * Scan a numeric literal: decimal, binary ("0b...") or hex ("0x...")
 * integers, and decimal floats ("12.34").  Returns a tk_integer or
 * tk_float token, or NULL with error_code = tke_inv_float on a malformed
 * fractional part.
 */
static token_t *scan_integer(tokenizer_t *self)
{
	int base = 10;
	char number = next_char(self);
	token_t *token = token_new(tk_integer);
	const char *charset = NUMBER_CONSTANT;

	if (number != '0')
	{
		token->int_value = number - '0';
	}
	else
	{
		/* a leading '0' may introduce a base prefix or a float */
		char follow = peek_char(self);

		switch (follow)
		{
			case '.':
			{
				/* "0.xxx": leave the dot in place so the float branch
				 * below handles it.  (Was: the dot was consumed here,
				 * which made "0.5" lex as the integer 5 and silently
				 * dropped the dot in "0.foo".) */
				break;
			}

			case 'b':
			{
				base = 2;
				charset = BINARY_CONSTANT;
				next_char(self);
				break;
			}

			case 'x':
			{
				base = 16;
				charset = HEX_NUMBER_CONSTANT;
				next_char(self);
				break;
			}

			default:
			{
				/* plain "0" */
				token->int_value = 0;
				return token;
			}
		}
	}

	char follow = peek_char(self);

	/* accumulate digits that are valid for the current base */
	while (follow && strchr(charset, follow))
	{
		token->int_value *= base;
		token->int_value += (follow >= '0' && follow <= '9') ? (follow - '0') : (follow >= 'a' && follow <= 'f') ? (follow - 'a' + 10) : (follow - 'A' + 10);

		next_char(self);
		follow = peek_char(self);
	}

	/* fractional part is only allowed on base-10 literals */
	if (base == 10 && follow == '.')
	{
		next_char(self);
		char ch = peek_char(self);

		if (ch == '.')
		{
			/* ".." is the range operator: un-consume the first dot.
			 * Rewinding by hand is safe because '.' is not a newline,
			 * so x/pos were each advanced by exactly one. */
			self->x--;
			self->pos--;
			return token;
		}
		else if (ch < '0' || ch > '9')
		{
			/* a dot must be followed by a digit or another dot */
			token_release(&token);
			self->error_char = ch;
			self->error_code = tke_inv_float;
			return NULL;
		}

		float factor = 1.0f;

		token->token = tk_float;
		token->float_value = token->int_value;

		for (char ch = peek_char(self); ch >= '0' && ch <= '9'; ch = peek_char(self))
		{
			factor *= 0.1f;
			token->float_value += (next_char(self) - '0') * factor;
		}
	}

	return token;
}

/*
 * Scan a one-, two- or three-character operator starting at the current
 * position.  Returns a tk_operator token whose string_value holds the
 * operator text, or NULL with error_code = tke_inv_op when '!' is not
 * followed by '='.
 */
static token_t *scan_operator(tokenizer_t *self)
{
	char op = next_char(self);
	token_t *token = token_new(tk_operator);
	token->string_value = rstring_new_char(op);

	if (op == '=') /* = == */
	{
		if (peek_char(self) == '>') /* => */
		{
			rstring_append(token->string_value, next_char(self));
			return token;
		}
		/* not "=>": fall through to the '=' suffix check below for "==" */
	}
	else if (op == '.') /* . .. */
	{
		char follow = peek_char(self);

		if (follow != '.')
			return token;

		next_char(self);
		rstring_append(token->string_value, follow);
		return token;
	}
	else if (op == '!') /* != */
	{
		/* '!' is only valid as part of "!="; note the following
		   character is consumed even on the error path */
		char follow = next_char(self);

		if (follow != '=')
		{
			token_release(&token);
			self->error_char = follow;
			self->error_code = tke_inv_op;
			return NULL;
		}

		rstring_append(token->string_value, follow);
		return token;
	}
	else if (strchr(ONE_CHAR_OPERATORS, op)) /* ~ , ( ) [ ] { } : ; */
	{
		return token;
	}
	else if (strchr(TWO_CHAR_OPERATORS, op)) /* * < > */
	{
		/* doubled form; may still pick up an '=' suffix below */
		if (peek_char(self) == op) /* ** << >> */
			rstring_append(token->string_value, next_char(self));
	}

	/* optional '=' suffix for assignment/comparison forms */
	if (peek_char(self) == '=') /* += -= *= /= %= **= &= |= ^= >>= <<= >= <= */
		rstring_append(token->string_value, next_char(self));

	return token;
}

/* Scan an identifier ([_A-Za-z][_A-Za-z0-9]*), then reclassify it as a
   keyword or word-operator ("and", "or", ...) when the text matches. */
static token_t *scan_identifier(tokenizer_t *self)
{
	token_t *token = token_new(tk_identifier);

	/* first character was already validated by the dispatcher */
	token->string_value = rstring_new_char(next_char(self));

	for (char ch = peek_char(self); ch && strchr(IDENTIFIER_BODY_CONSTANT, ch); ch = peek_char(self))
		rstring_append(token->string_value, next_char(self));

	if (is_keyword(token->string_value))
		token->token = tk_keyword;
	else if (is_bool_op(token->string_value))
		token->token = tk_operator;

	return token;
}

/* Produce the next token: skip whitespace/comments, then dispatch on the
   first remaining character.  Returns NULL at end of input or on error
   (distinguish via self->error_code). */
static token_t *scan(tokenizer_t *self)
{
	skip_spaces(self);
	skip_comments(self);

	char ch = peek_char(self);

	if (ch == 0)
		return NULL;

	if (ch == '"' || ch == '\'')
		return scan_string(self);

	if (strchr(NUMBER_CONSTANT, ch))
		return scan_integer(self);

	if (strchr(OPERATOR_CONSTANT, ch))
		return scan_operator(self);

	if (strchr(IDENTIFIER_CONSTANT, ch))
		return scan_identifier(self);

	/* nothing can start with this character */
	self->error_char = ch;
	self->error_code = tke_inv_char;
	return NULL;
}

/* Add one reference to the token; NULL is tolerated as a no-op. */
void token_ref(token_t *self)
{
	if (self == NULL)
		return;

	self->ref++;
}

/* Drop one reference; when the count reaches zero, free the owned string
   and the token itself.  NULL is tolerated as a no-op. */
void token_unref(token_t *self)
{
	if (self == NULL)
		return;

	if (--(self->ref) > 0)
		return;

	rstring_release(&(self->string_value));
	free(self);
}

/* Drop one reference through a pointer-to-pointer and clear the caller's
   pointer, preventing accidental reuse after release. */
void token_release(token_t **self)
{
	if (self == NULL)
		return;

	token_unref(*self);
	*self = NULL;
}

/* Release the resources owned by the tokenizer: the cached lookahead
   token and the source buffer.  Does not free `self` itself. */
void tokenizer_free(tokenizer_t *self)
{
	token_release(&(self->cache));
	rstring_release(&(self->buffer));
}

/* Put the tokenizer into its empty initial state: no source buffer,
   no cached lookahead, position at the origin, no error recorded. */
void tokenizer_init(tokenizer_t *self)
{
	self->pos = 0;
	self->x = 0;
	self->y = 0;

	self->buffer = NULL;
	self->cache = NULL;

	self->error_code = tke_ok;
	self->error_char = 0;
}

/* Install a new source buffer (taking a reference on it), drop any
   previous buffer and cached token, and reset position and error state.
   The new reference is taken BEFORE the old buffer is released so that
   re-installing the currently-held buffer is safe. */
void tokenizer_set_source(tokenizer_t *self, rstring_t *source)
{
	rstring_ref(source);

	token_release(&(self->cache));
	rstring_release(&(self->buffer));

	self->buffer = source;

	self->pos = 0;
	self->x = 0;
	self->y = 0;

	self->error_code = tke_ok;
	self->error_char = 0;
}

/* Return the next token without consuming it.  The one-token lookahead
   cache is filled on demand; the caller receives its own reference
   (token_ref tolerates the NULL returned at EOF or on error). */
token_t *tokenizer_peek(tokenizer_t *self)
{
	if (self->cache == NULL)
		self->cache = scan(self);

	token_ref(self->cache);
	return self->cache;
}

/* Consume and return the next token.  A cached lookahead token is handed
   to the caller together with its reference; otherwise a fresh token is
   scanned.  Returns NULL at EOF or on error. */
token_t *tokenizer_next(tokenizer_t *self)
{
	token_t *token;

	if (self->cache != NULL)
	{
		token = self->cache;
		self->cache = NULL;
	}
	else
	{
		token = scan(self);
	}

	return token;
}
