superstrict

import "Database.bmx"
import "Lexer.bmx"
import "Datatype.bmx"
import "Symbol.bmx"

type DBParser

	'// -----------------------------------------------------------------------------------------------------------------
	'// 
	'// -----------------------------------------------------------------------------------------------------------------
	const NULL_STRING:string = "_&%NULL%&_";
	
	'// -----------------------------------------------------------------------------------------------------------------
	'// 
	'// -----------------------------------------------------------------------------------------------------------------
	function create:DBParser(lex:DBLexer)
	
		'// Factory: build a parser bound to the given lexer's token stream.
		local parser:DBParser = new DBParser;
		parser.m_lexer = lex;
		
		return parser;
	
	end function
	
	'// -----------------------------------------------------------------------------------------------------------------
	'// 
	'// -----------------------------------------------------------------------------------------------------------------
	method parse:Symbol(errors:TList)
	
		'// Remember where to report errors, prime the token cursor, then
		'// descend into the grammar's start symbol. Returns the root scope,
		'// or null when trailing tokens remain after the translation unit.
		m_errorList = errors;
		lexer_nextToken();
		
		return g_translation_unit();
	
	end method
	
	'// -----------------------------------------------------------------------------------------------------------------
	'// translation-unit:
	'//     database-definition
	'// -----------------------------------------------------------------------------------------------------------------
	method g_translation_unit:Symbol()
	
		'// The root scope that will hold every parsed database.
		local unit:Symbol = Symbol.create("translation_unit", Symbol.TRANSLATION_UNIT);
		
		g_database_definition(unit);
		
		'// Every token must have been consumed by now.
		if (lexer_nextToken().m_type = Token.TYPE_EOF) ..
			return unit;
		
		parser_error(Error.EXPECTED, "<end-of-file> token");
		return null;
	
	end method
	
	'// -----------------------------------------------------------------------------------------------------------------
	'// database-sequence:
	'//     database-definition
	'//     database-sequence
	'// -----------------------------------------------------------------------------------------------------------------
	method g_database_sequence:byte(scope:Symbol)
	
		'// Parse database-definitions into 'scope' until a closing brace or
		'// the end of the file is the next token. Always returns true.
		local tok:Token = null;
		
		repeat
		
			'// Peek at the next token.
			tok = lexer_nextToken();
			
			'// Done at EOF or at the '}' of the enclosing body.
			if (tok.m_type = Token.TYPE_EOF or tok.m_type = Token.TYPE_CLOSE_BRACE) ..
				return true;
			
			'// Stray semicolons are tolerated and skipped; anything else must
			'// be a database-definition.
			if (tok.m_type = Token.TYPE_SEMICOLON)
			
				lexer_consumeToken();
			
			else
			
				g_database_definition(scope);
			
			end if
		
		forever
	
	end method
	
	'// -----------------------------------------------------------------------------------------------------------------
	'// database-definition:
	'//     database identifier database-body
	'// -----------------------------------------------------------------------------------------------------------------
	method g_database_definition:byte(scope:Symbol)
	
		'// Parses: 'database' identifier database-body. On success a DATABASE
		'// symbol named after the identifier is defined inside 'scope' and
		'// true is returned; on a syntax error the bad construct is skipped,
		'// an error is recorded, and false is returned.
		local tok:Token = lexer_nextToken();
		local def:Symbol = Symbol.create("", Symbol.DATABASE);
		local ident:string;
		
		if (tok.m_type = Token.RID_DATABASE)
		
			'// Consume the 'database' token.
			lexer_consumeToken();
			
			'// Peek at the next token.
			'// NOTE(review): this refreshed 'tok' is never read before the next
			'// assignment — looks removable; confirm.
			tok = lexer_nextToken();
			
			'// Parse the identifier.
			ident = g_identifier(scope, Token.RID_DATABASE);
			
			if (not ident) ..
				return false;
			
			'// Set the name of this database symbol.
			def.m_name = ident;
			
			'// Peek at the next token.
			tok = lexer_nextToken();
			
			'// Parse the database-body.
			g_database_body(def);
			
			'// Define the database within the current scope.
			if (def) ..
				parser_define(scope, def);
		
		else
		
			parser_skipToEndOfBlockOrColumn();
			parser_error(Error.EXPECTED, "database-definition before '" + tok.m_lexeme + "' token");
			return false;
		
		end if
		
		return true;
	
	end method
	
	'// -----------------------------------------------------------------------------------------------------------------
	'// identifier:
	'//     letter letter-or-number (opt)
	'// 
	'// letter:
	'//     a b c d e f g h i j k l m n o p q r s t u v w x y z
	'//     A B C D E F G H I J K L M N O P Q R S T U V W X Y Z
	'//     _
	'// 
	'// letter-or-number:
	'//     letter
	'//     number
	'//     letter-or-number
	'// -----------------------------------------------------------------------------------------------------------------
	method g_identifier:string(scope:Symbol, lastTokenType:short)
	
		'// Consumes the next token and returns its lexeme when it is an
		'// identifier. Otherwise skips to the end of the current block/column,
		'// records an error, and falls through — returning a null string.
		'// 'lastTokenType' is the keyword that preceded the identifier; it is
		'// only referenced by the disabled semantic check below.
		local tok:Token = lexer_nextToken();
		
		'// Consume the identifier.
		if (tok.m_type = Token.TYPE_IDENT)
		
			lexer_consumeToken();
			
			rem
			'// Check for semantical errors.
			if (lastTokenType = Token.RID_DATABASE or lastTokenType = Token.RID_TABLE or lastTokenType = Token.RID_ROW)
			
				'// The identifier must not be declared.
				;
			
			else
			
				'// The identifier must be declared.
				;
			
			end if
			end rem
			
			return tok.m_lexeme;
		
		end if
		
		parser_skipToEndOfBlockOrColumn();
		parser_error(Error.EXPECTED, "identifier before '" + tok.m_lexeme + "' token");
	
	end method
	
	'// -------------------------------------------------------------------------------------------------------------
	'// database-body:
	'//     { database-sequence (opt) table-sequence (opt) }
	'//     ;
	'// -------------------------------------------------------------------------------------------------------------
	method g_database_body(scope:Symbol)
	
		'// Parses a database-body: either a single ';' or a '{...}' block that
		'// may contain nested database-definitions and table-definitions, all
		'// defined into 'scope'. Errors are reported and recovered from by
		'// skipping; the method itself returns nothing.
		local tok:Token = null;
		local def:Symbol = null;
		
		'// If we reach a ';', we're done.
		if (lexer_nextTokenIs(Token.TYPE_SEMICOLON))
		
			lexer_consumeToken();
			return;
		
		end if
		
		'// Consume the '{' token.
		if (not parser_require(Token.TYPE_OPEN_BRACE, "{"))
		
			parser_skipToClosingBrace();
			return;
		
		end if
		
		'// If we find a '}' we have an empty body.
		tok = lexer_nextToken();
		
		if (tok.m_type = Token.TYPE_CLOSE_BRACE)
		
			lexer_consumeToken();
			return;
		
		end if
		
		'// Parse the definitions of the body.
		while (true)
		
			'// NOTE(review): 'def' is reset each iteration but never assigned
			'// in this loop — confirm it is leftover scaffolding.
			def = null;
			
			'// Peek at the next token.
			tok = lexer_nextToken(); 
			
			'// If we reach the end of the file, or find a closing brace, we're done.
			if (tok.m_type = Token.TYPE_CLOSE_BRACE ..
			or tok.m_type = Token.TYPE_EOF) ..
				exit;
			
			'// We allow extra semicolons for now.
			if (tok.m_type = Token.TYPE_SEMICOLON)
			
				lexer_consumeToken();
				continue;
			
			end if
			
			'// Parse the definition, dispatching on the leading keyword.
			select (tok.m_type)
			
				case Token.RID_DATABASE
					g_database_definition(scope);
				case Token.RID_TABLE
					g_table_definition(scope);
				default
					parser_error(Error.EXPECTED, "database-definition or table-definition before '" + ..
						tok.m_lexeme + "' token");
					parser_skipToEndOfBlockOrColumn();
			
			end select
		
		end while
		
		'// Consume the '}' token.
		if (not parser_require(Token.TYPE_CLOSE_BRACE, "}"))
		
			parser_skipToClosingBrace();
			return;
		
		end if
	
	end method
	
	'// -----------------------------------------------------------------------------------------------------------------
	'// table-definition:
	'//     table identifier ( column-decl-list (opt) ) table-body
	'// 
	'// column-decl-list:
	'//     column-declaration
	'//     column-declaration , column-decl-list
	'// 
	'// column-declaration:
	'//     datatype identifier
	'// -----------------------------------------------------------------------------------------------------------------
	method g_table_definition:byte(scope:Symbol)
	
		'// Parses: 'table' identifier '(' column-decl-list(opt) ')' table-body.
		'// Column declarations ('datatype identifier') become COLUMN symbols on
		'// the table; the finished TABLE symbol is defined into 'scope' when the
		'// body parses. Returns false on any syntax error, true otherwise.
		local tok:Token = lexer_nextToken();
		local def:Symbol = Symbol.create("", Symbol.TABLE);
		local col:Symbol = null;
		local ident:string;
		
		if (tok.m_type = Token.RID_TABLE)
		
			'// Consume the 'table' token.
			lexer_consumeToken();
			
			'// Peek at the next token.
			'// NOTE(review): this refreshed 'tok' is not read before the column
			'// loop reassigns it — confirm it is removable.
			tok = lexer_nextToken();
			
			'// Parse the identifier.
			ident = g_identifier(scope, Token.RID_TABLE);
			
			'// Set the name of this table symbol.
			def.m_name = ident;
			
			'// Consume the '('.
			if (not parser_require(Token.TYPE_OPEN_PAREN, "("))
			
				parser_skipToEndOfColumn();
				return false;
			
			end if
			
			'// If the next token is an identifier, we're looking at a column-sequence.
			if (lexer_nextTokenIsDatatype())
			
				while (true)
				
					'// Peek at the next token.
					tok = lexer_nextToken(); 
					
					'// If we reach the end of the file, or find a ')' token, we're done.
					if (tok.m_type = Token.TYPE_CLOSE_PAREN ..
					or tok.m_type = Token.TYPE_EOF) ..
						exit;
					
					col = Symbol.create("", Symbol.COLUMN);
					
					'// Parse the datatype.
					col.m_datatype = g_datatype();
					
					if (col.m_datatype = Datatype.DT_NONE) ..
						return false;
					
					'// Parse the identifier.
					col.m_name = g_identifier(scope, Token.RID_TABLE);
					
					if (not col.m_name) ..
						return false;
					
					'// Peek at the next token.
					tok = lexer_nextToken(); 
					
					'// Define the column symbol.
					parser_define(def, col);
					
					'// If the next token is not a ',' token, we're done.
					if (tok.m_type <> Token.TYPE_COMMA) ..
						exit;
					
					'// Consume the ',' token.
					lexer_consumeToken();
				
				end while
			
			end if
			
			'// Consume the ')'.
			if (not parser_require(Token.TYPE_CLOSE_PAREN, ")"))
			
				parser_skipToEndOfColumn();
				return false;
			
			end if
			
			'// NOTE(review): the identifier failure is only rejected here, after
			'// the column list has been parsed — confirm this late check is
			'// intentional.
			if (not ident) ..
				return false;
			
			'// Peek at the next token.
			tok = lexer_nextToken();
			
			'// Parse the table-body.
			if (g_table_body(def))
			
				'// Define the table within the current scope.
				parser_define(scope, def);
			
			end if
		
		else
		
			parser_skipToEndOfBlockOrColumn();
			parser_error(Error.EXPECTED, "table-definition before '" + tok.m_lexeme + "' token");
			return false;
		
		end if
		
		return true;
	
	end method
	
	'// -------------------------------------------------------------------------------------------------------------
	'// table-body:
	'//     { row-sequence }
	'//     ;
	'// 
	'// row-sequence:
	'//     row-definition
	'//     row-sequence
	'// -------------------------------------------------------------------------------------------------------------
	method g_table_body:byte(scope:Symbol)
	
		'// Parses a table-body: a single ';' or a '{...}' block containing
		'// row-definitions, which are defined into 'scope'. Returns false when
		'// a brace is missing or an unexpected token was found inside the body;
		'// true otherwise.
		local tok:Token = null;
		local def:Symbol = null;
		local result:byte = true;
		
		'// If we reach a ';', we're done.
		if (lexer_nextTokenIs(Token.TYPE_SEMICOLON))
		
			lexer_consumeToken();
			return true;
		
		end if
		
		'// Consume the '{' token.
		if (not parser_require(Token.TYPE_OPEN_BRACE, "{"))
		
			parser_skipToClosingBrace();
			return false;
		
		end if
		
		'// If we find a '}' we have an empty body.
		tok = lexer_nextToken();
		
		if (tok.m_type = Token.TYPE_CLOSE_BRACE)
		
			lexer_consumeToken();
			return true;
		
		end if
		
		'// Parse the row-definitions of the body.
		while (true)
		
			'// NOTE(review): 'def' is reset each iteration but never assigned
			'// in this loop — confirm it is leftover scaffolding.
			def = null;
			
			'// Peek at the next token.
			tok = lexer_nextToken(); 
			
			'// If we reach the end of the file, or find a closing brace, we're done.
			if (tok.m_type = Token.TYPE_CLOSE_BRACE ..
			or tok.m_type = Token.TYPE_EOF) ..
				exit;
			
			'// We allow extra semicolons for now.
			if (tok.m_type = Token.TYPE_SEMICOLON)
			
				lexer_consumeToken();
				continue;
			
			end if
			
			'// Parse the definition.
			select (tok.m_type)
			
				case Token.RID_ROW
					g_row_definition(scope);
				default
					parser_error(Error.EXPECTED, "row-definition before '" + tok.m_lexeme + "' token");
					parser_skipToEndOfBlockOrColumn();
					result = false;
			
			end select
		
		end while
		
		'// Consume the '}' token.
		if (not parser_require(Token.TYPE_CLOSE_BRACE, "}"))
		
			parser_skipToClosingBrace();
			return false;
		
		end if
		
		return result;
	
	end method
	
	'// -----------------------------------------------------------------------------------------------------------------
	'// row-definition:
	'//     row identifier row-body
	'// -----------------------------------------------------------------------------------------------------------------
	method g_row_definition:byte(scope:Symbol)
	
		'// Parses: 'row' identifier row-body. The ROW symbol is defined into
		'// 'scope' *before* its body is parsed, so the body's assignments can
		'// resolve against it. Returns the row-body's result, or false on a
		'// syntax error in the header.
		local tok:Token = lexer_nextToken();
		local def:Symbol = Symbol.create("", Symbol.ROW);
		local ident:string;
		local result:byte = false;
		
		if (tok.m_type = Token.RID_ROW)
		
			'// Consume the 'row' token.
			lexer_consumeToken();
			
			'// Peek at the next token.
			'// NOTE(review): this refreshed 'tok' is never read — confirm it is
			'// removable.
			tok = lexer_nextToken();
			
			'// Parse the identifier.
			ident = g_identifier(scope, Token.RID_ROW);
			
			if (not ident) ..
				return false;
			
			'// Set the name of this row symbol.
			def.m_name = ident;
			
			'// Peek at the next token.
			tok = lexer_nextToken();
			
			'// Define the row within the current scope.
			parser_define(scope, def);
			
			'// Parse the row-body.
			result = g_row_body(def);
		
		else
		
			parser_skipToEndOfBlockOrColumn();
			parser_error(Error.EXPECTED, "row-definition before '" + tok.m_lexeme + "' token");
			return false;
		
		end if
		
		return result;
	
	end method
	
	'// -------------------------------------------------------------------------------------------------------------
	'// row-body:
	'//     { column-sequence }
	'//     ;
	'// 
	'// column-sequence:
	'//     identifier = expression ;
	'//     column-sequence
	'// -------------------------------------------------------------------------------------------------------------
	method g_row_body:byte(scope:Symbol)
	
		'// Parses a row-body: a single ';' or a '{...}' block of column
		'// assignments of the form 'identifier = expression ;'. Each valid
		'// assignment becomes an ASSIGNMENT symbol defined into 'scope'.
		'// 'scope' is the ROW symbol; its parent (the table) supplies the
		'// declared columns. Returns false when any column failed to parse.
		local tok:Token = null;
		local colIdent:string = "";
		local col:Symbol = null;
		local def:Symbol = null;
		local result:byte = true;
		
		'// If we reach a ';', we're done.
		if (lexer_nextTokenIs(Token.TYPE_SEMICOLON))
		
			lexer_consumeToken();
			return true;
		
		end if
		
		'// Consume the '{' token.
		if (not parser_require(Token.TYPE_OPEN_BRACE, "{"))
				
			parser_skipToClosingBrace();
			return false;
		
		end if
		
		'// If we find a '}' we have an empty body.
		tok = lexer_nextToken();
		
		if (tok.m_type = Token.TYPE_CLOSE_BRACE)
		
			lexer_consumeToken();
			return true;
		
		end if
		
		'// Parse the columns of the body.
		while (true)
		
			def = null;
			colIdent = "";
			
			'// Peek at the next token.
			tok = lexer_nextToken(); 
			
			'// If we reach the end of the file, or find a closing brace, we're done.
			if (tok.m_type = Token.TYPE_CLOSE_BRACE ..
			or tok.m_type = Token.TYPE_EOF) ..
				exit;
			
			'// We allow extra semicolons for now.
			if (tok.m_type = Token.TYPE_SEMICOLON)
			
				lexer_consumeToken();
				continue;
			
			end if
			
			'// Peek at the next token.
			tok = lexer_nextToken(); 
			
			'// Consume the identifier and remember the column ident.
			colIdent = g_identifier(scope, Token.RID_ROW);
			
			if (not colIdent)
			
				result = false;
				continue;
			
			end if
			
			'// Create a new assignment symbol.
			def = Symbol.create(colIdent, Symbol.ASSIGNMENT);
			
			'// Look the column up on the enclosing table (the row's parent).
			col = Symbol(scope.m_parent.findColumn(colIdent));
			
			if (not col)
			
				result = false;
				parser_skipToEndOfColumn();
				parser_error(Error.A_NOT_DECLARED_IN_SCOPE, colIdent, tok.m_line);
				continue;
			
			end if
			
			'// Consume the '='.
			if (not parser_require(Token.TYPE_ASSIGN, "="))
			
				result = false;
				parser_skipToEndOfColumn();
				continue;
			
			end if
			
			'// Parse the expression and set the value of this assignment symbol.
			'// NULL_STRING is the sentinel for "expression failed to parse".
			def.m_value = g_expression(scope);
			
			if (def.m_value = NULL_STRING)
			
				result = false;
				continue;
			
			end if
			
			'// Consume the ';'.
			if (not parser_require(Token.TYPE_SEMICOLON, ";"))
			
				result = false;
				parser_skipToEndOfColumn();
				continue;
			
			end if
			
			'// Define this assignment (binding it to the current scope).
			parser_define(scope, def);
		
		end while
		
		'// Consume the '}' token.
		if (not parser_require(Token.TYPE_CLOSE_BRACE, "}"))
		
			parser_skipToClosingBrace();
			return false;
		
		end if
		
		return result;
	
	end method
	
	'// -----------------------------------------------------------------------------------------------------------------
	'// datatype:
	'//     bool, int, real, char, string
	'// -----------------------------------------------------------------------------------------------------------------
	method g_datatype:short()
	
		'// Translate the next token into one of the Datatype constants.
		'// The keyword token is consumed on a match; otherwise an error is
		'// reported, the column is skipped, and DT_NONE is returned.
		local tok:Token = lexer_nextToken();
		local dt:short = Datatype.DT_NONE;
		
		select (tok.m_type)
		
			case Token.RID_BOOL
				dt = Datatype.DT_BOOL;
			case Token.RID_INT
				dt = Datatype.DT_INT;
			case Token.RID_REAL
				dt = Datatype.DT_REAL;
			case Token.RID_CHAR
				dt = Datatype.DT_CHAR;
			case Token.RID_STRING
				dt = Datatype.DT_STRING;
		
		end select
		
		if (dt = Datatype.DT_NONE)
		
			parser_skipToEndOfColumn();
			parser_error(Error.EXPECTED, "datatype before '" + tok.m_lexeme + "' token");
		
		else
		
			lexer_consumeToken();
		
		end if
		
		return dt;
	
	end method
	
	'// -----------------------------------------------------------------------------------------------------------------
	'// expression:
	'//     primary-expression
	'// -----------------------------------------------------------------------------------------------------------------
	method g_expression:string(scope:Symbol)
	
		'// An expression is currently just a primary-expression; the value is
		'// returned verbatim (NULL_STRING on failure).
		local value:string = g_primary_expression(scope);
		
		return value;
	
	end method
	
	'// -----------------------------------------------------------------------------------------------------------------
	'// primary-expression:
	'//     literal
	'//     reference
	'// 
	'// literal:
	'//     string-literal
	'//     real-literal
	'//     char-literal
	'//     bool-literal
	'//     integer-literal
	'// 
	'// reference:
	'//     [ database-ref-expression . table . row . column ]
	'// 
	'// database-ref-expression:
	'//     database
	'//     database : database-ref-expression
	'// -----------------------------------------------------------------------------------------------------------------
	method g_primary_expression:string(scope:Symbol)
	
		'// Parses a primary-expression: either a literal (whose lexeme is
		'// returned as the value) or a bracketed reference of the form
		'// [ database-ref-expression . table . row . column ]. Returns
		'// NULL_STRING on any syntax error. References are not resolved yet:
		'// a successfully parsed reference currently yields the string "null".
		local tok:Token = lexer_nextToken();
		local dbRefExpr:Symbol;
		
		select (tok.m_type)
		
			case Token.TYPE_INTEGER, ..
			     Token.TYPE_REAL, ..
			     Token.TYPE_STRING, ..
			     Token.TYPE_CHAR, ..
			     Token.RID_TRUE, ..
			     Token.RID_FALSE, ..
			     Token.RID_NULL
				'// Consume the literal token.
				lexer_consumeToken();
				
				'// Return the value of the literal.
				return tok.m_lexeme;
			case Token.TYPE_OPEN_SQUARE
				'// Consume the '[' token.
				lexer_consumeToken();
				
				'// Parse the database-ref-expression.
				dbRefExpr = g_database_ref_expression(scope);
				
				if (not dbRefExpr) ..
					return NULL_STRING;
				
				'// Consume the '.' token.
				if (not parser_require(Token.TYPE_DOT, "."))
				
					parser_skipToEndOfColumn();
					return NULL_STRING;
				
				end if
				
				'// Consume the table identifier token.
				if (not parser_require(Token.TYPE_IDENT, "identifier"))
				
					parser_skipToEndOfColumn();
					return NULL_STRING;
				
				end if
				
				'// Consume the '.' token.
				if (not parser_require(Token.TYPE_DOT, "."))
				
					parser_skipToEndOfColumn();
					return NULL_STRING;
				
				end if
				
				'// Consume the row identifier token.
				if (not parser_require(Token.TYPE_IDENT, "row"))
				
					parser_skipToEndOfColumn();
					return NULL_STRING;
				
				end if
				
				'// Consume the '.' token.
				if (not parser_require(Token.TYPE_DOT, "."))
				
					parser_skipToEndOfColumn();
					return NULL_STRING;
				
				end if
				
				'// Consume the column identifier token.
				if (not parser_require(Token.TYPE_IDENT, "identifier"))
				
					parser_skipToEndOfColumn();
					return NULL_STRING;
				
				end if
				
				'// Consume the ']' token.
				if (not parser_require(Token.TYPE_CLOSE_SQUARE, "]"))
				
					parser_skipToEndOfColumn();
					return NULL_STRING;
				
				end if
				
				'// Placeholder value until reference resolution is implemented.
				return "null";
			
			default
				parser_skipToEndOfColumn();
				parser_error(Error.EXPECTED, "primary-expression before '" + tok.m_lexeme + "' token");
				return NULL_STRING;
		
		end select
	
	end method
	
	'// -----------------------------------------------------------------------------------------------------------------
	'// database-ref-expression:
	'//     database
	'//     database : database-ref-expression
	'// -----------------------------------------------------------------------------------------------------------------
	method g_database_ref_expression:Symbol(scope:Symbol)
	
		'// Parses: database-ident (':' database-ident)*. Reports an error and
		'// returns null when the sequence is malformed; otherwise returns a
		'// placeholder symbol (resolution is not implemented yet).
		local tok:Token = null;
		local db:string = "";
		
		while (true)
		
			'// Peek at the next token.
			tok = lexer_nextToken();
			
			'// Running out of tokens in the middle of a reference is an error.
			'// BUGFIX: a stray '..' continuation after this condition glued the
			'// first statement onto the if-line, leaving the multi-line block
			'// and its 'end if' unbalanced.
			if (tok.m_type = Token.TYPE_EOF)
			
				parser_skipToEndOfColumn();
				parser_error(Error.EXPECTED, "identifier before '" + tok.m_lexeme + "' token");
				return null;
			
			end if
			
			'// Parse the database identifier.
			db = g_identifier(scope, Token.RID_DATABASE);
			
			if (not db) ..
				return null;
			
			'// A ':' means another nested database level follows; anything
			'// else ends the database-ref-expression.
			if (lexer_nextToken().m_type <> Token.TYPE_COLON) ..
				exit;
			
			'// Consume the ':' token.
			lexer_consumeToken();
		
		end while
		
		'// Temporary placeholder until symbol resolution is implemented.
		return new Symbol;
	
	end method
	
	'// -------------------------------------------------------------------------------------------------------------
	'// 
	'// -------------------------------------------------------------------------------------------------------------
	method parser_define(scope:Symbol, symb:Symbol)
	
		'// Bind 'symb' into 'scope'. Columns declared on a table go into the
		'// table's dedicated column list; everything else is pushed onto the
		'// scope's general symbol list.
		local tableColumn:byte = (scope.m_type = Symbol.TABLE and symb.m_type = Symbol.COLUMN);
		
		if (tableColumn)
			scope.pushColumn(symb);
		else
			scope.push(symb);
		end if
	
	end method
	
	'// -------------------------------------------------------------------------------------------------------------
	'// Put an error message in the errorlist.
	'// -------------------------------------------------------------------------------------------------------------
	method parser_error(msg_type:short, data:string = null, line:SourceLine = null)
	
		'// Append an error to the error list. Negative message types are
		'// silently ignored; when no source line is supplied, the line of the
		'// next unconsumed token is used instead.
		if (msg_type >= 0)
		
			if (not line)
			
				line = lexer_nextToken().m_line;
			
			end if
			
			m_errorList.addLast(Error.createError(msg_type, line, data));
		
		end if
	
	end method
	
	'// -------------------------------------------------------------------------------------------------------------
	'// If the next token is of the indicated 'tokenType', consume it. Otherwise, issue an error message indicating
	'// that 'tokenDesc' was expected. Returns the token consumed, if the token had the appropriate type. Otherwise,
	'// returns null.
	'// -------------------------------------------------------------------------------------------------------------
	method parser_require:Token(tokenType:short, tokenDesc:string)
	
		'// Consume and return the next token when it matches 'tokenType'.
		'// Otherwise report "expected <tokenDesc> before <lexeme>" and return
		'// null without consuming anything.
		if (not lexer_nextTokenIs(tokenType))
		
			parser_error(Error.EXPECTED_A_BEFORE_B, tokenDesc + "#" + lexer_nextToken().m_lexeme);
			return null;
		
		end if
		
		return lexer_consumeToken();
	
	end method

	'// -------------------------------------------------------------------------------------------------------------
	'// Skip tokens until a non-nested closing curly brace is the next token, or there are no more tokens. Returns
	'// true in the first case, false otherwise.
	'// -------------------------------------------------------------------------------------------------------------
	method parser_skipToClosingBrace:byte()
	
		'// Skip tokens until a NON-nested '}' is the next token (returns true
		'// without consuming it), or the stream runs out (returns false).
		local nestingDepth:int = 0;
		
		while (true)
		
			select (lexer_nextToken().m_type)
			
				case Token.TYPE_EOF
					'// Out of tokens: no closing brace was found.
					return false;
				case Token.TYPE_CLOSE_BRACE
					'// BUGFIX: only an unnested '}' ends the skip. The previous
					'// decrement-then-'<= 0' test also stopped at the '}' that
					'// merely closed a nested block we were skipping over
					'// (e.g. for input '{ } }' it stopped at the first '}').
					if (nestingDepth = 0) ..
						return true;
					nestingDepth :- 1;
				case Token.TYPE_OPEN_BRACE
					nestingDepth :+ 1;
			
			end select
			
			'// Consume the token and keep scanning.
			lexer_consumeToken();
		
		end while
	
	end method
	
	'// -------------------------------------------------------------------------------------------------------------
	'// Skip tokens until we have consumed an entire block, or until we have consumed a non-nested `;'.
	'// -------------------------------------------------------------------------------------------------------------
	method parser_skipToEndOfBlockOrColumn()
	
		'// Error-recovery skip: consume tokens until either an entire '{...}'
		'// block has been consumed, or an unnested ';' has been consumed, or
		'// an unnested '}' is next (that '}' is left unconsumed for the
		'// caller), or the stream runs out.
		'// nestingDepth = -1 is the "consume the current token, then stop"
		'// sentinel checked by the loop condition.
		local nestingDepth:int = 0;
		
		while (nestingDepth >= 0)
		
			select (lexer_nextToken().m_type)
			
				case Token.TYPE_EOF
					'// If we've run out of tokens, stop.
					return;
				case Token.TYPE_SEMICOLON
					'// Stop if this is an unnested ';'.
					if (nestingDepth = 0) ..
						nestingDepth = -1;
				case Token.TYPE_CLOSE_BRACE
					'// Stop if this is an unnested '}', or closes the outermost nesting level.
					nestingDepth :- 1;
					if (nestingDepth < 0) ..
						return;
					if (nestingDepth = 0) ..
						nestingDepth = -1;
				case Token.TYPE_OPEN_BRACE
					'// Nest.
					nestingDepth :+ 1;
			
			end select
			
			'// Consume the token.
			lexer_consumeToken();
		
		end while
	
	end method
	
	'// -------------------------------------------------------------------------------------------------------------
	'// Consume tokens until we reach the end of the current column. Normally, that will be just before consuming
	'// a `;'. However, if a non-nested `}' comes first, then we stop before consuming that.
	'// -------------------------------------------------------------------------------------------------------------
	method parser_skipToEndOfColumn()
	
		'// Error-recovery skip: consume tokens up to, but not including, the
		'// unnested ';' that ends the current column — or up to an unnested
		'// '}' when that comes first. Braced blocks met along the way are
		'// skipped whole.
		local nestingDepth:int = 0;
		
		while (true)
		
			select (lexer_nextToken().m_type)
			
				case Token.TYPE_EOF
					'// If we've run out of tokens, stop.
					return;
				case Token.TYPE_SEMICOLON
					'// An unnested ';' marks the end of the column.
					if (nestingDepth = 0) ..
						return;
				case Token.TYPE_CLOSE_BRACE
					'// Stop (without consuming) at an unnested '}'.
					if (nestingDepth = 0) ..
						return;
					'// BUGFIX: a '}' inside a skipped block must close one
					'// nesting level. The original *incremented* the depth
					'// here, so it never returned to zero and the skip ran on
					'// to end-of-file; the old '<= 0' consume-and-return branch
					'// was consequently dead and contradicted the contract.
					nestingDepth :- 1;
				case Token.TYPE_OPEN_BRACE
					nestingDepth :+ 1;
			
			end select
			
			'// Consume the token.
			lexer_consumeToken();
		
		end while
	
	end method
	
	'// -------------------------------------------------------------------------------------------------------------
	'// Return the next token.
	'// -------------------------------------------------------------------------------------------------------------
	method lexer_nextToken:Token()
	
		'// Peek: lazily anchor the cursor at the head of the lexer's token
		'// stream, then return the token under it without consuming.
		if (not m_nextTokenLink)
		
			m_nextTokenLink = m_lexer.m_tokenStream.firstLink();
		
		end if
		
		return Token(m_nextTokenLink.value());
	
	end method
	
	'// -------------------------------------------------------------------------------------------------------------
	'// Returns true if the next token is the indicated token type.
	'// -------------------------------------------------------------------------------------------------------------
	method lexer_nextTokenIs:byte(tokenType:short)
	
		'// True when the upcoming (unconsumed) token has the given type.
		return lexer_tokenIs(lexer_nextToken(), tokenType);
	
	end method
	
	'// -------------------------------------------------------------------------------------------------------------
	'// Returns true if the next token is a datatype keyword.
	'// -------------------------------------------------------------------------------------------------------------
	method lexer_nextTokenIsDatatype:byte()
	
		'// True when the upcoming token is one of the datatype keywords
		'// (bool, int, real, char, string).
		local tokType:short = lexer_nextToken().m_type;
		
		return tokType = Token.RID_BOOL ..
			or tokType = Token.RID_INT ..
			or tokType = Token.RID_REAL ..
			or tokType = Token.RID_CHAR ..
			or tokType = Token.RID_STRING;
	
	end method
	
	'// -------------------------------------------------------------------------------------------------------------
	'// Returns true if the specified token is the end-of-file token.
	'// -------------------------------------------------------------------------------------------------------------
	method lexer_nextTokenIsEof:byte()
	
		'// True when the upcoming token is the end-of-file marker.
		return lexer_tokenIs(lexer_nextToken(), Token.TYPE_EOF);
	
	end method
	
	'// -------------------------------------------------------------------------------------------------------------
	'// Returns true if the specified token is the indicated token type.
	'// -------------------------------------------------------------------------------------------------------------
	method lexer_tokenIs:byte(tok:Token, tokenType:short)
	
		'// True when 'tok' carries exactly the requested token type.
		if (tok.m_type = tokenType) ..
			return true;
		
		return false;
	
	end method
	
	'// -------------------------------------------------------------------------------------------------------------
	'// Returns true if the specified token is the end-of-file token.
	'// -------------------------------------------------------------------------------------------------------------
	method lexer_tokenIsEof:byte(tok:Token)
	
		'// True when 'tok' is the end-of-file marker.
		return lexer_tokenIs(tok, Token.TYPE_EOF);
	
	end method
	
	'// -------------------------------------------------------------------------------------------------------------
	'// Consume a token and return it.
	'// -------------------------------------------------------------------------------------------------------------
	method lexer_consumeToken:Token(amount:short = 1)
	
		'// Advance the stream cursor by up to 'amount' tokens and return the
		'// new current token. The cursor never moves past the end-of-file
		'// token, and the EOF token itself is never consumed.
		if (not m_nextTokenLink) ..
			return null;
		
		'// Already sitting on EOF: nothing to consume.
		if (lexer_nextTokenIsEof()) ..
			return lexer_nextToken();
		
		local steps:short = 0;
		
		'// Post-test loop: always take at least one step, stopping early when
		'// the EOF token is reached.
		repeat
		
			steps :+ 1;
			m_nextTokenLink = m_nextTokenLink._succ;
		
		until (Token(m_nextTokenLink.value()).m_type = Token.TYPE_EOF or steps = amount)
		
		return Token(m_nextTokenLink.value());
	
	end method
	
	'// -------------------------------------------------------------------------------------------------------------
	'// Return the next token non-purged in the token stream, but do not consume it.
	'// -------------------------------------------------------------------------------------------------------------
	method lexer_peekToken:Token()
	
		'// NOTE(review): despite the name and the banner comment, this walks
		'// the cursor forward until it reaches the EOF token, returns that EOF
		'// token, and restores the cursor — it never returns an intermediate
		'// token. Confirm the intended "next non-purged token" behavior.
		if (not m_nextTokenLink) ..
			return null;
		
		'// Remember the cursor so it can be restored after the walk.
		local old_m_nextTokenLink:TLink = m_nextTokenLink;
		local peekToken:Token;
		
		repeat
		
			m_nextTokenLink = m_nextTokenLink._succ;
			if (Token(m_nextTokenLink.value()).m_type = Token.TYPE_EOF) ..
				exit;
		
		forever
		
		peekToken = Token(m_nextTokenLink.value());
		m_nextTokenLink = old_m_nextTokenLink;
		
		return peekToken;
	
	end method
	
	'// -------------------------------------------------------------------------------------------------------------
	'// A pointer to the link of the next token in the token stream received from the lexer.
	'// -------------------------------------------------------------------------------------------------------------
	field m_nextTokenLink:TLink;
	
	'// -------------------------------------------------------------------------------------------------------------
	'// A pointer to the lexer to recieve the token stream from.
	'// -------------------------------------------------------------------------------------------------------------
	field m_lexer:DBLexer;
	
	'// -------------------------------------------------------------------------------------------------------------
	'// A pointer to the errorlist.
	'// -------------------------------------------------------------------------------------------------------------
	field m_errorList:TList;

end type
