#pragma once
#ifndef __TOKENIZER_H__
#define __TOKENIZER_H__

#include "NezhaFoundation.h"

// Generic tokenizer over a caller-owned buffer of T-typed characters.
//
// Primary template: declaration only — no member is defined here, so
// instantiating it for any T other than char (which has the explicit
// specialization below) will fail at link time.
template<class T>
class tTokenizer
{
protected:

	// Optional source-file name for diagnostics; may be null.
	const T* mFileName;

	// Caller-owned buffer being tokenized; never copied or freed here.
	const T* mBuffer;

	// Number of valid elements in mBuffer.
	unsigned int mBufferLen;

	// Delimiter characters; presumably a null-terminated list, mirroring
	// the char specialization's SetDelimiters — confirm if implemented.
	const T* mDelimiters;

	// Current read position inside mBuffer.
	unsigned int mPos;

	// Current line number (1-based in the char specialization).
	int mLine;

public:
	tTokenizer();
	tTokenizer( const T* buffer, unsigned int size, const T* delimiters, const T* fileName = 0 );
	~tTokenizer();

	// Re-targets the tokenizer at a new buffer / delimiter set.
	void SetSource( const T* buffer, unsigned int size, const T* delimiters, const T* fileName = 0 );

	// Replaces the delimiter set.
	void SetDelimiters( const T* delimiters );

	// Extracts the next token into *str; returns false when no token
	// remains (contract taken from the char specialization below).
	bool GetNext( _string* str );

	// Returns a pointer to the unread remainder, or 0 at end of buffer.
	const T* GetTail() const;

	// Returns true when only delimiters/comments remain before the end.
	bool IsEnd();
};

// Specialization of tTokenizer for narrow (char) buffers.
//
// Splits a caller-owned character buffer into tokens separated by a
// configurable set of single-character delimiters.  "//" and "--" line
// comments are skipped.  A 1-based line counter is maintained so callers
// can report parse errors.  The buffer is never copied or freed; it must
// outlive the tokenizer (or the next SetSource call).
template<>
class tTokenizer<char>
{
protected:

	// Optional source-file name for diagnostics; may be null.
	const char* mFileName;

	// Caller-owned buffer being tokenized.  Not required to be
	// null-terminated: every read is bounded by mBufferLen.
	const char* mBuffer;

	// Number of valid characters in mBuffer.
	unsigned int mBufferLen;

	// Membership table: mDelimiters[(unsigned char)c] is true when c is a
	// token separator.  Indexed by the character's unsigned value so the
	// table works whether plain char is signed or unsigned.
	bool mDelimiters[256];

	// Current read position inside mBuffer.
	unsigned int mPos;

	// Current line number, 1-based, advanced on every '\n' consumed.
	int mLine;

protected:
	// Returns true when the character at mPos begins a "//" or "--" line
	// comment.  'c' must be the character at mBuffer[mPos].
	bool IsComment( char c )
	{
		// Bounds-check the one-character lookahead: the last character of
		// the buffer cannot start a two-character comment marker.  (The
		// unguarded read here was an out-of-bounds access.)
		if( mPos + 1 >= mBufferLen )
			return false;

		char c1 = *( mBuffer + mPos + 1 );
		return (c == '/' && c1 == '/') || (c == '-' && c1 == '-');
	}

	// Returns true when c is one of the configured delimiter characters.
	bool IsDelimiter( char c )
	{
		// Use unsigned indexing: the previous 'c + 128' scheme indexed past
		// the 256-entry table on platforms where plain char is unsigned.
		return mDelimiters[ (unsigned char)c ];
	}

public:
	// Constructs an empty tokenizer; call SetSource() before use.
	tTokenizer()
		: mFileName( 0 )
		, mBuffer( 0 )
		, mBufferLen( 0 )
		, mPos( 0 )
		, mLine( 1 )
	{
		memset( mDelimiters, false, 256 * sizeof(bool) );
	}

	// Constructs a tokenizer over [buffer, buffer + size).
	//   buffer     - character data to tokenize (must be non-null).
	//   size       - number of valid characters (must be non-zero).
	//   delimiters - null-terminated list of separator characters.
	//   fileName   - optional name used for diagnostics; may be null.
	tTokenizer( const char* buffer, unsigned int size, const char* delimiters, const char* fileName = 0 )
		: mFileName( fileName )
		, mBuffer( buffer )
		, mBufferLen( size )
		, mPos( 0 )
		, mLine( 1 )
	{
		assert( buffer );
		assert( size );

		SetDelimiters( delimiters );
	}

	// Nothing to release: the buffer is caller-owned.
	~tTokenizer()
	{
	}

	// Re-targets the tokenizer at a new buffer and delimiter set.
	// Resets the read position and line counter so a tokenizer instance
	// can be reused.  (Previously mPos/mLine carried over from the old
	// buffer, so a reused tokenizer started mid-buffer with a stale line
	// number.)
	void SetSource( const char* buffer, unsigned int size, const char* delimiters, const char* fileName = 0 )
	{
		assert( buffer );
		assert( size );

		mFileName = fileName;
		mBuffer = buffer;
		mBufferLen = size;
		mPos = 0;	// restart at the beginning of the new buffer
		mLine = 1;	// line numbers are 1-based
		SetDelimiters( delimiters );
	}

	// Replaces the delimiter set with the characters of the
	// null-terminated string 'delimiters'.
	void SetDelimiters( const char* delimiters )
	{
		assert( delimiters );

		memset( mDelimiters, false, 256 * sizeof(bool) );

		for( char c = *delimiters; c; c = *(++delimiters) )
		{
			// Same unsigned indexing as IsDelimiter() — see note there.
			mDelimiters[ (unsigned char)c ] = true;
		}
	}

	// Copies the next token into *str, skipping delimiters and "//"/"--"
	// line comments.  Returns false when no token remains.  Tokens longer
	// than 255 characters are silently truncated (fixed lexeme buffer).
	bool GetNext( _string* str )
	{
		assert( str );

		if( mPos >= mBufferLen )
			return false;

		char lexeme[256] = { 0 };
		int i = 0;
		char c = 0;

		for( ; mPos < mBufferLen; ++mPos )
		{
			c = *( mBuffer + mPos );

			if( c == '\n' )
			{
				++mLine;
			}
			else if( IsComment( c ) )
			{
				// Skip to the end of the comment line, keeping the line
				// counter in step with any '\n' we consume.
				for( mPos += 2; mPos < mBufferLen; ++mPos )
				{
					c = *( mBuffer + mPos );

					if( c == '\n' )
					{
						++mLine;
						break;
					}
				}
				if( i == 0 )
				{
					// Nothing collected yet: keep scanning for a token.
					continue;
				}
				else
				{
					// The comment terminates the token in progress.
					++mPos;
					break;
				}
			}
			if( IsDelimiter( c ) )
			{
				if( i == 0 )
				{
					// Leading delimiter: skip it.
					continue;
				}
				else
				{
					// Delimiter ends the current token.
					++mPos;
					break;
				}
			}
			// Accumulate, truncating past 255 characters.
			if( i < 255 )
			{
				lexeme[i] = c;
				++i;
			}
		}

		lexeme[i] = 0;
		*str = lexeme;
		return i != 0;
	}

	// Advances past the next '\n' (counting it), or to end of buffer.
	void GotoNextLine()
	{
		for( ; mPos < mBufferLen; ++mPos )
		{
			if( *( mBuffer + mPos ) == '\n' )
			{
				++mLine;
				++mPos;
				break;
			}
		}
	}

	// Returns a pointer to the unread remainder of the buffer, or 0 when
	// the buffer is exhausted.  Note: the tail is NOT null-terminated
	// unless the underlying buffer is.
	const char* GetTail() const
	{
		if( mPos >= mBufferLen )
			return 0;
		else
			return mBuffer + mPos;
	}

	// Returns true when only newlines, spaces, delimiters, and comments
	// remain before the end of the buffer.  The read position is restored,
	// so this is a pure lookahead (mLine is intentionally untouched).
	bool IsEnd()
	{
		unsigned int pos = mPos;
		char c = 0;

		for( ; mPos < mBufferLen; ++mPos )
		{
			c = *( mBuffer + mPos );

			if( c == '\n' || c == ' ' )
			{
				continue;
			}
			if( IsComment( c ) )
			{
				for( mPos += 2; mPos < mBufferLen; ++mPos )
				{
					c = *( mBuffer + mPos );

					if( c == '\n' )
						break;
				}
				continue;
			}
			if( IsDelimiter( c ) )
			{
				continue;
			}
			mPos = pos;
			return false;
		}
		mPos = pos;
		return true;
	}
};

// Convenience alias for the char specialization — the only tTokenizer
// instantiation with an implementation in this header.
typedef tTokenizer<char> cTokenizer;

// NOTE(review): presumably a legacy alias kept for older call sites —
// confirm against callers before removing.
typedef	cTokenizer cTokenizerT;	


#endif // __TOKENIZER_H__