// ------------------------------------------------------------------------------
// 
// Copyright (c) 2008-2009 Swampware, Inc.
// 
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
// 
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
// 
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
// 
// ------------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Text;

namespace Bamboo.Parsing.Generators.CPlusPlus
{
	/// <summary>
	/// Emits C++ source text for a character-driven tokenizer derived from a
	/// finite automaton: <see cref="GenerateHeader"/> writes the class
	/// declaration (.h) and <see cref="GenerateClass"/> writes the state-machine
	/// implementation (.cpp). The generated tokenizer pushes recognized tokens
	/// into a companion <c>{name}Parser</c> class.
	/// </summary>
	public class TokenizerGenerator
	{
		// Splits a "::"-separated C++ namespace path and writes one opening
		// "namespace X {" pair per segment; returns the segments so the caller
		// can close them again with CloseNamespaces.
		private static string[] OpenNamespaces(string multinamespace, System.IO.TextWriter writer)
		{
			string[] namespaces = multinamespace.Split(new string[] { "::" }, StringSplitOptions.None);
			foreach (string nspace in namespaces)
			{
				writer.WriteLine($"namespace {nspace}");
				writer.WriteLine("{");
			}
			return namespaces;
		}

		// Writes one closing brace per previously opened namespace segment.
		private static void CloseNamespaces(string[] namespaces, System.IO.TextWriter writer)
		{
			foreach (string nspace in namespaces)
			{
				writer.WriteLine("}");
			}
		}

		/// <summary>
		/// Writes the C++ header (<c>{name}Tokenizer.h</c>) declaring the tokenizer class.
		/// </summary>
		/// <param name="name">Base name shared by the generated tokenizer, parser and token types.</param>
		/// <param name="multinamespace">C++ namespace path with segments separated by "::".</param>
		/// <param name="finiteAutomaton">Automaton describing the token grammar; unused here, kept for signature parity with <see cref="GenerateClass"/>.</param>
		/// <param name="writer">Destination for the generated source text.</param>
		public static void GenerateHeader(string name, string multinamespace, Bamboo.Parsing.FiniteAutomata.FiniteAutomaton finiteAutomaton, System.IO.TextWriter writer)
		{
			writer.WriteLine("//");
			writer.WriteLine($"// {name}Tokenizer.h");
			writer.WriteLine("//");
			// NOTE: the timestamp makes the generated file differ between runs.
			writer.WriteLine($"// AUTOGENERATED {System.DateTime.Now}");
			writer.WriteLine("//");
			writer.WriteLine();
			// ToUpperInvariant: the include guard must not vary with the current
			// culture (e.g. the Turkish dotted/dotless 'i' would corrupt the
			// macro under culture-sensitive ToUpper()).
			string guard = name.ToUpperInvariant() + "TOKENIZER_H";
			writer.WriteLine($"#ifndef {guard}");
			writer.WriteLine($"#define {guard}");
			writer.WriteLine();
			writer.WriteLine("#include <string>");
			writer.WriteLine($"#include \"{name}Parser.h\"");
			writer.WriteLine($"#include \"{name}Token.h\"");
			writer.WriteLine();

			string[] namespaces = OpenNamespaces(multinamespace, writer);

			writer.WriteLine($"\tclass {name}Tokenizer");
			writer.WriteLine("\t{");
			writer.WriteLine("\tprivate:");
			// Copy constructor and assignment are declared private and never
			// defined, making the generated class non-copyable (pre-C++11 idiom).
			writer.WriteLine($"\t\t{name}Tokenizer(const {name}Tokenizer&);");
			writer.WriteLine($"\t\t{name}Tokenizer operator=(const {name}Tokenizer&);");
			// _counter tracks live instances (incremented/decremented in
			// ctor/dtor below) — useful for leak detection in the host program.
			writer.WriteLine("\t\tstatic int _counter;");
			writer.WriteLine();
			writer.WriteLine($"\t\t{name}Parser* _parser;");
			writer.WriteLine("\t\tint _state;");
			writer.WriteLine("\t\tstd::string _buffer;");
			writer.WriteLine("\tpublic:");
			writer.WriteLine($"\t\t{name}Tokenizer({name}Parser* parser) : _parser(parser), _state(0) {{ _counter++; }}");
			writer.WriteLine($"\t\t~{name}Tokenizer() {{ _counter--; }}");
			writer.WriteLine("\t\tstatic int counter() { return _counter; }");
			writer.WriteLine();
			writer.WriteLine("\t\tvoid next(char ch);");
			writer.WriteLine("\t};");

			CloseNamespaces(namespaces, writer);
			writer.WriteLine("#endif");
		}

		/// <summary>
		/// Writes the C++ implementation (<c>{name}Tokenizer.cpp</c>): a
		/// switch-based DFA in <c>next(char)</c> that buffers characters and
		/// emits tokens to the parser.
		/// </summary>
		/// <param name="name">Base name shared by the generated tokenizer, parser and token types.</param>
		/// <param name="multinamespace">C++ namespace path with segments separated by "::".</param>
		/// <param name="finiteAutomaton">Automaton whose states, transitions and final states drive the generated switch.</param>
		/// <param name="writer">Destination for the generated source text.</param>
		public static void GenerateClass(string name, string multinamespace, Bamboo.Parsing.FiniteAutomata.FiniteAutomaton finiteAutomaton, System.IO.TextWriter writer)
		{
			writer.WriteLine("//");
			writer.WriteLine($"// {name}Tokenizer.cpp");
			writer.WriteLine("//");
			// NOTE: the timestamp makes the generated file differ between runs.
			writer.WriteLine($"// AUTOGENERATED {System.DateTime.Now}");
			writer.WriteLine("//");
			writer.WriteLine();
			writer.WriteLine("#include <istream>");
			writer.WriteLine($"#include \"{name}Parser.h\"");
			writer.WriteLine($"#include \"{name}Token.h\"");
			writer.WriteLine($"#include \"{name}Tokenizer.h\"");
			writer.WriteLine($"#include \"{name}TokenType.h\"");
			writer.WriteLine();

			string[] namespaces = OpenNamespaces(multinamespace, writer);

			writer.WriteLine($"\tint {name}Tokenizer::_counter = 0;");
			writer.WriteLine();
			writer.WriteLine($"\tvoid {name}Tokenizer::next(char ch)");
			writer.WriteLine("\t{");
			writer.WriteLine("\t\tswitch(_state)");
			writer.WriteLine("\t\t{");

			//TODO if ch == -1, purge remaining token.

			foreach (int state in finiteAutomaton.States)
			{
				bool isFinal = Operators.IsFinal(state, finiteAutomaton.FinalStates);
				bool hasTransitions = Operators.HasTransitions(state, finiteAutomaton.Transitions);

				// Final states with no outgoing transitions never get their own
				// case label: they are resolved inline at the transition that
				// reaches them (see isFinal2/hasTransitions2 below).
				if (!isFinal || hasTransitions)
				{
					writer.WriteLine($"\t\tcase {state}:");
					writer.WriteLine("\t\t\t{");
					writer.WriteLine("\t\t\t\tswitch (ch)");
					writer.WriteLine("\t\t\t\t{");
					if (state == 0)
					{
						// In the start state, whitespace between tokens is
						// silently discarded. (The "\\t" etc. below emit literal
						// backslash escapes into the generated C++.)
						writer.WriteLine("\t\t\t\tcase ' ':");
						writer.WriteLine("\t\t\t\tcase '\\t':");
						writer.WriteLine("\t\t\t\tcase '\\r':");
						writer.WriteLine("\t\t\t\tcase '\\n':");
						writer.WriteLine("\t\t\t\t\t{");
						writer.WriteLine("\t\t\t\t\t\t// Trim whitespace");
						writer.WriteLine("\t\t\t\t\t\tbreak;");
						writer.WriteLine("\t\t\t\t\t}");
					}
					// Collect (toState, character) pairs leaving this state,
					// then Nest() groups all characters per target state so the
					// case labels can share one body.
					Surf.Set transitions = new Surf.Set();
					foreach (Bamboo.Parsing.FiniteAutomata.Transition transition in finiteAutomaton.Transitions)
					{
						if (transition.FromState == state)
						{
							transitions.Add(new Surf.Tuple(new object[] { transition.ToState, transition.Character }));
						}
					}
					transitions = transitions.Nest();
					foreach (Surf.Tuple transition in transitions)
					{
						// One case label per character that leads to this target state.
						foreach (char character in (Surf.Set)transition[1])
						{
							writer.WriteLine($"\t\t\t\tcase '{Operators.Escape(character)}':");
						}
						writer.WriteLine("\t\t\t\t\t{");
						bool isFinal2 = Operators.IsFinal((int)transition[0], finiteAutomaton.FinalStates);
						bool hasTransitions2 = Operators.HasTransitions((int)transition[0], finiteAutomaton.Transitions);
						writer.WriteLine("\t\t\t\t\t\t_buffer += ch;");
						if (isFinal2 && !hasTransitions2)
						{
							// Dead-end final state: the token is complete, so
							// emit it immediately and return to the start state.
							Bamboo.Parsing.FiniteAutomata.Token token = Operators.GetToken((int)transition[0], finiteAutomaton.Tokens);
							writer.WriteLine($"\t\t\t\t\t\t{name}Token token({name}TokenType::{token.Name}, _buffer);");
							writer.WriteLine("\t\t\t\t\t\t_parser->next(token);");
							writer.WriteLine("\t\t\t\t\t\t_state = 0;");
							writer.WriteLine("\t\t\t\t\t\t_buffer.clear();");
						}
						else
						{
							writer.WriteLine($"\t\t\t\t\t\t_state = {transition[0]};");
						}
						writer.WriteLine("\t\t\t\t\t\tbreak;");
						writer.WriteLine("\t\t\t\t\t}");
					}
					writer.WriteLine("\t\t\t\tdefault:");
					writer.WriteLine("\t\t\t\t\t{");
					if (isFinal)
					{
						// Longest-match fallback: the buffered text already
						// forms a valid token, so emit it and re-dispatch the
						// current character from the start state.
						Bamboo.Parsing.FiniteAutomata.Token token = Operators.GetToken(state, finiteAutomaton.Tokens);
						writer.WriteLine($"\t\t\t\t\t\t{name}Token token({name}TokenType::{token.Name}, _buffer);");
						writer.WriteLine("\t\t\t\t\t\t_parser->next(token);");
						writer.WriteLine("\t\t\t\t\t\t_state = 0;");
						writer.WriteLine("\t\t\t\t\t\t_buffer.clear();");
						writer.WriteLine("\t\t\t\t\t\tnext(ch);");
					}
					else
					{
						// No transition accepts this character: report an error token.
						writer.WriteLine("\t\t\t\t\t\t_buffer += ch;");
						writer.WriteLine($"\t\t\t\t\t\t{name}Token token({name}TokenType::_ERROR_, _buffer);");
						writer.WriteLine("\t\t\t\t\t\t_parser->next(token);");
					}
					writer.WriteLine("\t\t\t\t\t\tbreak;");
					writer.WriteLine("\t\t\t\t\t}");
					writer.WriteLine("\t\t\t\t}");
					writer.WriteLine("\t\t\t\tbreak;");
					writer.WriteLine("\t\t\t}");
				}
			}
			// Unknown state: defensive error path for values _state should never hold.
			writer.WriteLine("\t\tdefault:");
			writer.WriteLine("\t\t\t{");
			writer.WriteLine($"\t\t\t\t{name}Token token({name}TokenType::_ERROR_, _buffer);");
			writer.WriteLine("\t\t\t\t_parser->next(token);");
			writer.WriteLine("\t\t\t\tbreak;");
			writer.WriteLine("\t\t\t}");
			writer.WriteLine("\t\t}");
			writer.WriteLine("\t}");

			CloseNamespaces(namespaces, writer);
		}

		// Private constructor: this is a static utility class and must not be instantiated.
		private TokenizerGenerator()
		{
		}

	}
}
