using System;
using System.Collections.Generic;
using System.Text;

namespace OracleInternal.SqlAndPlsqlParser
{
	/// <summary>
	/// A lexical token over a slice of SQL / PL*SQL source text, plus the static
	/// <see cref="Tokenize"/> and <see cref="Parse(string)"/> entry points that
	/// produce lists of such tokens. A token denotes the half-open range
	/// [m_vBegin, m_vEnd) of m_vReferrencedText, classified by m_vType.
	/// </summary>
	internal class LexerToken
	{
		// Line terminators recognized by the scanner.
		internal const char c_vNewLine = '\n';

		internal const char c_vCarriageReturn = '\r';

		// Single characters that each form an operator/delimiter token on their own.
		internal static string c_vOperation = "(){}[]^-|!*+./><='\",;:%@?";

		// Characters classified as whitespace tokens.
		internal static string c_vWhiteSpace = " \n\r\t";

		// Union of the two sets above; configures the raw tokenizer's delimiter set.
		internal static string c_vOperationAndWhiteSpace = c_vOperation + c_vWhiteSpace;

		// Placeholder identifier emitted in place of a skipped wrapped PL/SQL body.
		internal static string c_vWrappedMarker = "\"/\"";

		// Exponent markers used to split numeric literals like 12e3 (see Tokenize).
		internal static char[] c_vExp = "eE".ToCharArray();

		// Fast membership tables for the character classes above.
		internal static DoubleStageCharPropertiesTable c_vOperationTable = new DoubleStageCharPropertiesTable(c_vOperation);

		internal static DoubleStageCharPropertiesTable c_vWhitespaceTable = new DoubleStageCharPropertiesTable(c_vWhiteSpace);

		internal static DoubleStageCharPropertiesTable c_vOperationAndWhitespaceTable = new DoubleStageCharPropertiesTable(c_vOperationAndWhiteSpace);

		// Materialized token text; null until Parse fills it in (or Tokenize rebuilds
		// the token onto rewritten text, e.g. after '' escape collapsing).
		public string m_vContent;

		// The source string that [m_vBegin, m_vEnd) indexes into.
		public string m_vReferrencedText;

		// Inclusive start offset of the token within m_vReferrencedText.
		public int m_vBegin = -1;

		// Exclusive end offset of the token within m_vReferrencedText.
		public int m_vEnd = -1;

		// Token classification (WS, OPERATION, IDENTIFIER, COMMENT, ...).
		public Token m_vType;

		/// <summary>Creates a token over refText[begin, end) with an explicit type.</summary>
		public LexerToken(string refText, int begin, int end, Token t)
			: this(refText, begin, end)
		{
			m_vType = t;
		}

		/// <summary>Creates a token over refText[begin, end); m_vType keeps its default value.</summary>
		public LexerToken(string refText, int begin, int end)
		{
			m_vReferrencedText = refText;
			m_vBegin = begin;
			m_vEnd = end;
		}

		/// <summary>Writes this token's diagnostic representation to the console.</summary>
		public void Print()
		{
			Console.WriteLine(ToString());
		}

		/// <summary>
		/// Diagnostic form: "[begin,end) content   &lt;type&gt;". m_vContent may be
		/// null (it is only materialized by Parse), in which case the middle part
		/// is empty.
		/// </summary>
		public override string ToString()
		{
			StringBuilder stringBuilder = new StringBuilder(128);
			stringBuilder.Append('[');
			stringBuilder.Append(m_vBegin);
			stringBuilder.Append(',');
			stringBuilder.Append(m_vEnd);
			stringBuilder.Append(") ");
			stringBuilder.Append(m_vContent);
			stringBuilder.Append("   <");
			stringBuilder.Append(m_vType);
			stringBuilder.Append('>');
			return stringBuilder.ToString();
		}

		/// <summary>Prints each token on its own line, followed by a separator rule.</summary>
		public static void PrintTokens(List<LexerToken> src)
		{
			foreach (LexerToken item in src)
			{
				item.Print();
			}
			Console.WriteLine("------------------------------------------------------------------------");
		}

		/// <summary>
		/// Scans <paramref name="sourceExpr"/> into a flat token list. Recognizes:
		/// one-character operator and whitespace tokens, identifiers, digit runs
		/// (a run containing an e/E exponent marker is split into
		/// DIGITS / IDENTIFIER / DIGITS), block comments (/* */), line comments
		/// (-- and SQL*Plus REM), single- and double-quoted strings, and — when
		/// <paramref name="quotedStrings"/> is true — q'...'/n'...' literal
		/// prefixes including alternative-quoting delimiters. When the identifier
		/// "wrapped" appears after PROCEDURE/FUNCTION/PACKAGE/TRIGGER/TYPE/BODY
		/// plus the unit name, the obfuscated body that follows is skipped up to a
		/// lone '/' after a newline and replaced by the c_vWrappedMarker token.
		/// </summary>
		/// <param name="sourceExpr">Source text to scan.</param>
		/// <param name="quotedStrings">Enables the q'...'/n'...' prefix handling.</param>
		/// <returns>Tokens in source order, including whitespace and comments.</returns>
		public static List<LexerToken> Tokenize(string sourceExpr, bool quotedStrings)
		{
			List<LexerToken> list = new List<LexerToken>();
			// Raw tokenizer: splits on every operator/whitespace char and returns the
			// delimiters themselves as one-character tokens.
			StringTokenizer stringTokenizer = new StringTokenizer(sourceExpr, c_vOperationAndWhitespaceTable, includeDelims: true);
			int num = 0;                    // end offset of the current raw token
			char c = ' ';                   // expected closing delimiter of a q'...' literal (' ' = not chosen yet)
			bool flag = false;              // true while inside a q'...' (alternative-quoting) literal
			LexerToken lexerToken = null;   // current raw token
			LexerToken lexerToken2 = null;  // previous emitted token (may still be growing in place)
			char c2 = ' ';                  // first char of the current raw token
			bool flag2 = false;             // current quoted string contains a doubled '' escape
			int num2 = 0;                   // length of the previous token
			char c3 = ' ';                  // first char of the previous token
			char c4 = ' ';                  // last char of the previous token
			bool flag3 = false;             // true while skipping a wrapped program body
			while (stringTokenizer.HasMoreTokens())
			{
				lexerToken = stringTokenizer.NextToken();
				int num3 = lexerToken.m_vEnd - lexerToken.m_vBegin;
				c2 = lexerToken.m_vReferrencedText[lexerToken.m_vBegin];
				num = lexerToken.m_vEnd;
				if (flag3)
				{
					// Skipping a wrapped body: discard everything until a lone '/'
					// that immediately follows a newline token, then emit the marker.
					if ('/' == c2 && num3 == 1 && lexerToken2 != null && '\n' == c3 && 1 == num2)
					{
						list.Add(new LexerToken(c_vWrappedMarker, 0, c_vWrappedMarker.Length, Token.IDENTIFIER));
						flag3 = false;
					}
					else if ('\n' == c2 && num3 == 1)
					{
						// Keep newline tokens so line structure survives the skip.
						lexerToken.m_vType = Token.WS;
						list.Add(lexerToken);
						c3 = '\n';
						num2 = 1;
					}
					else if ('\n' == c3 && 1 == num2)
					{
						// Any other token cancels the "just saw a newline" state.
						c3 = '?';
						num2 = 1;
					}
					continue;
				}
				if (lexerToken2 != null)
				{
					// Refresh the cached view of the previous token (it may have grown).
					int vBegin = lexerToken2.m_vBegin;
					num2 = lexerToken2.m_vEnd - vBegin;
					string vReferrencedText = lexerToken2.m_vReferrencedText;
					c3 = vReferrencedText[lexerToken2.m_vBegin];
					c4 = vReferrencedText[lexerToken2.m_vEnd - 1];
					switch (lexerToken2.m_vType)
					{
					case Token.COMMENT:
						// Comment is still open unless it already ends with "*/" (and
						// is not the degenerate "/*/" opener). Extending m_vEnd only
						// when '*' or '/' arrives is sufficient: jumping m_vEnd to num
						// absorbs every raw token seen in between.
						if (string.Compare("*/", 0, vReferrencedText, vBegin + num2 - 2, 2) != 0 || string.Compare("/*/", 0, vReferrencedText, vBegin, 3) == 0)
						{
							if ('*' == c2 || '/' == c2)
							{
								lexerToken2.m_vEnd = num;
							}
							continue;
						}
						break;
					case Token.LINE_COMMENT:
						// Grow the line comment until end of line.
						if ('\n' != c2)
						{
							lexerToken2.m_vEnd = num;
							continue;
						}
						break;
					case Token.QUOTED_STRING:
						if (flag)
						{
							// Inside a q'...' alternative-quoted literal.
							if (c == ' ')
							{
								// The first token after q' picks the closing
								// delimiter; the four bracket chars map to partners.
								c = c2 switch
								{
									'<' => '>', 
									'{' => '}', 
									'[' => ']', 
									'(' => ')', 
									_ => c2, 
								};
								lexerToken2.m_vEnd = num;
								continue;
							}
							lexerToken2.m_vEnd = num;
							if (c2 == '\'' && c == c4 && num2 > 3)
							{
								// Closing delimiter followed by the quote: rebuild as
								// a plain single-quoted literal with the alternative-
								// quoting wrapper stripped, rebased onto new text.
								flag = false;
								c = ' ';
								string text = "'" + lexerToken2.m_vReferrencedText.Substring(lexerToken2.m_vBegin + 3, lexerToken2.m_vEnd - lexerToken2.m_vBegin - 5) + "'";
								lexerToken2 = new LexerToken(text, 0, text.Length, lexerToken2.m_vType);
							}
							continue;
						}
						if (c2 == '\'')
						{
							// A quote while inside a quoted string: either the ''
							// escape (peek one token ahead) or the terminator.
							LexerToken lexerToken3 = stringTokenizer.PeekNextToken();
							if (lexerToken3 != null && lexerToken3.m_vReferrencedText[lexerToken3.m_vBegin] == '\'')
							{
								stringTokenizer.NextToken();
								num++;
								flag2 = true;
								continue;
							}
							lexerToken2.m_vEnd = num;
							if (flag2)
							{
								// Collapse each '' escape into a single quote,
								// rebasing the token onto the rewritten text.
								string text2 = "'" + lexerToken2.m_vReferrencedText.Substring(lexerToken2.m_vBegin + 1, lexerToken2.m_vEnd - lexerToken2.m_vBegin - 2).Replace("''", "'") + "'";
								lexerToken2 = new LexerToken(text2, 0, text2.Length, lexerToken2.m_vType);
								flag2 = false;
							}
							continue;
						}
						if (num2 == 1 || c4 != '\'')
						{
							// String still open: its interior tokens carry no type.
							continue;
						}
						break;
					case Token.DQUOTED_STRING:
						if (c2 == '"')
						{
							// Closing double quote terminates the token.
							lexerToken2.m_vEnd = num;
							continue;
						}
						if (num2 == 1 || c4 != '"')
						{
							// Still open.
							continue;
						}
						break;
					default:
						if (num3 == 1)
						{
							// Reassemble two-character comment openers that the raw
							// tokenizer split apart: "/" + "*" and "-" + "-".
							if ('*' == c2 && '/' == c3 && num2 == 1)
							{
								lexerToken2.m_vEnd = num;
								lexerToken2.m_vType = Token.COMMENT;
								continue;
							}
							if ('-' == c2 && '-' == c3 && num2 == 1)
							{
								lexerToken2.m_vEnd = num;
								lexerToken2.m_vType = Token.LINE_COMMENT;
								continue;
							}
						}
						// SQL*Plus "REM" right after a line break opens a line comment.
						// NOTE(review): the new token starts zero-length at num - 1
						// (the last char of "rem") and then grows via the LINE_COMMENT
						// case above — confirm the start offset is intended.
						if (string.Compare("rem", 0, lexerToken.m_vReferrencedText, lexerToken.m_vBegin, 3, ignoreCase: true) == 0 && num2 == 1 && ('\n' == c3 || '\r' == c3))
						{
							lexerToken2 = new LexerToken(lexerToken.m_vReferrencedText, num - 1, num - 1, Token.LINE_COMMENT);
							list.Add(lexerToken2);
							continue;
						}
						break;
					}
				}
				else if (string.Compare("rem", 0, lexerToken.m_vReferrencedText, lexerToken.m_vBegin, 3, ignoreCase: true) == 0)
				{
					// "REM" as the very first token of the input (no previous token).
					lexerToken2 = new LexerToken(lexerToken.m_vReferrencedText, num - 1, num - 1, Token.LINE_COMMENT);
					list.Add(lexerToken2);
					continue;
				}
				if (num3 == 1)
				{
					if (quotedStrings)
					{
						switch (c2)
						{
						case '\'':
							// Opening single quote. A directly preceding one-char
							// q/Q identifier marks alternative quoting; n/N marks a
							// national-character literal. Either prefix is retyped to
							// QUOTED_STRING so Parse later merges it with the literal.
							if (lexerToken2 != null && num2 == 1)
							{
								if ('q' == c3 || 'Q' == c3)
								{
									flag = true;
									lexerToken2.m_vType = Token.QUOTED_STRING;
								}
								else if ('n' == c3 || 'N' == c3)
								{
									lexerToken2.m_vType = Token.QUOTED_STRING;
								}
							}
							lexerToken2 = new LexerToken(lexerToken.m_vReferrencedText, num - 1, num, Token.QUOTED_STRING);
							list.Add(lexerToken2);
							continue;
						case '"':
							// NOTE(review): created zero-length ([num-1, num-1)) unlike
							// the single-quote case above, then grown by the
							// DQUOTED_STRING case — confirm the asymmetry is intended.
							lexerToken2 = new LexerToken(lexerToken.m_vReferrencedText, num - 1, num - 1, Token.DQUOTED_STRING);
							list.Add(lexerToken2);
							continue;
						}
					}
					if (c_vOperationTable.Contains(c2))
					{
						lexerToken.m_vType = Token.OPERATION;
						lexerToken2 = lexerToken;
						list.Add(lexerToken2);
						continue;
					}
					if (c_vWhitespaceTable.Contains(c2))
					{
						lexerToken.m_vType = Token.WS;
						lexerToken2 = lexerToken;
						list.Add(lexerToken2);
						continue;
					}
				}
				if (char.IsDigit(c2))
				{
					// Digit-led run. If it embeds an e/E (exponent form like 12e3),
					// emit DIGITS / IDENTIFIER("e"/"E") / DIGITS instead of one token.
					int num4 = lexerToken.m_vReferrencedText.IndexOfAny(c_vExp, lexerToken.m_vBegin, num3);
					if (num4 == -1)
					{
						lexerToken.m_vType = Token.DIGITS;
						lexerToken2 = lexerToken;
					}
					else
					{
						list.Add(new LexerToken(lexerToken.m_vReferrencedText, lexerToken.m_vBegin, num4, Token.DIGITS));
						lexerToken2 = new LexerToken(lexerToken.m_vReferrencedText, num4, num4 + 1, Token.IDENTIFIER);
						if (num4 != lexerToken.m_vEnd - 1)
						{
							// e/E is not the last char: a trailing DIGITS part follows.
							list.Add(lexerToken2);
							lexerToken2 = new LexerToken(lexerToken.m_vReferrencedText, num4 + 1, lexerToken.m_vEnd, Token.DIGITS);
						}
					}
					list.Add(lexerToken2);
					continue;
				}
				if (string.Compare("wrapped", 0, lexerToken.m_vReferrencedText, lexerToken.m_vBegin, 7, ignoreCase: true) == 0 && lexerToken2 != null)
				{
					// Possible wrapped unit header. Walk backwards over the emitted
					// tokens, skipping WS/COMMENT; after crossing at least one
					// IDENTIFIER (the unit name), look for the introducing keyword.
					// Finding one arms flag3 so the obfuscated body gets skipped.
					// NOTE(review): ToUpper() here is culture-sensitive — keyword
					// matching could misfire under e.g. the Turkish locale; confirm
					// whether ToUpperInvariant was meant.
					bool flag4 = false;   // true once an IDENTIFIER has been crossed
					for (int num5 = list.Count - 1; num5 >= 0; num5--)
					{
						LexerToken lexerToken4 = list[num5];
						string text3 = lexerToken4.m_vReferrencedText.Substring(lexerToken4.m_vBegin, lexerToken4.m_vEnd - lexerToken4.m_vBegin).ToUpper();
						if (flag4)
						{
							switch (text3[0])
							{
							case 'P':
								if ("PROCEDURE" == text3 || "PACKAGE" == text3)
								{
									flag3 = true;
								}
								break;
							case 'F':
								if ("FUNCTION" == text3)
								{
									flag3 = true;
								}
								break;
							case 'T':
								if ("TRIGGER" == text3 || "TYPE" == text3)
								{
									flag3 = true;
								}
								break;
							case 'B':
								if ("BODY" == text3)
								{
									flag3 = true;
								}
								break;
							}
							if (flag3)
							{
								break;
							}
						}
						if (lexerToken4.m_vType != Token.WS && lexerToken4.m_vType != Token.COMMENT)
						{
							if (lexerToken4.m_vType != Token.IDENTIFIER)
							{
								// Some other token type before the name: not a header.
								break;
							}
							flag4 = true;
						}
					}
				}
				// Everything else (keywords included) is classified as an identifier.
				lexerToken.m_vType = Token.IDENTIFIER;
				lexerToken2 = lexerToken;
				list.Add(lexerToken2);
			}
			return list;
		}

		/// <summary>Parses with defaults: drop whitespace/comments, honor quote prefixes.</summary>
		public static List<LexerToken> Parse(string input)
		{
			return Parse(input, keepWsAndComments: false, quotedStrings: true);
		}

		/// <summary>
		/// Tokenizes <paramref name="input"/> and post-processes the stream:
		/// adjacent QUOTED_STRING tokens are merged into one (this is how a
		/// retyped q'/n' prefix is folded into its literal), whitespace and
		/// comment tokens are optionally dropped, and m_vContent is materialized
		/// for every surviving token.
		/// </summary>
		/// <param name="input">Source text to parse.</param>
		/// <param name="keepWsAndComments">Keep WS/COMMENT/LINE_COMMENT tokens when true.</param>
		/// <param name="quotedStrings">Passed through to <see cref="Tokenize"/>.</param>
		/// <returns>The filtered, merged token list.</returns>
		public static List<LexerToken> Parse(string input, bool keepWsAndComments, bool quotedStrings)
		{
			List<LexerToken> list = new List<LexerToken>();
			LexerToken lexerToken = null;   // last token appended to list
			foreach (LexerToken item in Tokenize(input, quotedStrings))
			{
				switch (item.m_vType)
				{
				case Token.QUOTED_STRING:
					if (lexerToken != null && lexerToken.m_vType == Token.QUOTED_STRING)
					{
						// Merge with the previous string token: concatenate both
						// texts and rebase the surviving token onto the result.
						if (lexerToken.m_vContent == null)
						{
							lexerToken.m_vContent = lexerToken.m_vReferrencedText.Substring(lexerToken.m_vBegin, lexerToken.m_vEnd - lexerToken.m_vBegin);
						}
						if (item.m_vContent == null)
						{
							item.m_vContent = item.m_vReferrencedText.Substring(item.m_vBegin, item.m_vEnd - item.m_vBegin);
						}
						lexerToken.m_vContent = (lexerToken.m_vReferrencedText = lexerToken.m_vContent + item.m_vContent);
						lexerToken.m_vBegin = 0;
						lexerToken.m_vEnd = lexerToken.m_vReferrencedText.Length;
						continue;
					}
					break;
				case Token.COMMENT:
				case Token.LINE_COMMENT:
				case Token.WS:
					if (!keepWsAndComments)
					{
						continue;
					}
					break;
				}
				list.Add(item);
				item.m_vContent = item.m_vReferrencedText.Substring(item.m_vBegin, item.m_vEnd - item.m_vBegin);
				lexerToken = item;
			}
			return list;
		}
	}
}
