using System;
using System.Collections; 
using System.IO; 
using Parseq.Core; 

namespace Parseq.Compiler
{
	/* a TokenParser2 is a ChoiceParser whose choices all generate tokens. 
	 * NOTE in order to produce all tokens one must take care of whitespaces.
	 * that means in the sense of the parser it is treated as a reader instead of just a parser. 
	 * the next parser will read into a particular 
	 * i.e. the interface for the higher level reader is to read from the Reader... this is what it is! 
	 * each parser will match one single token... 
	 * 
	 * somehow it always seems difficult to separate the parsing language from the lexing language as they seem to share
	 * quite a bit, so probably doesn't make sense to try to do so. 
	 * 
	 * what I should do though is to define the concept of a token & a rule, and then define how to build the parser
	 * since this is about a parser tool. 
	 * 
	 * rule # 1 - having tokenizer seem to make quite a bit of sense so one does not need to backtrack too much 
	 * - without this rule it can take quite a bit of effort to specify 
	 * - to do this we'll have to fix the reader interface so it can be used to return more than just char.
	 * 
	 * rule # 2 - we'll have to be able to hold a list of definitions of tokens & rules (while having them sharing
	 * quite a bit of grammar) and use ways to determine whether they'll parse appropriately. 
	 * 
	 * Token - cannot refer to other tokens
	 * 
	 * rule - can refer to other rules, including itself (watch out for left-recursive grammars - at least the simple ones). 
	 * 
	 * a token reader is a parser & a reader. 
	 * 
	 */
	
	/// <summary>
	/// A reader that produces Tokens instead of chars: it repeatedly runs the
	/// supplied token parser over the underlying CharReader, memoizing results
	/// by byte offset, and silently discards token types in the skip list
	/// (e.g. whitespace). Implements IReader so rule-level parsers can consume it.
	/// </summary>
	public class TokenReader2 : IReader , IDisposable {
		private CharReader reader;                          // underlying character source
		private BaseParser parser;                          // parser whose choices each match one token
		private Hashtable skipList;                         // token types to discard; do I want this to be done here or passed in? 
		private Hashtable readTokens = new Hashtable();     // memoization: byte offset -> { Token, next byte offset }

		public TokenReader2(BaseParser parser, Hashtable skipList, CharReader reader)  { 
			this.parser = parser; 
			this.skipList = skipList; 
			this.reader = reader; 
		}
		
		public TokenReader2(BaseParser parser, Hashtable skipList, Stream s) {
			this.parser = parser; 
			this.skipList = skipList; 
			this.reader = new CharReader(s); 
		}
		
		/// <summary>
		/// Looks at the next non-skipped token without consuming it: the
		/// underlying reader position is saved and restored around the read.
		/// </summary>
		public object Peek() { 
			Location temp = this.reader.Location; 
			Token result = (Token)this.Read(); 
			this.reader.Location = temp; 
			return result; 
		}
		
		/// <summary>
		/// Returns the next token whose Type is not in the skip list; skipped
		/// tokens are read and discarded along the way.
		/// </summary>
		public object Read() { 
			Token temp; 
			do {
				temp = this.readToken(); 
			} while (this.skipList.Contains(temp.Type)); 
			return temp; 
		}
		
		/// <summary>
		/// Reads one raw token, consulting and populating the memoization cache.
		/// Emits a synthetic BOF token once at offset -1 and EOF at end of input.
		/// </summary>
		private Token readToken() {
			Location current = this.Location; 
			if (current.Bytes == -1) {
				// Synthetic begin-of-file marker, emitted exactly once.
				this.Location = this.Location.IncBytes(1); 
				return new Token("BOF", null); 
			}
			
			int nextChar = (int)this.reader.Peek(); 
			if (nextChar == -1) { // no more bytes to read
				return new Token("EOF", null); 
			}
			if (this.readTokens.Contains(current.Bytes)) // memoization; not sure if this is worth it rather than re-parsing.. hmm...
			{
				// Cache hit: return the cached token and jump to its cached end position.
				object[] cached = (object[])this.readTokens[current.Bytes];
				this.Location = new Location((long)cached[1]);
				return (Token)cached[0];
			}
			else
			{
				Result result = this.parser.Parse(this.reader, current);
				if (Result.Succeeded(result)) {
					Token token;
					if (result.Inner is Token) {
						token = (Token)result.Inner; 
					} else {
						token = new Token("UNKNOWN", result.Inner); 
					}
					// BUG FIX: cache the Token we actually return. Previously the raw
					// result.Inner was cached, so a cache hit on an "UNKNOWN" token
					// would throw InvalidCastException when cast back to Token above.
					this.readTokens[current.Bytes] = new object[2]{ token , result.Position.Bytes }; 
					return token; 
				} else {
					return new Token("FAIL", null); // should this just return NULL? not sure at this moment... hmmm... 
				}
			}
		}
		
		public Location Location {
			get { return this.reader.Location; } 
			set { this.reader.Location = value; } 
		}
		
		public void Dispose() {
			this.reader.Dispose(); 
		}
	}
	
    /*
     * it would be nice if all I have to specify is a set of token strings instead of specifying parsers. 
     * but that means we'll need a way to do algebraic data type & implicit casting. This can be difficult. 
     * 
     * with C# this is probably not a solvable problem, so it makes no sense to solve it here; what we want, though, is a 
     * factory. 
     */
	/// <summary>
	/// A parser that consumes exactly one token from a TokenReader2 and
	/// succeeds iff the token's type matches this parser's token type.
	/// </summary>
	public class TokenParser2 : BaseParser {
		private string tokenType;  // token type name this parser accepts
		private Type valueType;    // declared CLR type of the token's value; stored but never read in this class

		public TokenParser2(string tokenType, Type valueType) {
			this.tokenType = tokenType;
			this.valueType = valueType; 
		}

		/// <summary>
		/// Reads the next token at <paramref name="pos"/> and returns a success
		/// Result wrapping it when its type matches; a failure Result otherwise.
		/// </summary>
		/// <exception cref="ArgumentException">Thrown when the reader is not a TokenReader2.</exception>
		public override Result InnerParse (IReader reader, Location pos)
		{
			if (!(reader is TokenReader2)) {
				// BUG FIX: the message previously said "TokenReader", which is not
				// the name of the required type and made the diagnostic misleading.
				throw new ArgumentException("reader is not TokenReader2"); 
			}
			reader.Location = pos; // the token reader memoizes previous reads, so rewinding is cheap
			Token next = (Token)reader.Read(); 
			if (next.Type == this.tokenType) {
				return new Result(next, reader.Location); 
			} else {
				return Result.MakeFail(reader, pos); 
			}
		}

		// Shallow copy is sufficient: both fields are reference-immutable.
		public TokenParser2 Clone()
		{
			return (TokenParser2)this.MemberwiseClone();
		}

		// Unwraps the Token so downstream transforms see the raw token value.
		public override object InnerTransform(object val)
		{
			return base.InnerTransform(((Token)val).Value); 
		}
	}

    /// <summary>
    /// Registry of named token parsers plus convenience wrappers over the
    /// Combinator factory methods that take token names instead of parser
    /// instances. Every lookup hands back a clone so callers can configure
    /// their copy independently.
    /// </summary>
    public class TokenCombinator
    {
        private Hashtable tokens = new Hashtable();

        public TokenCombinator()
        {
        }

        public void AddToken(string name)
        {
            // No value type supplied: register with the most general type.
            this.AddToken(name, typeof(object));
        }

        public void AddToken(string name, Type type)
        {
            this.AssertTokenNotExists(name);
            this.tokens.Add(name, new TokenParser2(name, type));
        }

        private void AssertTokenExists(string name)
        {
            if (this.tokens.Contains(name))
            {
                return;
            }
            throw new ArgumentException(string.Format("Token {0} is undefined", name));
        }

        private void AssertAllTokensExist(string[] tokens)
        {
            for (int i = 0; i < tokens.Length; i++)
            {
                this.AssertTokenExists(tokens[i]);
            }
        }

        private void AssertTokenNotExists(string name)
        {
            if (!this.tokens.Contains(name))
            {
                return;
            }
            throw new ArgumentException(string.Format("Token {0} has already been defined", name));
        }

        public BaseParser ZeroMany(string inner)
        {
            BaseParser parser = this.Token(inner);
            return Combinator.ZeroMany(parser);
        }

        public BaseParser OneMany(string inner)
        {
            BaseParser parser = this.Token(inner);
            return Combinator.OneMany(parser);
        }

        public BaseParser ZeroOne(string token, object defaultVal)
        {
            BaseParser parser = this.Token(token);
            return Combinator.ZeroOne(parser, defaultVal);
        }

        public BaseParser Sequence(params string[] tokens)
        {
            return Combinator.MakeSequence(this.ResolveAll(tokens));
        }

        public BaseParser Choice(params string[] tokens)
        {
            return Combinator.MakeChoice(this.ResolveAll(tokens));
        }

        public BaseParser Repeat(string inner, long min, long max, object defaultResult)
        {
            BaseParser parser = this.Token(inner);
            return Combinator.Repeat(parser, min, max, defaultResult);
        }

        public BaseParser Token(string token)
        {
            this.AssertTokenExists(token);
            TokenParser2 registered = (TokenParser2)this.tokens[token];
            return registered.Clone();
        }

        // Resolves each token name to a cloned parser, preserving order.
        private BaseParser[] ResolveAll(string[] names)
        {
            BaseParser[] parsers = new BaseParser[names.Length];
            for (int i = 0; i < names.Length; i++)
            {
                parsers[i] = this.Token(names[i]);
            }
            return parsers;
        }
    }

}

