﻿//#define NEW_WAY
using System;
using System.Text;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;

using alib;
using alib.Enumerable;
using alib.Debugging;
using alib.Collections.ReadOnly;

namespace agree
{
	using rewriter.regex;

	////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	/// A bit of navigation harness dedicated to the special-purpose TFS reader whose sole function is to navigate 
	/// the zero or more TDL token streams which comprise each TDL feature structure definition, producing a
	/// fully-formed TFS for each. This is one of the earliest extant parts of the agree system code, dating from
	/// around 2008. Coreferences are incrementally joined during the traversal according to an ad-hoc method
	/// that was developed back then, but since the code has never drawn attention to itself, it lives on for this
	/// bootstrapping task. Testing with a few different DELPH-IN grammars suggests that the resulting structures 
	/// are compliant with the Joint Reference Standard, including the TDL extensions of which I am aware.
	/// 
	/// The historical tour is brief, however. Immediately after finalizing one of these mutable TFSes, it is 
	/// copied into agree's modern Array TFS Storage representation--an immutable form--for operational use, and the bootstrap 
	/// TFSes are discarded.
	////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	public abstract class TdlParsePos : WriteableTfs
	{
		public TdlParsePos(IRtParent parent, IIdentity prv, Restrictor r)
			: base(parent, prv, r)
		{
			// start one position before the first token; MoveNext() must be called before reading Current
			this.ipos = -1;
		}

		// token stream currently being navigated; bound (and re-bound) via Reset(...)
		IList<TdlTok> src_tokens;
		// index of the current token within 'src_tokens'; -1 prior to the first MoveNext()
		int ipos;
		// regex match constraints harvested from TDL string values (created lazily in AcceptType)
		public List<Regex> rx_match;
		// regex replacement constraints harvested from TDL string values (created lazily in AcceptType)
		public List<RegexReplaceRaw> rx_repl;

		// true once the position has advanced beyond the last token; false while ipos == -1.
		// NOTE(review): throws NullReferenceException if no stream is bound — use CurrentSafe for display.
		protected bool Eof { [DebuggerStepThrough] get { return ipos >= src_tokens.Count; } }
		// the token at the current position; throws when the position is invalid
		protected TdlTok Current { [DebuggerStepThrough] get { return src_tokens[ipos]; } }
		// like Current, but yields sentinel tokens instead of throwing when unbound or past the end
		protected TdlTok CurrentSafe { [DebuggerStepThrough] get { return src_tokens == null ? TdlTok.Null : ipos < src_tokens.Count ? src_tokens[ipos] : TdlTok.Eof; } }
		// type of the current token, or the EOF sentinel type when past the end of the stream
		protected TdlTok.Type CurTokType { [DebuggerStepThrough] get { return Eof ? TdlTok.Type.EOF : src_tokens[ipos].type_id; } }
		// text of the current token
		protected String CurTokIdent { [DebuggerStepThrough] get { return src_tokens[ipos].Text; } }
		// display form of the current token, or "EOF" when past the end of the stream
		protected String CurTokString { [DebuggerStepThrough] get { return Eof ? "EOF" : src_tokens[ipos].ToString(); } }
		// source-file position of the current token, for error reporting
		protected FilePos FilePos { [DebuggerStepThrough] get { return src_tokens[ipos].FilePos; } }
		// Advance by one token. Returns false once the position has moved past the last token.
		protected bool MoveNext()
		{
			if (ipos < src_tokens.Count)
				ipos++;
			return ipos < src_tokens.Count;
		}
		// Advance by one token and return it, throwing a TdlException (with the given message, or a
		// default) when the current token is already the last one.
		[DebuggerStepThrough]
		protected TdlTok MoveNextThrow(String error_msg = null)
		{
			if (error_msg == null)
				error_msg = "Incomplete TDL constraint specification.";
			if (ipos < src_tokens.Count - 1)
				return src_tokens[++ipos];
			throw new TdlException(FilePos, error_msg);
		}
		// Throw a TdlException unless the current token has the indicated type.
		[DebuggerStepThrough]
		protected void VerifyTokenType(TdlTok.Type ψψ, String msg = null)
		{
			if (msg == null)
				msg = String.Format("Expected '{0}'.", ψψ.ToString());
			if (src_tokens[ipos].type_id != ψψ)
				throw new TdlException(FilePos, msg);
		}

		// Bind this navigation harness to a (new) token stream, positioned before its first token.
		protected void Reset(IList<TdlTok> toks)
		{
			ipos = -1;
			this.src_tokens = toks;
		}

		// Diagnostic display: a small window of tokens surrounding the current position.
		public override String ToString()
		{
			if (src_tokens == null)
				return ("(null parse position)");
			return "\"… " + src_tokens.Skip(Math.Max(ipos - 1, 0)).Take(3).StringJoin(" ") + " …\"";
		}
	};

	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	/// <summary>
	/// for transforming sequences of TDL tokens into persisted TFSs.
	/// </summary>
	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	[DebuggerDisplay("{ToString(),nq}")]
	public abstract partial class TfsBuilder : TdlParsePos
	{
		TfsBuilder(IRtParent parent, IIdentity prv, Restrictor r)
			: base(parent, prv, r)
		{
			// when the TFS is being built for a type definition, 'prv' is that Type; otherwise null
			this.type = prv as Type;
		}
		// the Type this TFS is being built for, or null when 'prv' was not a Type
		Type type;

		// NOTE(review): the Fstk/Mstk scratch-stack experiment below is compiled out ('#if false') and
		// retained for reference only; it is not part of the active build.
#if false
		Fstk fstk;
		Mstk mstk;

		unsafe struct Fstk : IDisposable
		{
			public Fstk(TfsBuilder tfs)
			{
				this.tfs = tfs;
				this.i_stk = 0;
				this.stk = new int[256];
			}
			readonly TfsBuilder tfs;
			readonly int[] stk;
			public int i_stk;

			public IDisposable Push(int i_feat)
			{
				if (i_stk >= stk.Length)
					throw new Exception();
				stk[i_stk++] = i_feat;
				return this;
			}
			//public int Peek()
			//{
			//	if (i_stk == 0)
			//		throw new Exception();
			//	return stk[i_stk - 1];
			//}

			public void Dispose()
			{
				if (i_stk == 0)
					throw new Exception();
				i_stk--;
			}
		}

		unsafe struct Mstk : IDisposable
		{
			public Mstk(TfsBuilder tfs)
			{
				this.tfs = tfs;
				this.i_mstk = 0;
				this.stk = new int[256];
			}
			readonly TfsBuilder tfs;
			readonly int[] stk;
			int i_mstk;

			public IDisposable Push()
			{
				if (i_mstk >= stk.Length)
					throw new Exception();
				stk[i_mstk++] = tfs.fstk.i_stk;
				Console.Write(" " + i_mstk + "+");
				return this;
			}
			public int Peek()
			{
				if (i_mstk == 0)
					throw new Exception();
				return stk[i_mstk - 1];
			}

			public void Dispose()
			{
				if (i_mstk == 0)
					throw new Exception();
				Console.Write(" -" + i_mstk);
				tfs.fstk.i_stk = stk[--i_mstk];
			}
		}
#endif
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// Store edge 'e' as the value of the feature currently selected by 'cref', on the node identified
		/// by the host edge's mark.
		/// </summary>
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		void SetConstraint(ConstraintRef cref, Edge e)
		{
			int i_feat = cref.i_feat;
			int host_mark = cref.Host.Mark;
			SetEdge(i_feat, host_mark, e);
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// Select feature 'ifeat_next' on the constraint reference. A no-op when that (valid) feature is
		/// already selected; throws when the requested feature is -1 or not appropriate for the host type.
		/// </summary>
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		public void SwitchToFeature(ref ConstraintRef cref, int ifeat_next)
		{
			// already pointing at a valid feature which matches the request: nothing to do
			bool already_selected = cref.i_feat != -1 && cref.i_feat == ifeat_next;
			if (already_selected)
				return;

			if (ifeat_next == -1)
				throw new Exception();
			if (!cref.HostType.HasFeature(ifeat_next))
				throw new Exception();

			cref.i_feat = ifeat_next;
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// Descend into the value of the currently-selected feature, producing a new reference with
		/// 'ifeat_next' selected. Always stays in the same TFS.
		/// </summary>
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		public ConstraintRef NextConstraint(ConstraintRef cref, int ifeat_next)
		{
			Edge e_next;
			// NOTE(review): the boolean result of TryGetEdge is deliberately ignored; on a miss 'e_next'
			// is the default Edge — presumably the "no constraint yet" sentinel. Confirm against callers.
			TryGetEdge(cref.i_feat, cref.Host.Mark, out e_next);
			var cref_next = new ConstraintRef(this, e_next, ifeat_next);
			return cref_next;
		}

#if true
		public void SetHostType(ref ConstraintRef cref, Type new_type)
		{
			if (cref.HostType != new_type)
			{
				cref.Host = CreateEdge(new_type, cref.Host.IsCoreferenced);
				if (!cref.HostType.HasFeature(cref.i_feat))
					cref.i_feat = -1;
			}
		}
#endif

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// Accept one top-level TDL constraint token stream, unifying its constraints into the node at 'e'.
		/// Throws when the stream is not fully consumed.
		/// </summary>
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		void _accept_base_feature_constraint(Edge e, IList<TdlTok> toks)
		{
			// (re)bind the navigation harness to this token stream, positioned before the first token
			Reset(toks);

			// an empty stream is legal: the definition carries no local feature constraints
			if (MoveNext())
			{
				var cref = new ConstraintRef(this, e, -1);
				AcceptFeatures(ref cref, TdlTok.Type.TokMap_Empty);
			}

			if (!Eof)
				throw new TdlException(FilePos, "TDL parser returned without using all tokens.");
		}

		// Forward to AcceptFeatures with a scratch copy, so the caller's ConstraintRef is not advanced.
		Edge.Flag _AcceptFeatures(ref ConstraintRef cr, TdlTok.Type tok_map)
		{
			var cr_scratch = cr;
			var flags = AcceptFeatures(ref cr_scratch, tok_map);
			return flags;
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		///
		/// Accept a sequence of 'FEAT value' constraints at one nesting level of a TDL expression, unifying
		/// each into the node hosted by 'cr'. 'tok_map' is a bit-set of token types which legitimately
		/// terminate this level (e.g. ']' inside square brackets). Returns the (possibly refined) flags of
		/// the host edge.
		///
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		Edge.Flag AcceptFeatures(ref ConstraintRef cr, TdlTok.Type tok_map)
		{
			// remember entry state so the parse level can be restored after each accepted constraint
			int ifeat_start = cr.i_feat;
			int hm = cr.Host.Mark;
			//Debug.Assert(hm > 0);
			Edge.Flag f = cr.Host.FlagsId;

			while (!base.Eof)
			{
				//  _                 _
				// |  current          |
				// |  FEAT  type & ... |
				// |_ ^               _|

				VerifyTokenType(TdlTok.Type.Identifier, "Expected a feature name (1)");
				// resolve the feature name against the global feature table
				int i_feat = im.ftm.GetFeatureIndex(base.CurTokIdent);
				if (ifeat_start == -1)
					ifeat_start = i_feat;

				// the feature is not appropriate for the host's current type: infer a more specific host
				// type from the maximal type which introduces this feature
				if (!cr.HostType.HasFeature(i_feat))
				{
					Type tt_maximal = im.GetMaximalTypeForFeature(i_feat);
					if (tt_maximal == null)
					{
						// NOTE(review): no type introduces this feature; the feature's value token is skipped
						// and this level is abandoned with no flags — confirm this silent recovery is intended
						MoveNextThrow();
						MoveNext();
						return 0;
					}
					Type glb;
					if ((glb = im.tu.UnifyTypesFull(cr.HostType, tt_maximal)) == null)
						// NOTE(review): the format placeholders in this message look inconsistent with the argument order
						throw new TdlException(FilePos, "Error inferring type. '{0}' is not appropriate for  '{1}'. (Maximal type '{1}' for feature '{0}' does not unify with previously seen type '{2}'.)", base.CurTokIdent.ToUpper(), cr.HostType.Name, tt_maximal.Name);
#if NEW_WAY
					SetHostType(ref cr, glb);
					f = glb.EdgeFlag | (cr.Host.FlagsId & Edge.Flag.Coreference);
#else
					// re-type the host to the computed greatest lower bound, preserving any coreference flag
					cr.SetHostType(glb);
					f = glb.EdgeFlag | (cr.Host.FlagsId & Edge.Flag.Coreference);

#endif
				}
				SwitchToFeature(ref cr, i_feat);

				TdlTok.Type tok = MoveNextThrow().type_id;

				// Follow a path specification
				if (tok == TdlTok.Type.Dot)
				{
					MoveNextThrow();
					VerifyTokenType(TdlTok.Type.Identifier, "Error following feature path specification. Expected a feature name after '.'");

					//  _                    _
					// | current              |
					// | F1 . F2  type &  ... |
					// |_     ^              _|
					//
					// If the next feature in the path is not appropriate for the current type, now is the time to infer 
					// a more specific type by peeking ahead. To that feature, assign a node with a type corresponding to 
					// the maximal type for the next feature in the path. 

#if NEW_WAY
					var crx = new ConstraintRef(this, default(Edge), i_feat);
					var ef = AcceptFeatures(ref crx, tok_map  | TdlTok.Type.Comma);

					SetConstraint(cr, new Edge(ef, crx.Host.Mark));

					if (base.Eof || (tok_map & Current.ψ) != 0)
						return f;

					VerifyTokenType(TdlTok.Type.Comma, "");
					MoveNextThrow();
#else
					i_feat = im.ftm.GetFeatureIndex(base.CurTokIdent);
					if (!cr.ConstraintType.HasFeature(i_feat))
					{
						Type tt_maximal = im.GetMaximalTypeForFeature(i_feat);
						if (cr.UnifyInConstraintType(tt_maximal) < 0)
							throw new TdlException(FilePos, "Error inferring type. Existing type {0} for feature {1} failed to unify with maximal type {2} ", cr.ConstraintType.Name, base.CurTokIdent.ToUpper(), tt_maximal.Name);
					}
					// descend one step along the dotted path and continue the loop at the deeper node
					cr = NextConstraint(cr, i_feat);
#endif
				}
				else if (tok == TdlTok.Type.Identifier || tok == TdlTok.Type.Tag || tok == TdlTok.Type.XMapTag || tok == TdlTok.Type.SquareOpen ||
							tok == TdlTok.Type.AngleOpen || tok == TdlTok.Type.DifferenceListOpen || tok == TdlTok.Type.String || tok == TdlTok.Type.Integer ||
							tok == TdlTok.Type.RegEx || tok == TdlTok.Type.RegExRepl)
				{
					//  _                 _
					// | current           |
					// | FEAT  type &  ... |
					// |_      ^          _|
					AcceptType(cr, tok_map);

					// stop when the stream ends or the current token is one of this level's terminators
					if (base.Eof || (tok_map & Current.type_id) != 0)
						return f;

					// restore the parse level after (possibly) accepting a dotted feature path
#if !NEW_WAY
					cr = new ConstraintRef(this, new Edge(f, hm), ifeat_start);
#endif
				}
				else
					throw new TdlException(FilePos, "Expected: '.', type identifier, tag, '[', '<', or '<!'.");
			}
			throw new TdlException(FilePos, "Unexpected end of token stream while parsing TDL.");
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		///
		/// Unify together (multiple) type- and feature-structure constraints on this feature separated by TOK_AMP
		///
		/// 'cr' identifies the (host node, feature) slot which receives the constraints; 'tok_map' is the
		/// bit-set of token types which terminate the enclosing group (']', '>', '!>', ',', '.').
		///
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		void AcceptType(ConstraintRef cr, TdlTok.Type tok_map)
		{
			//  _                                        _
			// | current                                  |
			// | FEAT    type & #tag & [constraint] & ... |
			// |_        ^                               _|
			//
			while (!base.Eof)
			{
				Type tt;
				var ψ = Current.type_id;

				// plain type-name conjunct: unify the named type into the constraint slot
				if (ψ == TdlTok.Type.Identifier)
				{
					if (!td.TryGetType(Current.Text, out tt))
						TdlTokenizer.ErrorExit(Current, "invalid type: '{0}'", Current.Text);
					if (tt.IsTop)
					{
						/// not calling 'UnifyInConstraint' means that no edge is added to the dictionary. If the condition
						/// persists, a EmptyFeatureStructureTfs will be eventually created instead
					}
					else if (cr.UnifyInConstraintType(tt) < 0)
						throw new TdlException(FilePos, "existing type {0} on feature {1} failed to unify with {2}", cr.ConstraintType.Name, cr.Feature.ToUpper(), tt.Name);
				}
				// string literal, regex match, or regex replacement conjunct
				else if (ψ == TdlTok.Type.String || ψ == TdlTok.Type.RegEx || ψ == TdlTok.Type.RegExRepl)
				{
					Edge existing_edge = cr.Constraint;
					// an already-present, different string value is a unification failure
					String es = tu.GetStringValue(existing_edge.FlagsId);
					if (es != null && es != Current.Text)
						throw new TdlException(FilePos, "string value '{0}' for feature {1} failed to unify with string '{2}'",
							es,
							cr.Feature.ToUpper(),
							Current.Text);

					/* must unify down to string */
					if (!tu.CanUnifyToString(existing_edge.FlagsId))
						throw new TdlException(FilePos, "existing type {0} on feature {1} failed to unify with string type '{2}'",
							cr.ConstraintType.Name,
							cr.Feature.ToUpper(),
							td.ΔString.Name);

					// regex replacement: constrain the slot to the string type and record the pattern,
					// keyed by the feature path of this slot, for later application
					if (ψ == TdlTok.Type.RegExRepl)
					{
						SetConstraint(cr, new Edge(td.ΔString.EdgeFlag, 0));

						var ipath = _ixp2(cr.TfsSlot.ix1).First();
						var path = new Ix1Path(this, ipath).FsPath;

						RegexReplaceRaw tna = new RegexReplaceRaw(path, Current.Text);
						if (rx_repl == null)
							rx_repl = new List<RegexReplaceRaw>();
						rx_repl.Add(tna);
					}
					// regex match: same treatment, recorded in the match list
					else if (ψ == TdlTok.Type.RegEx)
					{
						SetConstraint(cr, new Edge(td.ΔString.EdgeFlag, 0));

						var ipath = _ixp2(cr.TfsSlot.ix1).First();
						var path = new Ix1Path(this, ipath).FsPath;

						Regex tna = new Regex(path, Current.Text);
						if (rx_match == null)
							rx_match = new List<Regex>();
						rx_match.Add(tna);
					}
					// plain string literal: constrain the slot to a string-valued edge
					else
					{
						SetConstraint(cr, tu.CreateStringEdge(Current.Text));
					}
				}
				// #tag conjunct: record a coreference between this slot and the tag's other occurrences
				else if (ψ == TdlTok.Type.Tag)
				{
					AddCoref(Current, cr.FeatMark);
				}
				// integer literal conjunct
				else if (ψ == TdlTok.Type.Integer)
				{
					SetConstraint(cr, TypeUtils.CreateIntegerEdge(int.Parse(Current.Text)));
				}
				// cross-TFS map tag: currently handled exactly like an ordinary coreference tag
				else if (ψ == TdlTok.Type.XMapTag)
				{
#if true
					AddCoref(Current, cr.FeatMark);
#else
					XMapTag xmt;
					if (!xmaptags.TryGetValue(Current.i_s, out xmt))
					{
						xmt = new XMapTag(Current.i_s);
						xmaptags.Add(Current.i_s, xmt);
					}

					xmt.Add(new TmFeatMark(tm, cr.FeatMark));
#endif
				}
				// bracketed feature-structure conjunct: recurse for the contents
				else if (ψ == TdlTok.Type.SquareOpen)
				{
					MoveNextThrow();

					if (Current.type_id == TdlTok.Type.SquareClose)
					{
						//   < [ ] , ... >
						//       ^
						//	(do nothing)
					}
					else
					{
#if NEW_WAY
						var crx = new ConstraintRef(this, default(Edge), -1);
						var ef = AcceptFeatures(ref crx, TdlTok.Type.TokMap_SqCl);

						SetConstraint(cr, new Edge(ef, crx.Host.Mark));

#else
						VerifyTokenType(TdlTok.Type.Identifier, "Expected a feature name (2)");

						// infer type by peeking ahead in the given path
						int i_feat_next = im.ftm[base.CurTokIdent].i_feat;
						if (i_feat_next == -1)
							throw new TdlException(FilePos, "Error inferring type. Feature {0} was not defined for any type", base.CurTokIdent.ToUpper());
						if (!cr.ConstraintType.HasFeature(i_feat_next) &&
							cr.UnifyInConstraintType(tt = im.GetMaximalTypeForFeature(i_feat_next)) < 0)
							throw new TdlException(FilePos, "Error inferring type. Existing type {0} for feature {1} failed to unify with maximal type {2}", cr.ConstraintType.Name, base.CurTokIdent.ToUpper(), tt.Name);

						// recurse to build a TFS for the contents of the square brackets
						Edge e_next = cr.Constraint;

						var crx = new ConstraintRef(this, e_next, i_feat_next);

						var ef = AcceptFeatures(ref crx, TdlTok.Type.TokMap_SqCl);
						SetConstraint(cr, new Edge(ef, e_next.Mark));
#endif
					}
				}
				// list conjunct: '< >' unifies in the empty-list type, anything else expands via AcceptList
				else if (ψ == TdlTok.Type.AngleOpen)
				{
					//  _                            _
					// | current                      |
					// | FEAT    < >                  |
					// |           ^                  |
					// | FEAT    < ... >              |
					// |           ^                  |
					// | FEAT    < type ...  >        |
					// |           ^                  |
					// | FEAT    < #tag ... >         |
					// |           ^                  |
					// | FEAT    < [ constraint ... > |
					// |_          ^                 _|

					MoveNextThrow();

					if (Current.type_id != TdlTok.Type.AngleClose)
						AcceptList(cr);
					else if (cr.UnifyInConstraintType(im.tt_empty) < 0)
						throw new TdlException(FilePos, "Error expanding list. Existing type {0} for feature {1} failed to unify with empty list type {2}", cr.ConstraintType.Name, cr.Feature.ToUpper(), im.tt_empty.Name);
					else
						VerifyTokenType(TdlTok.Type.AngleClose);
				}
				// difference-list conjunct '<! ... !>': LIST is expanded like a list whose final tail is
				// coreferenced (via a synthesized unique tag) with the LAST feature
				else if (ψ == TdlTok.Type.DifferenceListOpen)
				{
					MoveNextThrow();

					// synthesized tag guaranteed not to collide with any user-written tag
					String last_coref = "diff-list-" + Guid.NewGuid().ToString();

					if (cr.UnifyInConstraintType(im.tt_dlist) < 0)
						throw new TdlException(FilePos, "Error expanding difference list. Existing type {0} for feature {1} failed to unify with difference list type {2} ", cr.ConstraintType.Name, cr.Feature.ToUpper(), im.tt_dlist.Name);

					ConstraintRef cref_dl = NextConstraint(cr, im.f_ix_dlist_list);
					if (Current.type_id != TdlTok.Type.DifferenceListClose)
						AcceptDifferenceList(cref_dl, last_coref);
					else
					{
						// empty difference list: LIST is an unconstrained list, tagged for coreference with LAST
						if (cref_dl.UnifyInConstraintType(im.tt_list) < 0)
							throw new TdlException(FilePos, "Error expanding difference list. Existing type {0} for feature {1} failed to unify with list type {2} ",
								cref_dl.ConstraintType.Name,
								cr.Feature.ToUpper(),
								im.tt_list.Name);
						//Current.Form = last_coref;
						Current.text = last_coref;
						AddCoref(Current, cref_dl.FeatMark);
					}

					// unify 'LAST list'
					SwitchToFeature(ref cref_dl, im.f_ix_dlist_last);
					if (cref_dl.UnifyInConstraintType(im.tt_list) < 0)
						throw new TdlException(FilePos, "Error expanding difference list. Existing type {0} for feature {1} failed to unify with list type {2} ",
							cr.ConstraintType.Name,
							cr.Feature.ToUpper(),
							im.tt_list.Name);

					// the token carries the synthesized tag, joining LAST into the same coreference set
					AddCoref(Current, cref_dl.FeatMark);

					VerifyTokenType(TdlTok.Type.DifferenceListClose);
				}
				else
					throw new TdlException(FilePos, "Expected a type, tag, or constraint specification");

				// either 1. we're done with all constraints 
				if (!MoveNext())
					return;

				// 2. we're done with a block grouped by [ x ]  < y , >  <! z , !>  < x . y > originating from this base level...
				ψ = Current.type_id;
				if ((tok_map & ψ) != 0)
					return;

				// 3. there are additional features constrained from this base level...
				if (ψ == TdlTok.Type.Comma)
				{
					MoveNextThrow();
					return;
				}

				// 4. or there are additional constraints on this type.
				VerifyTokenType(TdlTok.Type.Ampersand);
				MoveNext();
			}
			throw new TdlException(FilePos, "Unexpected end of token stream while parsing TDL.");
		}


		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		///
		/// Expand one cons cell of a TDL list '< a, b, ... >' into the node at 'cr': unify in the non-empty
		/// list type, accept the FIRST element, then handle the tail — a dotted pair '. b >', a comma
		/// (recurse for the remaining elements), or '>' (close with the empty-list type).
		///
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		void AcceptList(ConstraintRef cr)
		{
			if (cr.UnifyInConstraintType(im.tt_ne_list) < 0)
				throw new TdlException(FilePos, "Error expanding list. Existing type {0} for feature {1} failed to unify with list type {2} ", cr.ConstraintType.Name, cr.Feature.ToUpper(), im.tt_ne_list.Name);

			ConstraintRef cref_fwd = cr;
			//if (c_parts > 0)
			{
				// accept the FIRST element of this cons cell
				cref_fwd = NextConstraint(cr, im.f_ix_list_head);
				AcceptType(cref_fwd, TdlTok.Type.TokMap_AngCl_Comma_Dot);

				// select feature 'REST'
				SwitchToFeature(ref cref_fwd, im.f_ix_list_tail);


				if (Current.type_id == TdlTok.Type.Dot)
				{
					//Debug.Assert(c_parts == 1, "should have prevented dotted pair with >2 parts during tokenization");
					//  _                            _
					// | current                      |
					// | FEAT    < a . b >            |
					// |_            ^               _|
					MoveNextThrow();

					// dotted pair: the expression after '.' constrains REST directly (no empty-list terminator)
					AcceptType(cref_fwd, TdlTok.Type.TokMap_AngCl_Comma_Dot);
					//SetEdge(_im.f_ix_list_head,cr.Host.Mark,cref_fwd.
					//SetConstraint(cr, cref_fwd.Host);

					if (Current.type_id != TdlTok.Type.AngleClose)
						throw new Exception();

					//MoveNextThrow();
					return;
					//AcceptType(cref_fwd, TdlTok.Type.TokMap_AngCl);
					//c_parts--;
				}
				else if (Current.type_id == TdlTok.Type.Comma)
				{
					if (cref_fwd.UnifyInConstraintType(im.tt_list) < 0)
						throw new Exception();

					MoveNextThrow();
					// '...' leaves the remainder of the list unconstrained (open-ended list)
					if (Current.type_id == TdlTok.Type.Ellipsis)
					{
						MoveNextThrow();
					}

					// recurse for the remaining elements unless the list closes right after the comma/ellipsis
					if (Current.type_id != TdlTok.Type.AngleClose)
					{
						AcceptList(cref_fwd);
						SetConstraint(cr, cref_fwd.Host);
					}
				}
				else if (Current.type_id == TdlTok.Type.AngleClose)
				{
					// end of list: terminate REST with the empty-list type
					//term_type = _im.tt_empty;
					if (cref_fwd.UnifyInConstraintType(im.tt_empty) < 0)
						throw new Exception();
					return;
				}

				//				if (
				//#if PET_ENFORCES_LIST_TERM_TYPE==true
				//					term_type==tm.tt_empty &&
				//#endif
				//cref_fwd.UnifyInConstraintType(term_type) < 0)
				//					throw new TdlException(FilePos, "Error expanding list. Existing type {0} for feature {1} failed to unify with list type {2} ", cref_fwd.ConstraintType.Name, cref_fwd.Feature.ToUpper(), term_type.Name);

				//c_parts--;
				///////////////////////////////////////////////////////////////////////////////////////////////////////////////
				/// recurse down list parts
				///////////////////////////////////////////////////////////////////////////////////////////////////////////////
				//if (c_parts > 0)
				//if (Current.type_id != TdlTok.Type.AngleClose)
				//{
				//	AcceptList(cref_fwd);
				//	SetConstraint(cr, cref_fwd.Host);
				//}
				//else
				//	Nop.X();
			}
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		///
		/// Expand one cons cell of a difference list '<! a, b, ... !>' into the node at 'cr'. Recurses on
		/// commas; at the final element, the list tail is joined (via the synthesized tag 'last_coref')
		/// into the coreference set shared with the difference list's LAST feature.
		///
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		void AcceptDifferenceList(ConstraintRef cr, String last_coref)
		{
			if (cr.UnifyInConstraintType(im.tt_ne_list) < 0)
				throw new TdlException(FilePos, "Error expanding difference list. Existing type {0} for feature {1} failed to unify with list type {2} ", cr.ConstraintType.Name, cr.Feature.ToUpper(), im.tt_ne_list.Name);

			ConstraintRef cref_fwd = cr;

			// accept the FIRST element of this cons cell
			cref_fwd = NextConstraint(cr, im.f_ix_list_head);
			AcceptType(cref_fwd, TdlTok.Type.TokMap_DlsCl_Comma);

			// select feature 'REST'
			SwitchToFeature(ref cref_fwd, im.f_ix_list_tail);

			if (cref_fwd.UnifyInConstraintType(im.tt_list) < 0)
				throw new TdlException(FilePos, "Error expanding difference list. Existing type {0} for feature {1} failed to unify with list type {2} ", cref_fwd.ConstraintType.Name, cref_fwd.Feature.ToUpper(), im.tt_list.Name);

			///////////////////////////////////////////////////////////////////////////////////////////////////////////
			/// recurse down list parts
			///////////////////////////////////////////////////////////////////////////////////////////////////////////
			if (Current.type_id == TdlTok.Type.Comma)
			{
				MoveNextThrow();

				AcceptDifferenceList(cref_fwd, last_coref);
				SetConstraint(cr, cref_fwd.Host);
				return;
			}

			// final element: tag the open tail so it corefers with the difference list's LAST feature
			//Current.Form = last_coref;
			Current.text = last_coref;
			AddCoref(Current, cref_fwd.FeatMark);
		}


		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		///
		/// Diagnostic display: the base parse-position window plus the current coreference count.
		///
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		public override String ToString()
		{
			String crf;
			if (corefs == null)
				crf = "(null)";
			else
				crf = corefs.Count.ToString();
			return String.Format("{0} corefs: {1}", base.ToString(), crf);
		}


		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// 
		/// </summary>
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		public partial class Tdl : TfsBuilder
		{
			///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
			/// Build the mutable TFS for type 't' by accepting each of its TDL constraint token streams in
			/// turn, then resolving the coreference sets gathered during the traversal.
			///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
			Tdl(IRtParent parent, Type t, Restrictor r, ConsDefs constraints)
				: base(parent, t, r)
			{
				this._top_edge = this.CreateEdge(t, false);

				// each token stream contributes one top-level constraint group to the same TFS
				foreach (IList<TdlTok> toks in constraints)
					_accept_base_feature_constraint(_top_edge, toks);

				// join the accumulated coreference sets; the scratch table is then discarded
				if (corefs != null)
				{
					corefs._fixup_corefs(null);
					corefs = null;
				}
				base.c_act = base.EdgeCount;
			}

			///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
			/// Copy the index entry for 1-based slot 'ix1' into '*pate'. NOTE(review): a hash code of -1
			/// apparently marks a vacant entry (reported as a default entry); otherwise the intrusive
			/// next-link is cleared before the entry is handed out — confirm against arr_tfs_entry's contract.
			///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
			public unsafe override void GetIndexData(int ix1, arr_tfs_entry* pate)
			{
				Debug.Assert(ix1 > 0);

				*pate = entries[ix1 - 1];
				if (pate->hash_code == -1)
					*pate = default(arr_tfs_entry);
				else
					pate->_next32 = 0;

#if false
				int grm_idx = 0;
				if (g != null)
					grm_idx = g.grm_idx;
				pate->tgrm = new TaggedRelMode(grm_idx, RelMode.None);
				pate->next = 0;
#endif
			}

			///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
			/// <summary>
			/// entry point: infer the topmost type for the TFS described by 'constraints' and build it.
			/// Each constraint token stream begins with a feature token; the type which introduces that
			/// feature bounds the result type from below, so all such introducing types are unified
			/// together with any caller-supplied type 't'. Throws a TdlException when they do not unify.
			/// </summary>
			///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
			public static TfsBuilder.Tdl Build(IRtParent rts, Restrictor r, ConsDefs constraints, Type t = null)
			{
				Debug.Assert(t == null || t.ftm == r.FeatMgr);

				// 'foreach' (rather than the previous manual GetEnumerator loop) guarantees the
				// enumerator is disposed even when an exception is thrown mid-iteration
				foreach (IList<TdlTok> toks in constraints)
				{
					Type tcur = r.FeatMgr[toks[0].Text].introduced_by;
					if (tcur == t || tcur == null)
						continue;
					Debug.Assert(!tcur.IsTop);
					if (t == null)
						t = tcur;
					else if ((t = t.tu.UnifyTypesFull(t, tcur)) == null)
						throw new TdlException("inferred types for topmost features do not unify.");
				}
				Debug.Assert(t != null && t.HasAppropriateFeatures);

				return new TfsBuilder.Tdl(rts, t, r, constraints);
			}
		};


	};
}
