﻿using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;

using agree.schema;

using alib;
using alib.Collections;
using alib.Debugging;
using alib.Enumerable;

namespace agree
{
	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	/// <summary>
	/// A lexical entry: a demand-expanded grammar entry which caches its orthography (the 'words' array) at
	/// construction time and lazily captures relation lookup keys the first time its feature structure is expanded.
	/// </summary>
	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	[DebuggerDisplay("{ToString(),nq}")]
	public sealed class LexicalEntry : DemandExpandEntry, ILexeme, IString
	{
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// Note: LexicalEntries are not expanded for the purpose of extracting the orthography. It is obtained from the entry 
		/// Definition, and thus in this design orthography changes cannot be unified-in as part of a grammar's type expansion.
		/// </summary>
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		public LexicalEntry(Grammar g, Type t, ConsDefs consdef)
			: base(g, t, consdef)
		{
			/// Extract the orthography and cache it for parsing
			TfsSlot ts;
			var _def = base.Definition;
			if (_def == null)
				throw new TfsException("All constraints on lexical entry are vacuous");	// was bare 'Exception'; TfsException for consistency with the orthography error below

			if ((ts = _def[ftm.im.OrthPath]).IsValid)
			{
				// The orthography slot can hold a single string value...
				String s = tu.GetStringValue(ts.FlagsId);
				if (s != null)
					words = new[] { s };
				else
				{
					// ...or a list (multi-word entries); list items which are not strings come back null
					// from GetStringValue and are filtered out below.
					var rgw = ts.ListSlots;
					words = new String[rgw.Length];
					bool f_any_null = false;
					for (int i = 0; i < rgw.Length; i++)
						f_any_null |= (words[i] = tu.GetStringValue(rgw[i].FlagsId)) == null;
					if (f_any_null)
						words = words.Where(w => w != null).ToArray();
				}
			}

			// Fix: also reject a zero-length result. Previously only 'words == null' was checked, so a list
			// orthography containing no usable strings silently yielded an entry with an empty word array.
			if (words == null || words.Length == 0)
				throw new TfsException("Lexical entry '{0}' does not have any orthography at the path '{1}'",
					Name,
					em.AgreeConfig.Grammar.OrthPath);
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// Expands the entry's TFS and, on the first expansion only, records the relation lookup keys (and the
		/// index of the indexed relation, when one is reported) from the expanded structure.
		/// </summary>
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		protected override Tfs EnsureExpanded()
		{
			var lex = em.lex;
			if (lex == null)
				throw new InvalidOperationException("cannot expand a lexical entry before initializing EntryMgr.Lexicon");	// was bare 'Exception'; this is a call-ordering error

			var exp = base.EnsureExpanded();

			int _ix;
			if (_rel_keys == null)
			{
				_rel_keys = exp._get_rel_keys(out _ix);
				if (_ix != -1)
					i_indexed_rel = _ix;	// -1 means no indexed relation reported; keep the default (0)
			}
			else
				// Re-expansion must always yield the same relation keys as the first expansion.
				Debug.Assert(_rel_keys.Cast<LexLookupKey>().SequenceEqual(exp._get_rel_keys(out _ix)));

			return exp;
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary> lexical entry has no daughters </summary>
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		[DebuggerBrowsable(DebuggerBrowsableState.Never)]
		public int DaughterArity { get { return 0; } }

		// Lazily populated by EnsureExpanded(); null until the entry has been expanded at least once.
		LexLookupKey[] _rel_keys;
		[DebuggerBrowsable(DebuggerBrowsableState.Never)]
		public LexLookupKey[] RelKeys
		{
			get
			{
				LexLookupKey[] _tmp;
				if ((_tmp = _rel_keys) == null)
				{
					// Touching 'Expanded' drives EnsureExpanded(), which sets _rel_keys as a side effect.
					var _ = this.Expanded;
					_tmp = _rel_keys;
				}
				return _tmp;
			}
		}

		// Index into _rel_keys of the indexed relation; meaningful only when _rel_keys is non-empty.
		int i_indexed_rel;
		public LexLookupKey IndexedRelation
		{
			get
			{
				LexLookupKey[] _tmp;
				if ((_tmp = _rel_keys) == null)
				{
					// Same lazy-expansion trigger as RelKeys.
					var _ = this.Expanded;
					_tmp = _rel_keys;
				}
				return _tmp.Length == 0 ? LexLookupKey.NotValid : _tmp[i_indexed_rel];
			}
		}

		// NOTE(review): dead code below ('#if false') — an earlier Edge.Flag-based relation-predicate scheme,
		// retained for reference; superseded by the LexLookupKey members above.
#if false
		Edge.Flag[] _rel_preds;
		[DebuggerBrowsable(DebuggerBrowsableState.Never)]
		public Edge.Flag[] RelPreds
		{
			get
			{
				if (_rel_preds == null)
					EnsureExpanded();
				return _rel_preds;
			}
		}
				/// <summary>
		/// lightweight pred extraction
		/// </summary>
		public unsafe Edge.Flag[] _get_rel_preds(Tfs exp)
		{
			Edge.Flag* p_base = stackalloc Edge.Flag[100], p = p_base;
			int mx, m = mrsm.FullRelsPath.GetMark(exp);
			while (m != 0 && exp.TryGetFlagsMark(im.f_ix_list_head, m, out mx) != 0)
			{
				*p++ = exp.TryGetFlagsMark(mrsm.ifeat_pred, mx, out mx) & Edge.Flag.IdResolve;

				if (exp.TryGetFlagsMark(im.f_ix_list_tail, m, out m) == 0 || m == 0)
					throw new Exception();
			}
			int c = (int)(p - p_base);
			if (c == 0)
				return Collection<Edge.Flag>.None;
			Edge.Flag[] _tmp = new Edge.Flag[c];
			p = p_base;
			for (int i = 0; i < c; i++)
				_tmp[i] = *p++;
			return _tmp;
		}

		Edge.Flag _get_rel_pred_or_carg(TfsSlot ts)
		{
			var q = ts[em.lex.ifeat_carg].EdgeFlag;
			if (q > 0)
				return q;

			return ts[mrsm.ifeat_pred].EdgeFlag;
		}

		public Edge.Flag IndexedRelation
		{
			get
			{
				var rels = RelPreds();
				if (rels.Length == 0)
					return Edge.Flag.Bottom;

				//if (rels.Length == 1)
				return rels[0];

				//Relation rep_rel = rels.FirstOrDefault(r => r is ICarg);
				//if (rep_rel != null)
				//	return rep_rel;

				//var rg_argn = rels.Where(r => r is IArgN || r is Arg0Relation).ToArray();
				//if (rg_argn.Length == 1)
				//	return rg_argn[0];

				//if (rg_argn.Length > 1)
				//	return rg_argn.ArgMin(r => r.Type._id);

				throw new Exception();
			}
		}
#endif

		// Cached orthography extracted by the constructor; never null or empty after construction.
		[DebuggerBrowsable(DebuggerBrowsableState.Never)]
		public String[] words;

		///////////////////////////////////////////////////////////////////////
		/// 
		[DebuggerBrowsable(DebuggerBrowsableState.Never)]
		public ITokenizer Tokenizer
		{
			// Resolution order: lexicon-specific tokenizer, then the EntryMgr default, then plain space splitting.
			get { return em.lex.Tokenizer ?? em.DefaultTokenizer ?? SplitTokenizer.SpaceCharTokenizer; }
			set { throw not.valid; }	// not settable on a lexical entry
		}
		/// 
		///////////////////////////////////////////////////////////////////////


		///////////////////////////////////////////////////////////////////////
		/// 
		/// <summary> Surface form read back from the given slot's tokens, falling back to ReferenceForm. </summary>
		public String ReadbackTokensForm(ITfsSlot ts)
		{
			return tmg.ReadbackTokensForm(ts, this) ?? ReferenceForm;
		}
		/// <summary> Canonical surface form of this entry, rebuilt from the cached word array. </summary>
		public String ReferenceForm
		{
			get
			{
				if (words == null || words.Length == 0)
					return String.Empty;
				if (words.Length == 1)
					return words[0];
				return this.Tokenizer.MakeSurface(words);
			}
		}
		public String Text
		{
			get { return ReferenceForm; }
			set { throw not.valid; }	// IString requires a setter; entries are read-only
		}
		/// 
		///////////////////////////////////////////////////////////////////////


		///////////////////////////////////////////////////////////////////////
		///
		/// <summary> Debugger-friendly rendering of the word array, e.g. "[ad] [hoc]". </summary>
		[DebuggerDisplay("{WordsDisplay,nq}")]
		public String WordsDisplay
		{
			get
			{
				if (words == null || words.Length == 0)
					return String.Empty;
				if (words.Length == 1)
					return "[" + words[0] + "]";
				return words.Select(w => "[" + w + "]").StringJoin(" ");
			}
		}
		/// 
		///////////////////////////////////////////////////////////////////////

		// When set, this entry is skipped during generation. Presumably set externally; no writer visible here.
		[DebuggerBrowsable(DebuggerBrowsableState.Never)]
		public bool f_gen_ignore;

		// Slot at the grammar's initial-semantics path within the (lazily) expanded TFS.
		public TfsSlot SemanticsSlot
		{
			get { return this.Expanded[mrsm.InitialSemanticsPath]; }
		}

		[DebuggerBrowsable(DebuggerBrowsableState.Never)]
		public TfsSlot TfsSlotTokens
		{
			get
			{
				// No token-mapping configuration loaded: no tokens slot to report.
				if (tmg == null)
					return TfsSlot.NotValid;
				return this.Expanded[tmg.LexiconTokensPath];
			}
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// Yields one skolemized variant of this lexical entry for each skolemization result the input's
		/// relation-match helper produces against this entry's semantics.
		/// </summary>
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		public IEnumerable<skol_lexent> SkolemizeFor(GenerationInput input)
		{
			foreach (var skr in input.RelMatchHelper.EnumerateSkolemizationResults(input, this.SemanticsSlot))
			{
				var sl = new skol_lexent(input, this, skr);

				//ctrl.bp("skolemized lexical entry", sl);

				yield return sl;
			}
		}

		// Debugging aid: a cached expansion so the debugger does not repeatedly re-expand the entry.
		[DebuggerBrowsable(DebuggerBrowsableState.Never)]
		Tfs _dbg_only;
		public Tfs _debugger_expanded_tfs()
		{
			if (_dbg_only == null)
				_dbg_only = EnsureExpanded();
			return _dbg_only;
		}

		// Debugger-only: the attribute on the dummy field draws a banner row above the expanded TFS view.
		[DebuggerDisplay("┌─── Debugging-only version of expanded TFS ───┐", Name = "┌ ix1 ── mark/FEAT ┐")]
		int zz_dummy = 0;
		[DebuggerBrowsable(DebuggerBrowsableState.RootHidden)]
		Tfs zz_exp_tfs { get { return _debugger_expanded_tfs(); } }

		public override String ToString()
		{
			return String.Format("{0} {1} {2}", Name.PadRight(20), InstanceType.Name.PadRight(20), WordsDisplay);
		}
	};

#if false
	// NOTE(review): the entire class below is preprocessor-disabled and never compiled. It appears to be an
	// abandoned observer-based instantiator that mapped tokens to lexical-entry chart objects; retained for
	// reference only. Consider deleting it (or the whole '#if false' region) once it is confirmed obsolete.
	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	/// <summary>
	/// 
	/// </summary>
	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	public class LexicalEntryInstantiator : m_observable<IActiveObj<DownTraceStem>, IChartObj<ChartSpan>>, IObserver<DownTraceStem>,
		ρρLexicon,
		ρρEntryMgr,
		ρρTfsUnifier
	{
		public LexicalEntryInstantiator(IRtParent parent, IActiveObj<DownTraceStem> lat)
			: base(parent, lat)
		{
		}

		///////////////////////////////////////////////////////////////////////
		///
		EntryMgr em;
		public EntryMgr EntryMgr
		{
			get { return em; }
			set { em = value; }
		}
		///
		///////////////////////////////////////////////////////////////////////


		///////////////////////////////////////////////////////////////////////
		///
		TfsUnifier u;
		public TfsUnifier TfsUnifier
		{
			get { return this.u; }
			set { this.u = value; }
		}
		///
		///////////////////////////////////////////////////////////////////////


		///////////////////////////////////////////////////////////////////////
		/// 
		public Lexicon lex;
		public Lexicon Lexicon
		{
			get { return lex; }
			set { lex = value; }
		}
		/// 
		///////////////////////////////////////////////////////////////////////


		//LookupGenericLEs lgles;
		//MorphologyExecutor ml;

		// Validates that the three required injected properties were set before the task starts.
		protected override void start(IActiveObj<DownTraceStem> lat)
		{
			if (this.lex == null)
				throw new RequiredPropertyNotSetException("Lexicon");
			if (this.u == null)
				throw new RequiredPropertyNotSetException("Unifier");
			if (this.em == null)
				throw new RequiredPropertyNotSetException("EntryMgr");

			//this.lgles = new LookupGenericLEs(this, em, u, this.u.AgreeConfig.TokenMapping.fsp_LexiconTokens);

			//this.ml = new MorphologyExecutor(this, null) { Lexicon = lex, TfsUnifier = u };

			//var co_lat = new ChartObjLattice(this);

			//t1.ConvertLatticeEdges(co_lat, mapping_token_to_lexent);

			//_item(co_lat);

			//t1.

			//_remove_task();
		}


		//		IEnumerable<IChartObj<ChartSpan>> mapping_token_to_lexent(TokenMappingLattice.LatticeEdge e)
		//		{
		//			//var map_tok = e.data;
		//			///// fix fix
		//			//Debug.Assert(e.Source.level == e.Target.level - 1);
		//			//map_tok.TokenSpan = new ChartSpan(e.Source.level, e.Target.level - 1);
		//#if true
		//			var ee = e.data;
		//#else
		//			var ee = e;
		//#endif
		//			foreach (var pxo in ml.AnalyzeToken(ee))
		//				yield return pxo;
		//			//if (map_tok.Trait == TokenMapper.Trait.Generic)
		//			//{
		//			//	foreach (var pxo in lgles.LookupForToken(map_tok))
		//			//		yield return pxo;
		//			//}
		//			//else if (map_tok.Trait == TokenMapper.Trait.Native)
		//			//{
		//			//	foreach (var pxo in ml.AnalyzeToken(map_tok))
		//			//		yield return pxo;
		//			//}
		//			//else
		//			//{
		//			//	throw new Exception("unknown mapping token type");
		//			//}
		//		}

		// For each lexical entry matching the observed stem's surface form, emit a chart item spanning the token.
		void IObserver<DownTraceStem>.OnNext(DownTraceStem dts)
		{
			//Console.WriteLine(dts._dbg_chain);

			var tok = dts.Surface.tok;

			var s_form = dts.form;

			LexicalEntry[] rglx;
			if ((rglx = lex[s_form]) == null)
			{
				return;
				//yield break;
			}

			Debug.Assert(rglx.Length > 0);

			//MappingToken[][] rg_seq = null;


			foreach (LexicalEntry le in rglx)
			{
				//var cs = new ChartSpan(tok);

				//Debug.Assert(cs.Equals(tok.TokenSpan));

				var pxo = new lexent_itok(this, le, (ChartSpan)P.TokenSpan.GetValue(tok));

				_item(pxo);
			}

		}

		void IObserver<DownTraceStem>.OnCompleted() { _remove_task(); }

		void IObserver<DownTraceStem>.OnError(Exception error) { _error(error); }
	};
#endif
}