﻿using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Threading;

using agree.schema;

using alib;
using alib.Debugging;
using alib.Collections;
using alib.Enumerable;
using alib.Combinatorics;
using alib.Array;

namespace agree
{
	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	/// <summary>
	/// DownTrace : inverted tree of morphological transforms from the surface (top) towards the stem (leaves)
	/// Note that inbound functions can be identified by their MappingToken argument; outbound by IChartObj's.
	/// </summary>
	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	[DebuggerDisplay("{_dbg_chain,nq}")]
	public abstract class DownTrace : identity_base
	{
		/// shared empty result, returned by analysis steps that produce nothing
		protected static readonly DownTrace[] None = Collection<DownTrace>.None;

		/// <summary>
		/// Root constructor: starts a new descent (used via DownTraceSurface). Binds the morphology
		/// executor, its lexicon, and the non-empty form under analysis.
		/// </summary>
		protected DownTrace(IIdentity prv, MorphologyExecutor mr, String form)
			: base(prv)
		{
			Debug.Assert(form.Length > 0);
			this.mr = mr;
			this.lex = mr.lex;
			this.form = form;
		}
		/// <summary>
		/// Chaining constructor: extends an existing downtrace with a new (possibly respelled) form.
		/// Changed from 'public' to 'protected': a constructor of an abstract type can only ever be
		/// invoked from a derived type, so 'public' overstated the accessibility (CA1012).
		/// </summary>
		protected DownTrace(DownTrace prev, String form)
			: this(prev.Trace, prev.mr, form)
		{
			this.prev = prev;
		}

		[DebuggerBrowsable(DebuggerBrowsableState.Never)]
		protected readonly Lexicon lex;

		[DebuggerBrowsable(DebuggerBrowsableState.Never)]
		protected readonly MorphologyExecutor mr;

		/// previous node, towards the surface; null only on the root node, which does not
		/// use the chaining constructor
		public readonly DownTrace prev;

		/// the form at this stage of the morphological descent
		public readonly String form;

		/// <summary>
		/// Default: a node contributes no chart objects on the way back out. Overridden by
		/// DownTraceTransform and DownTraceStem.
		/// </summary>
		public virtual IEnumerable<IChartObj<ChartSpan>> OutboundExpand()
		{
			return Collection<IChartObj<ChartSpan>>.None;
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// Return zero or more analysis stacks for the specified surface form. Each lexical analysis stack that is 
		/// gathered has exactly one stem, and is built from the stem (at index 0) "upwards." The source token that is 
		/// supplied is advisory only in the sense that its text does not reflect the current state of morphological 
		/// processing. It is only used for matching the stem against multi-word lexical entries.
		/// </summary>
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		public virtual IEnumerable<DownTrace> analyze()
		{
			bool f_irreg_only = lex.AgreeConfig.Morphology.IrregularFormsOnly;

			/// 1. irregular(..) is an ordinary method (not an iterator), so its body -- including any
			/// clearing of f_irreg_only via the 'ref' parameter -- runs to completion here, before
			/// regular(..) observes the flag below.
			foreach (var x in irregular(ref f_irreg_only))
				yield return x;

			/// 2. regular, affixing lexical rules (possibly suppressed per-rule by irregular-forms-only-p)
			foreach (var x in regular(f_irreg_only))
				yield return x;

			/// 3. single-lemma lexical lookup: always propose the current form as a stem
			foreach (var x in lex_lookup())
				yield return x;

#if false
			///	4. Check non-initial lemma in multi-word lexeme (only if there is a spelling change, otherwise handled 
			/// by step #3. Using HasSpellingChange means non-affixing irregulars are blocked here, so "world series" 
			/// will not generate a stack for plural 'series' in this step. If the requirement were changed to HasTransform 
			/// instead, the plural "world series" would be generated but at the expense of creating also another singular 
			/// one, duplicating a (presumably) singular one from step #3. The duplicates scenario seems like the worse 
			/// problem.
			if (HasSpellingChange)
				foreach (var x in mwe_lookup())
					yield return x;
#endif
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// 1. Irregular affix (or non-affixing transform)
		/// At this point we are only allowing a single irregular inflection to be added to the downtrace, but
		/// this is now easy to reconfigure.
		/// propagate downwards towards stem first, and only unify with the stacks that come back, if any
		/// Deliberately not an iterator: the 'ref' flag update must be visible to the caller immediately
		/// upon return (C# forbids 'ref' parameters in iterators anyway).
		/// </summary>
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		IEnumerable<DownTrace> irregular(ref bool f_irreg_only)
		{
			var irregs = lex.mph.irregs;
			if (irregs == null || irregs.irreg_fwd == null || this.HasIrregular)
				goto no_irregs;

			Irregulars.RuleSurf[] rgrs;
			if (irregs.irreg_fwd.TryGetValue(form, out rgrs))
				return rgrs.Select(iri => new DownTraceIrregular(this, iri));

		no_irregs:
			/// mere availability of an irregular form will block all regular affixing rules if 
			/// irregular-forms-only-p is enabled. Here, that didn't happen, so permit regulars now.
			f_irreg_only = false;
			return None;
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// 2. Regular, affixing lexical rules. Only subrules whose regex replacement actually changes
		/// the spelling contribute nodes; an irregular already recorded for the same rule/result blocks
		/// the regular derivation when irregular-forms-only-p is in effect.
		/// </summary>
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		IEnumerable<DownTrace> regular(bool f_irreg_only)
		{
			foreach (MorphologicalRule rule in lex.mph.morph_lexrules)
			{
				foreach (MorphologySubrule subrule in rule.Subrules)
				{
					String newform = subrule.regex.Replace(form, subrule.replace);
					if (newform != form)
					{
						/// a replacement that consumes the whole form is only admitted when configured
						if (newform.Length == 0 && !lex.AgreeConfig.Morphology.AllowEmptyRegularForms)
							continue;

						/// irregular forms only?
						if (f_irreg_only && Surface.HasIrreg(rule, newform))
							continue;

						/// propagate downwards towards stem first, and only unify with the stacks that come back, if any
						yield return new DownTraceRegular(this, newform, rule, subrule);
					}
				}
			}
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// 3. single-lemma lexical lookup: propose the current form, unchanged, as a stem leaf
		/// </summary>
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		IEnumerable<DownTrace> lex_lookup()
		{
			yield return new DownTraceStem(this);
		}

		/// <summary>
		/// Are there any irregular inflections in this downtrace?
		/// </summary>
		public bool HasIrregular { get { return GetStack().Any(dt => dt is DownTraceIrregular); } }

		/// <summary>
		/// Are there any spelling changes in this downtrace (i.e. any ancestor whose form differs from ours)?
		/// </summary>
		public bool HasSpellingChange { get { return GetStack().Any(dt => dt.form != form); } }

		/// <summary>
		/// The originating surface node. Assumes every chain is rooted in a DownTraceSurface;
		/// otherwise the walk would dereference a null 'prev'.
		/// </summary>
		public DownTraceSurface Surface
		{
			get
			{
				DownTrace dt = this;
				while (!(dt is DownTraceSurface))
					dt = dt.prev;
				return (DownTraceSurface)dt;
			}
		}

		/// <summary>
		/// Each DownTrace is polymorphic with the (reverse of) a linked list that it is at the end of. 
		/// This linked list represents the path so far--in a tree of all such paths--from the surface 
		/// token in the current analysis descent. Enumeration starts at this node and proceeds toward
		/// the surface root.
		/// </summary>
		public IEnumerable<DownTrace> GetStack()
		{
			DownTrace dt = this;
			do
				yield return dt;
			while ((dt = dt.prev) != null);
		}

		/// type name with the common "DownTrace" prefix stripped, e.g. "Surface", "Stem"
		[DebuggerBrowsable(DebuggerBrowsableState.Never)]
		public String ShortName { get { return this.GetType().Name.Replace("DownTrace", ""); } }

		[DebuggerBrowsable(DebuggerBrowsableState.Never)]
		public String ShortInfo { get { return String.Format("{0} \"{1}\"", ShortName, form); } }

		public override String ToString() { return ShortInfo; }

#if DEBUG
		/// debugger-only rendering of the full chain from this node back to the surface
		[DebuggerBrowsable(DebuggerBrowsableState.Never)]
		public String _dbg_chain
		{
			get { return GetStack().Select(dt => "[" + dt.ToString() + "]").StringJoin(" → "); }
		}

		[DebuggerBrowsable(DebuggerBrowsableState.RootHidden)]
		protected DownTrace[] _dbg_display { get { return GetStack().ToArray(); } }
#endif
	};


	///////////////////////////////////////////////////////////////////////
	/// 
	public sealed class DownTraceSurface : DownTrace
	{
		/// <summary>
		/// Root of a morphological descent; its form is taken from the token's surface text.
		/// </summary>
		public DownTraceSurface(MorphologyExecutor mr, IWordToken tok)
			: base(tok, mr, (String)P.SurfaceForm.GetValue(tok))
		{
			this.tok = tok;
		}
		/// the surface token which initiated this analysis descent
		public readonly IWordToken tok;

		/// any/all irregular form/rule tuples are recorded in the originating surface, where all derived downtrace
		/// branches can see the common set, so that regular affixing doesn't duplicate
		public List<DownTraceIrregular> irregs;

		/// <summary>
		/// Record one applied irregular transform, lazily creating the shared list.
		/// </summary>
		public void AddIrreg(DownTraceIrregular dti)
		{
			List<DownTraceIrregular> list = irregs;
			if (list == null)
				irregs = list = new List<DownTraceIrregular>();
			list.Add(dti);
		}
		/// <summary>
		/// Has the given rule already produced the given form via an irregular transform
		/// anywhere in this descent?
		/// </summary>
		public bool HasIrreg(MorphologicalRule r, String form)
		{
			if (irregs == null)
				return false;
			foreach (DownTraceIrregular dti in irregs)
				if (dti.rule == r && dti.form == form)
					return true;
			return false;
		}
		public override String ToString()
		{
			return String.Format("{0} {1}", base.ToString(), tok);
		}

		[DebuggerBrowsable(DebuggerBrowsableState.RootHidden)]
		public TfsSlot _dbg_ts
		{
			get
			{
				var ts = tok as ITfsSlot;
				if (ts == null)
					return default(TfsSlot);
				return new TfsSlot(ts);
			}
		}
	};
	/// 
	///////////////////////////////////////////////////////////////////////


	///////////////////////////////////////////////////////////////////////
	/// 
	public abstract class DownTraceTransform : DownTrace
	{
		/// <summary>
		/// A downtrace node which records the application of one lexical rule.
		/// Changed from 'public' to 'protected': a constructor of an abstract type can only ever be
		/// invoked from a derived type, so 'public' overstated the accessibility (CA1012).
		/// </summary>
		protected DownTraceTransform(DownTrace prev, String form, LexicalRule rule)
			: base(prev, form)
		{
			this.rule = rule;
		}
		/// the lexical rule whose application produced this node's form
		public readonly LexicalRule rule;

		/// <summary>
		/// Abandon this branch (no further analysis) if the same rule was already applied in the
		/// contiguous run of transforms directly above this node.
		/// </summary>
		public sealed override IEnumerable<DownTrace> analyze()
		{
			return IsDuplicateTransform ? None : base.analyze();
		}

		/// <summary>
		/// True if this node's rule already appears among the transform nodes immediately above it.
		/// Note the scan stops at the first non-transform ancestor ('prev as DownTraceTransform'
		/// yields null there), so only the contiguous transform chain is examined, not the whole stack.
		/// </summary>
		public bool IsDuplicateTransform
		{
			get
			{
				DownTraceTransform dtt = this;
				while ((dtt = dtt.prev as DownTraceTransform) != null)
					if (dtt.rule == this.rule)
						return true;
				return false;
			}
		}

		/// <summary>
		/// Expand the analyses below this node and apply this node's lexical rule to each result
		/// that licenses it, yielding only the successful applications.
		/// </summary>
		public sealed override IEnumerable<IChartObj<ChartSpan>> OutboundExpand()
		{
			foreach (var pxo_top in mr.ExpandAnalyses(this))
			{
				var lic = pxo_top.ChartLicense;

				/// when the candidate is licensed by another lexical rule, that rule must accept
				/// this node's rule as its mother
				var lr = lic as LexicalRule;
				if (lr != null && !lr.IsCompatibleKeyMother(rule))
					continue;

				var pxo = mr.TryApplyLexicalRule(pxo_top, lic as LexicalEntry, rule);
				if (pxo != null)
					yield return pxo;
			}
		}

		public override String ToString()
		{
			return String.Format("{0} {1}", base.ToString(), rule.Name);
		}
	};
	/// 
	///////////////////////////////////////////////////////////////////////


	///////////////////////////////////////////////////////////////////////
	/// 
	public sealed class DownTraceIrregular : DownTraceTransform
	{
		/// <summary>
		/// One irregular (table-driven) transform step: the surface form and rule come from an
		/// irregulars-table entry. Registers itself with the originating surface node so that all
		/// branches of the descent share one set of applied irregulars (used to block duplicate
		/// regular affixing; see DownTraceSurface.HasIrreg).
		/// </summary>
		public DownTraceIrregular(DownTrace prev, Irregulars.RuleSurf rs)
			: base(prev, rs.surf, rs.rule)
		{
			Surface.AddIrreg(this);
		}
	};
	/// 
	///////////////////////////////////////////////////////////////////////


	///////////////////////////////////////////////////////////////////////
	/// 
	public sealed class DownTraceRegular : DownTraceTransform
	{
		/// <summary>
		/// One regular, affixing transform step: 'form' is the result of applying the given
		/// subrule's regex replacement under the given lexical rule (see DownTrace.regular).
		/// </summary>
		public DownTraceRegular(DownTrace prev, String form, LexicalRule rule, MorphologySubrule sr)
			: base(prev, form, rule)
		{
			this.sr = sr;
		}
		/// the specific subrule (regex + replacement) which produced this node's form
		public readonly MorphologySubrule sr;
	};
	/// 
	///////////////////////////////////////////////////////////////////////


	///////////////////////////////////////////////////////////////////////
	/// 
	public sealed class DownTraceStem : DownTrace
	{
		/// <summary>
		/// Leaf of the descent: proposes the parent's form, unchanged, as a candidate stem
		/// to be checked against the lexicon.
		/// </summary>
		public DownTraceStem(DownTrace prev)
			: base(prev, prev.form)
		{
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// Look the stem up in the lexicon. When the surface token is a MappingToken (token-mapping
		/// mode), perform the full unification lookups -- native entries plus generic entries;
		/// otherwise the simple orthographic lookup suffices.
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		public override IEnumerable<IChartObj<ChartSpan>> OutboundExpand()
		{
			var mapping_tok = Surface.tok as MappingToken;

			//Console.WriteLine(dts._dbg_chain);

			if (mapping_tok == null)
				return lookup_simple();

			Debug.Assert(mapping_tok.TokenSpan == Surface.tok.TokenSpan);
#if true
			return lookup_native(mapping_tok).Concat(lookup_generic(mapping_tok));
#else
			return lookup_native(dts, mapping_tok);
#endif
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// Try each generic lexical entry against the mapping token: pair the token with both the
		/// entry's FIRST token slot and its last-token path, and yield the entry whenever the
		/// unification into the expanded shell succeeds.
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		IEnumerable<IChartObj<ChartSpan>> lookup_generic(MappingToken mapping_tok)
		{
			var im = lex.im;

			foreach (var gle in lex.GenericLexicalEntries)
			{
				var rgpr = new Pairing<ITfsSlot>[2];
				rgpr[0] = new Pairing<ITfsSlot>(gle.TfsSlotTokens[im.FIRST], mapping_tok);
				rgpr[1] = new Pairing<ITfsSlot>(gle.Expanded[lex.em.TokenMappingGeometry.LexiconLastTokenPath], mapping_tok);

				var out_tfs = Unification.UnifySectionsIntoShell(gle.Expanded, rgpr);

				if (out_tfs != null)
				{
					out_tfs._set_trace(mr);
					/// counter is shared across analysis threads, hence the interlocked increment
					Interlocked.Increment(ref mr.c_generic_out);

					yield return new lexent_itok(mr, gle, mapping_tok.TokenSpan, out_tfs);
				}
			}
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		// multi-(mapping-)token vs. multi-word ?
		// Native-entry lookup: for single-lemma entries, unify the mapping token into the entry;
		// for multi-word entries, gather candidate tokens for each subsequent lemma position
		// (matched by span position and lexically-compared text) and try every combination.
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		IEnumerable<IChartObj<ChartSpan>> lookup_native(MappingToken mapping_tok)
		{
			LexicalEntry[] rglx;
			if ((rglx = lex[form]) == null)
				yield break;

			var im = lex.im;
			int le_lemma_count;
			Tfs out_tfs;

			var input_toks = mr._tmp_toks;

			foreach (LexicalEntry le in rglx)
			{
				out_tfs = null;
				var sp = mapping_tok.TokenSpan;

				if ((le_lemma_count = le.words.Length) == 1)
				{
					/// entries whose token slot is still the underspecified list type are not
					/// handled; the commented-out code below sketches the intended treatment
					if (le.TfsSlotTokens.Type == im.tt_list)
					{
						throw new Exception();
						//var tmp_tfs = mr.UnifySection(im.tt_ne_list.Expanded[im.FIRST], mapping_tok);
						//out_tfs = mr.UnifySection(le.TfsSlotTokens, tmp_tfs);
					}
					else
					{
						/// single-lemma case: same dual pairing as lookup_generic
						var rgpr = new Pairing<ITfsSlot>[2];
						rgpr[0] = new Pairing<ITfsSlot>(le.TfsSlotTokens[im.FIRST], mapping_tok);
						rgpr[1] = new Pairing<ITfsSlot>(le.Expanded[lex.em.TokenMappingGeometry.LexiconLastTokenPath], mapping_tok);

						out_tfs = Unification.UnifySectionsIntoShell(le.Expanded, rgpr);
					}

					if (out_tfs != null)
					{
						out_tfs._set_trace(mr);
						Interlocked.Increment(ref mr.c_native_out);

						yield return new lexent_itok(mr, le, sp, out_tfs);
					}
				}
				else if (sp.StartIndex + le_lemma_count <= mr.i_limit)
				{
					/// multi-word entry: rg_seq[i] collects, for lemma position i, every input token
					/// which sits at the right span offset and matches the entry's i-th word
					ITfsSlot[][] rg_seq = null;
					int i, j;

					for (i = 1; i < le_lemma_count; i++)
					{
						var cw = le.words[i];
						for (j = 0; j < input_toks.Length; j++)
						{
							var it = (MappingToken)input_toks[j];
							if (it.TokenSpan.Length == 1 && it.TokenSpan.StartIndex == sp.StartIndex + i && lex.lex_compare.Equals(it.Text, cw))
							{
								if (rg_seq == null)
									rg_seq = new ITfsSlot[le_lemma_count][];
								alib.Array.arr.Append(ref rg_seq[i], it);
							}
						}
						/// no candidate token for this lemma position: the entry cannot match
						if (rg_seq == null || rg_seq[i] == null)
							goto no_mwe;
					}

					/// position 0 is the current stem's own mapping token
					rg_seq[0] = new ITfsSlot[] { mapping_tok };

					/// try every combination of candidate tokens, one per lemma position
					foreach (var xp in _comb_ext.VariableCrossProduct(rg_seq))
					{
						out_tfs = Unification.UnifySectionsIntoShell(le.Expanded, install_mwe_token_list(le, xp));
						if (out_tfs != null)
						{
							out_tfs._set_trace(mr);
							Interlocked.Increment(ref mr.c_native_out);
							Debug.Print("mwe '{0}' -> {1} {2}", form, le.Name, le.WordsDisplay);

							yield return new lexent_itok(mr, le, new ChartSpan(sp.StartIndex, (uint)le_lemma_count), out_tfs);
						}
					}
				}
			no_mwe:
				;
			}
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// Build the pairing list which installs one multi-word token combination into the entry's
		/// token list: each token is paired with a successive FIRST slot, the RESTs are chained with
		/// freshly-allocated ne_list shells, and finally the last token is also paired with the
		/// lexicon's last-token path. Produces le.words.Length * 2 pairings in total.
		/// NOTE(review): assumes 'xp' is non-empty (the caller always seeds rg_seq[0]); an empty
		/// sequence would read e.Current before a successful MoveNext.
		/// </summary>
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		Pairing<ITfsSlot>[] install_mwe_token_list(LexicalEntry le, IEnumerable<ITfsSlot> xp)
		{
			var rgpr = new Pairing<ITfsSlot>[le.words.Length * 2];
			ITfsSlot cur, prv = le.TfsSlotTokens;

			int i = 0;
			var e = xp.GetEnumerator();
			e.MoveNext();
			while (true)
			{
				rgpr[i++] = new Pairing<ITfsSlot>(prv.Next(lex.im.FIRST), cur = e.Current);

				if (!e.MoveNext())
					break;

				rgpr[i++] = new Pairing<ITfsSlot>(prv.Next(lex.im.REST), prv = new ArrayTfs(mr, lex.im.tt_ne_list.Expanded));
			}

			/// 'cur' still holds the last token from the loop above
			rgpr[i++] = new Pairing<ITfsSlot>(le.Expanded[lex.em.TokenMappingGeometry.LexiconLastTokenPath], cur);

			return rgpr;
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// in non-token-mapping mode, the simple match of the word to the orthography (which is implicit 
		/// in the lookup already undertaken) is sufficient to license the lexical entry. Not specifying 
		/// a tfs in the following constructor ends up referencing the lexical entry's canonical expanded 
		/// TFS
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		IEnumerable<IChartObj<ChartSpan>> lookup_simple()
		{
			LexicalEntry[] rglx;
			if ((rglx = lex[form]) == null)
				yield break;

			foreach (LexicalEntry le in rglx)
			{
				if (le.words.Length == 1)
				{
					Interlocked.Increment(ref mr.c_native_out);
					yield return new lexent_itok(mr, le, Surface.tok.TokenSpan);
				}
				else
				{
					/// multi-word entries are not handled in this mode
					/// see mwe_lookup() or lex_lookup() in downtrace.cs?
				}
			}
		}

#if false
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// Check non-initial lemma in multi-word lexeme. See downtrace.cs
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		IEnumerable<DownTrace> mwe_lookup()
		{
			Debug.Print("mwe:" + this.form);
			yield break;

			MappingToken tok = null;

			foreach (var mwe in lex.mwe_lookup[form])
			{
				LexicalEntry le = mwe.lex_entry;
				tok = tok ?? Surface.tok;

				/// check to the left. There will always be at least one.
				foreach (var adj_left in tok.LeftAdjacencySequences()
											.Select(ie => ie.TakeTail(mwe.index).ToArray())
											.Where(rgt => rgt.Length == mwe.index))
				{
					for (int j = 0; j < mwe.index; j++)
						if (!AgreeConfig.Lexicon.LexicalComparison.Equals(adj_left[j].Text, le.words[j]))
							goto no_match;

					/// check to the right. There may be zero or more
					int rem = le.words.Length - (mwe.index + 1);
					if (rem == 0)
					{
						var sp = new ChartSpan(adj_left[0].TokenSpan.StartIndex, tok.TokenSpan.EndIndex);
						yield return new DownTraceStem(this, le, sp);
					}
					else
					{
						foreach (var adj_right in tok.RightAdjacencySequences()
													.Select(ie => ie.Take(rem).ToArray())
													.Where(rgt => rgt.Length == rem))
						{
							for (int j = 0; j < rem; j++)
								if (!AgreeConfig.Lexicon.LexicalComparison.Equals(adj_right[j].Text, le.words[mwe.index + 1 + j]))
									goto no_match;

							Nop.CodeCoverage();
							var sp = new ChartSpan(adj_left[0].TokenSpan.StartIndex, adj_right[rem - 1].TokenSpan.EndIndex);
							yield return new DownTraceStem(this, le, sp);
						}
					}
				no_match:
					;
				}
			}
		}
#endif
	};
	/// 
	///////////////////////////////////////////////////////////////////////
}