﻿using System;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.ComponentModel.DataAnnotations;
using System.Diagnostics;
using System.Linq;
using System.Reactive.Linq;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using System.Windows.Markup;

using agree.schema;
using agree.rewriter;

using alib;
using alib.Array;
using alib.Collections;
using alib.Collections.ReadOnly;
using alib.Debugging;
using alib.Dictionary;
using alib.Enumerable;
using alib.Lattice;

namespace agree
{
	using rewriter.regex;
	using SysRegex = System.Text.RegularExpressions.Regex;
	using SysMatch = System.Text.RegularExpressions.Match;

	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	/// <summary>
	/// One positional slot (CONTEXT or INPUT) of a <see cref="LexicalFilteringRule"/>. During construction it claims,
	/// from the owning rule's raw regex list, every regex addressed to this slot's mode/index; <see cref="IsMatch"/>
	/// then tests a candidate token slot against all claimed regexes, requiring each to bind the full string value.
	/// </summary>
	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	[DebuggerDisplay("{ToString(),nq}", Name = "{mode_ix.ToString(),nq}")]
	public class LfrSlot : Element
	{
		// Shared empty-array sentinel used instead of null for "no slots".
		[DebuggerBrowsable(DebuggerBrowsableState.Never)]
		public static LfrSlot[] None = alib.Collections.Collection<LfrSlot>.None;

		public LfrSlot(TfsSlot ts, LexicalFilteringRule lfr, ModeIx mix, int ci_index)
			: base(ts, mix, ci_index)
		{
			this.lfr = lfr;

			// Claim (move) every regex in the rule's shared raw match list whose mode/index targets this
			// slot: assign it the next rule-wide match index, append it to this slot's 'rgrx' array, and
			// remove it from the shared list. Entries addressed to other slots are left in place so that
			// their own LfrSlot can claim them later; the rule asserts the list ends up empty.
			var L = lfr.match_raw;
			if (L != null)
			{
				int i = 0;
				// The list is mutated during the scan, hence the manual index which only advances
				// when the current entry was not removed.
				while (i < L.Count)
				{
					var rx = L[i];
					if (rx.mode_ix != mix)
						i++;
					else
					{
						rx.match_index = lfr.RegexCount++;
						// presumably grows 'rgrx' in place, creating it on first append — semantics of
						// alib's arr.Append; TODO confirm
						arr.Append(ref rgrx, rx);
						L.RemoveAt(i);
					}
				}
			}
		}
		readonly LexicalFilteringRule lfr;		// owning rule
		readonly Regex[] rgrx;					// regexes claimed by this slot; null when none were addressed to it

		/// <summary>
		/// True if every regex claimed by this slot matches — and fully spans — the string value obtained from
		/// <paramref name="ts_input"/> via the regex's feature path. A slot that claimed no regexes always matches.
		/// Returns false when the feature path yields no string value.
		/// </summary>
		public bool IsMatch(ITfsSlot ts_input)
		{
			if (rgrx != null)
			{
				for (int j = 0; j < rgrx.Length; j++)
				{
					var a = rgrx[j];

					var s_hyp = a.fsp[ts_input].StringValue;
					if (s_hyp == null)
						return false;

					var m = a.Match(s_hyp);
					if (!m.Success)
						return false;

					/// DELPH-IN token mapping rules (namely, ditch_punctuation_tmr) interpret
					/// ^a|b$ as ^(a|b)$ as opposed to ^a.*|.*b$. Thus "xb" should not result 
					/// in a match 'b'. We can effect this by discarding this match if it does 
					/// not bind the full input string.
					if (m.Groups[0].Value != s_hyp)
						return false;
				}
			}
			return true;
		}
	};

	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	/// <summary>
	/// A regex rewriter rule specialized for lexical filtering: it has CONTEXT and INPUT slot arrays but never
	/// produces OUTPUT slots, and it must not carry any replacement regexes.
	/// </summary>
	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	[DebuggerDisplay("{ToString(),nq}")]
	public sealed class LexicalFilteringRule : RegexRewriterRule<LfrSlot, LfrSlot>
	{
		public LexicalFilteringRule(Grammar g, Type t, ConsDefs consdef)
			: base(g, t, consdef)
		{
		}

		protected override void AnalyzeRuleBehavior(Tfs tfs)
		{
			base.AnalyzeRuleBehavior(tfs);

			var geom = tfs.em.TokenMappingGeometry;

			// Build the slot arrays; each LfrSlot receives a rule-wide context-sensitivity index,
			// CONTEXT slots numbered before INPUT slots. Filtering rules never emit output.
			int next_ci = 0;
			Context = prepare_src_toks(tfs[geom.ifeat_plus_context], Element.Mode.CONTEXT, ref next_ci);
			Input = prepare_src_toks(tfs[geom.ifeat_plus_input], Element.Mode.INPUT, ref next_ci);
			Output = LfrSlot.None;

			// A filtering rule can match but never rewrites, so replacement regexes are forbidden...
			Debug.Assert(repl_raw == null);
			if (match_raw != null)
			{
				// ...and every match regex must have been claimed by some slot during construction.
				Debug.Assert(match_raw.Count == 0);
				match_raw = null;
			}
		}

		// Wrap each list slot of 'ts' in an LfrSlot carrying its mode, position, and running c.s. index.
		LfrSlot[] prepare_src_toks(TfsSlot ts, Element.Mode mode, ref int ci_index)
		{
			TfsSlot[] slots = ts.ListSlots;
			int c = slots.Length;
			if (c == 0)
				return LfrSlot.None;

			var result = new LfrSlot[c];
			for (int ix = 0; ix < c; ix++)
				result[ix] = new LfrSlot(slots[ix], this, new ModeIx(mode, ix), ci_index++);
			return result;
		}
	};


	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	/// <summary>
	/// Regex rewriter over a lattice of chart objects which filters hypotheses: a hypothesis survives only if every
	/// CONTEXT/INPUT slot's regexes match the corresponding bound chart object.
	/// </summary>
	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	public sealed class LexicalFilteringRewriter : rewriter.regex.RegexRewriter<LexicalFilteringRule, IChartObj<ChartSpan>, LfrSlot, LfrSlot>
	{
		public LexicalFilteringRewriter(IIdentity prv)
			: base(prv)
		{
		}

		/// <summary>Lattice elements are supplied externally (via AddEdge); this rewriter never creates them.</summary>
		public override IChartObj<ChartSpan> CreateElement(ITfsSlot ts) { throw not.valid; }

		protected override IEnumerable<Hypothesis> BuildHypotheses(LexicalFilteringRule rule)
		{
			// Sanity check: a well-formed lattice has no dangling edges.
			if (Edges.Any(e => e.Source == null || e.Target == null))
				throw new InvalidOperationException("lexical filtering lattice contains an edge with a null source or target vertex");

			foreach (var hyp in base.BuildHypotheses(rule))
			{
				// Keep the hypothesis unless some slot's regexes reject the chart object bound at its position.
				if (!rule.HasRegex || rule.ContextInput.All((elem, ix) => elem.IsMatch(hyp[ix].data)))
					yield return hyp;

				else
					Nop.X();	// hypothesis rejected; breakpoint hook for debugging
#if false
				//see ERG 'lfr.tdl...'	-- try "What's the matter?" maybe ??
				if (rule.Name.EndsWith("_punct_lfr"))
				{
					var form = hyp[0].data.Next("ORTH.FORM").StringValue;

					if ((rule.Name[8] == 'l' ? left_punc_regex : right_punc_regex).IsMatch(form))
						yield return hyp;
				}
				else
				{
					yield return hyp;
				}
#endif
			}
		}
	};

	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	/// <summary>
	/// Builds a <see cref="LexicalFilteringRewriter"/> lattice from a list of chart objects: one vertex per token
	/// boundary, one edge per chart object spanning that object's proximal-context extent.
	/// </summary>
	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	public class LexicalFilterLatticeBuilder : m_scalar_base<IIdentList<IChartObj<ChartSpan>>, LexicalFilteringRewriter>
	{
		public LexicalFilterLatticeBuilder(IRtParent parent, IIdentList<IChartObj<ChartSpan>> objs)
			: base(parent, objs)
		{
		}

		/// <exception cref="ArgumentException">thrown when <paramref name="objs"/> is empty.</exception>
		protected override LexicalFilteringRewriter start(IIdentList<IChartObj<ChartSpan>> objs)
		{
			if (objs.Count == 0)
				throw new ArgumentException("can't make a lattice with no chartobjs", "objs");

			var lfr = new LexicalFilteringRewriter(this);

			// Boundary indices run 0..end_idx inclusive: EndIndex is the last occupied position,
			// so the edge for that object terminates at EndIndex + 1.
			int end_idx = objs.Max(co => co.ProximalContext.EndIndex) + 1;

			// vert_map[0] is the lattice start, vert_map[end_idx] the lattice end; interior
			// boundaries get fresh vertices.
			var vert_map = new LexicalFilteringRewriter.LatticeVertex[end_idx + 1];
			int i = 0;
			vert_map[i] = lfr.StartVertex;
			while (++i < end_idx)
				vert_map[i] = lfr.NewVertex();
			vert_map[i] = lfr.EndVertex;

			// One lattice edge per chart object, spanning its proximal context.
			foreach (var co in objs)
			{
				var ctx = co.ProximalContext;
				lfr.AddEdge(co, vert_map[ctx.StartIndex], vert_map[ctx.EndIndex + 1]);
			}
			return lfr;
		}
	};


	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	/// <summary>
	/// Runs the grammar's lexical filtering rules against a prepared lattice. The entry manager and unifier are
	/// injected through the ρρ-interface properties before <see cref="start"/> executes.
	/// </summary>
	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	public class LexicalFilterExecutor : m_scalar_base<LexicalFilteringRewriter, LexicalFilteringRewriter>,
		ρρEntryMgr,
		ρρTfsUnifier,
		ρρOutputItemsCount
	{
		public LexicalFilterExecutor(IRtParent parent, LexicalFilteringRewriter lfr)
			: base(parent, lfr)
		{
		}

		///////////////////////////////////////////////////////////////////////
		/// injected: source of the lexical filtering rules and token mapping geometry
		EntryMgr em;
		public EntryMgr EntryMgr
		{
			get { return em; }
			set { em = value; }
		}
		///
		///////////////////////////////////////////////////////////////////////


		///////////////////////////////////////////////////////////////////////
		/// injected: unifier handed to the rewriter before it runs
		TfsUnifier u;
		public TfsUnifier TfsUnifier
		{
			get { return this.u; }
			set { this.u = value; }
		}
		///
		///////////////////////////////////////////////////////////////////////


		///////////////////////////////////////////////////////////////////////
		/// published: number of lattice edges remaining after filtering
		public int c_out;
		public int OutputItemsCount
		{
			get { return c_out; }
		}
		///
		///////////////////////////////////////////////////////////////////////

		/// <summary>
		/// Rewrites the lattice with the grammar's lexical filtering rules and records the surviving edge count.
		/// </summary>
		/// <exception cref="InvalidOperationException">thrown when the grammar defines no lexical filtering rules.</exception>
		protected override LexicalFilteringRewriter start(LexicalFilteringRewriter lfr)
		{
			lfr.TfsUnifier = this.u;

			var lf_rules = em._lexical_filtering_rules.__get_arr();

			if (lf_rules == null || lf_rules.Length == 0)
				throw new InvalidOperationException("there are no lexical filtering rules.");

			lfr.Rewrite(lf_rules, em.TokenMappingGeometry.ifeat_plus_output);

			c_out = lfr.EdgeCount;

			return lfr;
		}
	};

	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	/// <summary>
	/// Projection step: collects the chart object attached to each edge of a filtered lattice into an identity array.
	/// </summary>
	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	public sealed class ChartObjLatticeExtractorF : composition.f_primitive<LexicalFilteringRewriter, IdentArray<IChartObj<ChartSpan>>>
	{
		public ChartObjLatticeExtractorF(IRtParent parent)
			: base(parent)
		{
		}

		protected override IdentArray<IChartObj<ChartSpan>> func(LexicalFilteringRewriter lat)
		{
			// Gather the payload of every surviving lattice edge, preserving edge enumeration order.
			var chart_objs = new RefList<IChartObj<ChartSpan>>();

			foreach (var edge in lat.Edges)
				chart_objs.Add(edge.data);

			return new IdentArray<IChartObj<ChartSpan>>(lat, chart_objs);
		}
	};
}
