﻿//#define rewriter_debugging

using System;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.ComponentModel.DataAnnotations;
using System.Diagnostics;
using System.Linq;
using System.Reactive.Linq;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using System.Windows.Markup;

using alib;
using alib.Collections;
using alib.Collections.ReadOnly;
using alib.Debugging;
using alib.Dictionary;
using alib.Enumerable;

using agree.schema;
using agree.rewriter;

namespace agree
{
	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	/// <summary>
	/// Create and assign lattice vertices to a sequence of token mapping tokens
	/// </summary>
	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	public sealed class VertexTaggerF : composition.f_primitive<IIdentList<MappingToken>, TokenMappingLattice>
	{
		public VertexTaggerF(IRtParent parent)
			: base(parent)
		{
		}

		/// <summary>
		/// Thread the tokens into the lattice as a single linear path: each token becomes an
		/// edge, interior edges joined by freshly created vertices, the path anchored at
		/// <c>StartVertex</c> and <c>EndVertex</c>.
		/// </summary>
		/// <param name="tokens">Tokens to connect; an empty list yields a lattice with no edges.</param>
		/// <returns>The populated lattice.</returns>
		protected override TokenMappingLattice func(IIdentList<MappingToken> tokens)
		{
			var lat = new TokenMappingLattice(tokens);

			int i, c = tokens.Count - 1;
			if (c < 0)
				return lat;

			TokenMappingLattice.LatticeVertex cur, prev = lat.StartVertex;

			// Interior edges: tokens[0 .. c-1], each terminating at a new vertex.
			// NOTE: this was previously a do/while loop, which for a single-token input
			// (c == 0) ran its body once anyway — creating a spurious intermediate vertex
			// and then indexing tokens[1] out of range below. A pre-tested while loop
			// handles c == 0 correctly by skipping straight to the final edge.
			i = 0;
			while (i < c)
			{
				lat.AddEdge(tokens[i], prev, cur = lat.NewVertex());

				prev = cur;
				i++;
			}

			// Final edge (tokens[c]) always terminates at the lattice's end vertex.
			lat.AddEdge(tokens[i], prev, lat.EndVertex);

			return lat;
		}
	};


	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	/// <summary>
	/// Process REPP tokens into YY-ish tokens.
	/// </summary>
	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	public class TokenMapper : m_scalar_base<TokenMappingLattice, TokenMappingLattice>,
		ρρEntryMgr,
		ρρTfsUnifier
	{
		public TokenMapper(IRtParent parent, TokenMappingLattice lat)
			: base(parent, lat)
		{
		}

		/// <summary>Entry manager supplying the rewriter configuration and the token mapping rules.</summary>
		public EntryMgr EntryMgr { get; set; }

		/// <summary>Unifier handed to the lattice before the rewrite is started.</summary>
		public TfsUnifier TfsUnifier { get; set; }

		/// <summary>
		/// Configure the lattice (unifier + rewriter config) and run the token mapping
		/// rules over it, then return the same lattice.
		/// </summary>
		protected override TokenMappingLattice start(TokenMappingLattice lattice)
		{
			lattice.TfsUnifier = TfsUnifier;

			lattice.Config = EntryMgr.AgreeConfig.TokenMapping.RewriterConfig;

			//Unification.f_report_failures = true;
#if rewriter_debugging
			tw = new System.IO.StreamWriter(@"C:\Users\glenn\Desktop\ccm1.txt", false, Encoding.UTF8);
			tw.AutoFlush = true;
			tw.WriteLine(@"reading `pet/english.set'... including `pet/common.set'... including `pet/global.set'... including `pet/repp.set'... including `pet/mrs.set'... loading `english.grm' 
(ERG (trunk)) 96098 types in 3.1 s

[cm] greatest item id before token mapping: 12");
#endif

			lattice.Rewrite(EntryMgr._token_mapping_rules.__get_arr(), EntryMgr.TokenMappingGeometry.ifeat_plus_output);

			return lattice;
		}
#if rewriter_debugging
#warning cleanup debugging code
		public static System.IO.StreamWriter tw;
#endif
	};


	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	/// <summary>
	/// Rewrite lattice over mapping tokens: applies token-mapping rules — including rules
	/// with regex constraints on token surface forms — to edges built from the input tokens.
	/// </summary>
	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	public sealed class TokenMappingLattice : rewriter.regex.RegexRewriter<TokenMappingRule, MappingToken, tmr.SrcElem, tmr.TgtElem>,
		ρρMinimalSpanCount
	{
		public TokenMappingLattice(IIdentity prv)
			: base(prv)
		{
		}

		/// <summary>Element factory: wraps a TFS slot in a MappingToken owned by this lattice.</summary>
		public override MappingToken CreateElement(ITfsSlot ts) { return new MappingToken(this, ts); }

		/// <summary>
		/// Extends the base hypothesis enumeration with regex filtering: when the rule carries
		/// regexes, only hypotheses that pass <see cref="TryMatch"/> survive, each replaced by a
		/// <see cref="TmrHypothesis"/> carrying the captured regex matches.
		/// </summary>
		protected override IEnumerable<Hypothesis> BuildHypotheses(TokenMappingRule rule)
		{
			var hypotheses = base.BuildHypotheses(rule);

			// No regex constraints on this rule: every base hypothesis stands as-is.
			if (!rule.HasRegex)
				return hypotheses;

			// Keep only hypotheses whose elements satisfy the rule's regexes; SelectNotNull
			// drops the nulls returned for non-matching hypotheses.
			return hypotheses.SelectNotNull(h =>
				{
					var th = TryMatch(rule, h);
					if (th != null)
					{
						//TokenMapper.tw.WriteLine("[cm] regex_match({0}, {1})",
						//	rule.ContextInput.Select(crx => crx.ModeIx.ToString() + crx.annots.Select(rx => "-" + rx.match_index.ToString() + "/" + rx.Pattern + "/").StringJoin("-")).StringJoin(","),
						//	h.Select(e => "\"" + e.data.Form + "\"").StringJoin(","));
						return th;
					}
#if false && rewriter_debugging
					var pairing = rule.ContextInput.Select(r_els =>
					{
						var hyp_el = h[r_els.Index];
						return new
						{
							rule_el = r_els,
							hyp_el,
							hyp_tok = hyp_el.data,
						};
					});

					var pr = pairing.First();
					TokenMapper.tw.WriteLine("[cm] checked {0}, arg {1} with chart item {2} ({3})",
						rule.Name,
						pr.rule_el.ModeIx.ToString(),//pairing.Select(pr => pr.rule_el.ModeIx).StringJoin(","),
						pr.hyp_el._id.ToString(),//pairing.Select(pr => pr.hyp_el._id).StringJoin(","),
						"`" + pr.hyp_tok.SurfaceForm + "'"//pairing.Select(pr => "`" + pr.hyp_tok.Form + "'").StringJoin(", ")
					);
#endif
					return null;
				});
		}

		/// <summary>
		/// Run the rule's regex matching against the hypothesis elements; returns a
		/// <see cref="TmrHypothesis"/> wrapping the captured matches, or null when
		/// <c>rule.MatchElements</c> reports no match.
		/// </summary>
		TmrHypothesis TryMatch(TokenMappingRule rule, Hypothesis h)
		{
			var rgm = rule.MatchElements(h);

#if rewriter_debugging
			// Debug trace mimicking PET's "[cm] checked/MATCHED" token-mapping log lines;
			// stops at the first element whose regexes did not all match.
			for (int i = 0; i < rule.ContextInput.Length; i++)
			{
				var srcelem = rule.ContextInput[i];
				var hypelem = h[srcelem.Index];

				var scm = h.__matches == null || srcelem.Any(rxe => h.__matches[rxe.match_index] == null) ? "checked" : "MATCHED";

				TokenMapper.tw.WriteLine("[cm] {0} {1}, arg {2} with chart item {3} ({4})",
						scm,
						rule.Name,
						srcelem.ModeIx.ToString(),
						hypelem._id.ToString(),
						"`" + hypelem.data.SurfaceForm + "'");

				if (scm == "checked")
					break;
			}
#endif

			return rgm == null ? null : new TmrHypothesis(this, rule, h, rgm);
		}

		/// <summary>Edges display their token's surface form; all other items defer to the base.</summary>
		public override string GetItemDisplay(LatticeItem item)
		{
			if (item is LatticeEdge)
				return ((LatticeEdge)item).data.SurfaceForm;
			return base.GetItemDisplay(item);
		}

		/// <summary>
		/// Hypothesis augmented with the regex <see cref="Match"/> results captured when the
		/// rule matched; the matches are replayed when mods are applied to a unification result.
		/// </summary>
		public sealed class TmrHypothesis : Hypothesis
		{
			public TmrHypothesis(TokenMappingLattice tmrw, TokenMappingRule rule, Hypothesis to_copy, Match[] rgm)
				: base(tmrw, rule, to_copy)
			{
				// One Match slot per regex declared by the rule.
				Debug.Assert(rgm.Length == rule.RegexCount);
				this.rgm = rgm;
			}
			// Captured regex matches, parallel to the rule's regexes.
			readonly Match[] rgm;

			/// <summary>Apply the rule's modifications, substituting captured regex groups.</summary>
			public override void ApplyModsToUnifyResult(Tfs unify_result)
			{
				rule.Apply(unify_result, rgm);
			}
		};

		///////////////////////////////////////////////////////////////////////
		///
		/// Effectively read-only: derived from the end vertex's level. The setter always
		/// throws; presumably it exists only to satisfy the ρρMinimalSpanCount interface
		/// — TODO confirm.
		[DebuggerBrowsable(DebuggerBrowsableState.Never)]
		public int MinimalSpanCount
		{
			get { return EndVertex.Level; }
			set { throw alib.not.valid; }
		}
		///
		///////////////////////////////////////////////////////////////////////
	};
}
