﻿using System;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.ComponentModel.DataAnnotations;
using System.Diagnostics;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using System.Windows.Markup;
using System.Reactive.Linq;

using alib;
using alib.Debugging;
using alib.Enumerable;
using alib.Collections;
using alib.Observable;

using agree.schema;

namespace agree
{
	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	/// <summary>
	/// Looks up lexical entries by surface orthography for each token in an input token list, publishing
	/// one chart object per match. Single-word entries yield a 'lexent_itok' spanning the matched token;
	/// multi-word entries are expanded via 'rightwards_mwe_solutions', yielding one 'lexent_mwe' per
	/// combination of tokens matching the entry's remaining lemmas.
	/// </summary>
	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	public sealed class OrthLexentLookup : m_observable<IIdentList<IWordToken>, IChartObj<ChartSpan>>,
		ρρLexicon,
		ρρMinimalSpanCount
	{
		public OrthLexentLookup(IRtParent parent, IIdentList<IWordToken> toks)
			: base(parent, toks)
		{
			// int.MinValue is the "result pending" sentinel; the true value is computed in start().
			this.span_count = int.MinValue;
		}

		///////////////////////////////////////////////////////////////////////
		/// the lexicon used for orthographic lookup; injected via the property below
		[DebuggerBrowsable(DebuggerBrowsableState.Never)]
		Lexicon lex;
		[DebuggerBrowsable(DebuggerBrowsableState.Never)]
		public Lexicon Lexicon
		{
			get { return lex; }
			set { lex = value; }
		}
		///
		///////////////////////////////////////////////////////////////////////


		///////////////////////////////////////////////////////////////////////
		/// fix: removed the dead '= -1' field initializer; the constructor always overwrites the
		/// field with the int.MinValue sentinel before any reader can observe it, and '-1'
		/// contradicted the sentinel check in MinimalSpanCount.
		public int span_count;
		public int MinimalSpanCount
		{
			get
			{
				// the value is not available until start() has computed it
				if (span_count == int.MinValue)
					throw new ResultPendingException();
				return span_count;
			}
			set { throw alib.not.valid; }
		}
		///
		///////////////////////////////////////////////////////////////////////


		/// <summary>
		/// Computes the span count (one past the highest token end index) and publishes a chart
		/// object for every lexical entry whose orthography matches a token. Multi-word entries are
		/// anchored at the token matching their first lemma and expanded over the remaining lemmas.
		/// </summary>
		protected override void start(IIdentList<IWordToken> words)
		{
			this.span_count = words.Max(_w => _w.TokenSpan.EndIndex) + 1;

			LexicalEntry[] rgle;
			LexicalEntry le;
			IWordToken wt;

			for (int i = 0; i < words.Count; i++)
				if ((rgle = lex[(wt = words[i]).Text]) != null)
					for (int j = 0; j < rgle.Length; j++)
						if ((le = rgle[j]).words.Length == 1)
							_item(new lexent_itok(this, le, wt.TokenSpan));
						else
							foreach (var ic in rightwards_mwe_solutions(words, le.words, 1))
								_item(new lexent_mwe(this, le, ic.Prepend(wt)));

			_remove_task();
		}


		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// Recursively enumerates token sequences whose surface text matches the entry lemmas from
		/// index 'i' onwards, yielding one token collection per complete match.
		/// NOTE(review): despite the name, candidate tokens are drawn from the entire token list with
		/// no positional ("rightwards") or distinctness constraint -- confirm this is intended.
		/// </summary>
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		IEnumerable<_ICollection<IWordToken>> rightwards_mwe_solutions(IIdentList<IWordToken> words, String[] le_lemmas, int i)
		{
			if (i == le_lemmas.Length)
				yield return Collection<IWordToken>.NoneCollection;
			else
				foreach (var wt in words)
					if (wt.Text == le_lemmas[i])
						foreach (var ee in rightwards_mwe_solutions(words, le_lemmas, i + 1))
							yield return ee.Prepend(wt);
		}
	};

	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	/// <summary>
	/// Adapts a token-mapping lattice into a flat identity-array of mapping tokens, stamping each
	/// token with the chart span derived from the levels of its source and target vertices.
	/// </summary>
	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	public sealed class TokenMappingLatticeAdapterF : composition.f_primitive<TokenMappingLattice, IdentArray<MappingToken>>
	{
		public TokenMappingLatticeAdapterF(IRtParent parent)
			: base(parent)
		{
		}

		protected override IdentArray<MappingToken> func(TokenMappingLattice lat)
		{
			var tokens = new RefList<MappingToken>(lat.VertexCount + 1);

			foreach (var edge in lat.Edges)
			{
				/// todo: compute minimal spans from lattice topography

				int i_src = edge.Source.Level;
				int i_dst = edge.Target.Level;

				// each edge is expected to connect adjacent lattice levels
				Debug.Assert(i_src == i_dst - 1);

				var tok = edge.data;
				tok.TokenSpan = new ChartSpan(i_src, i_dst - 1);
				tokens.Add(tok);
			}
			return new IdentArray<MappingToken>(lat, tokens);
		}
	};

	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	/// <summary>
	/// Shared resource base for morpholexical analysis: holds the lexicon (plus its isometrics,
	/// captured when the lexicon is assigned), an optional quick-check filter, and a TFS unifier,
	/// and provides the section-unification entry point used during rule application.
	/// </summary>
	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	public abstract class MorphologyResources : m_observable<IActiveObj<IWordToken>, IChartObj<ChartSpan>>, IUnificationController,
		ρρLexicon
	{
		protected MorphologyResources(IRtParent parent, IActiveObj<IWordToken> toks)
			: base(parent, toks)
		{
		}

		///////////////////////////////////////////////////////////////////////
		/// 
		[DebuggerBrowsable(DebuggerBrowsableState.Never)]
		internal Lexicon lex;
		// cached from the assigned lexicon ('lex.em.im') by the Lexicon setter below
		protected Isometrics im;
		[DebuggerBrowsable(DebuggerBrowsableState.Never)]
		public Lexicon Lexicon
		{
			get { return lex; }
			// assigning the lexicon also captures its isometrics for direct access
			set { this.im = (lex = value).em.im; }
		}
		/// 
		///////////////////////////////////////////////////////////////////////


		///////////////////////////////////////////////////////////////////////
		/// optional pre-unification compatibility filter; may remain null
		[DebuggerBrowsable(DebuggerBrowsableState.Never)]
		protected QuickCheck qc;
		[DebuggerBrowsable(DebuggerBrowsableState.Never)]
		public QuickCheck QuickCheck
		{
			get { return qc; }
			set { qc = value; }
		}
		///
		///////////////////////////////////////////////////////////////////////


		///////////////////////////////////////////////////////////////////////
		/// unifier used by UnifySection; must be assigned before unification
		[DebuggerBrowsable(DebuggerBrowsableState.Never)]
		TfsUnifier fsu;
		[DebuggerBrowsable(DebuggerBrowsableState.Never)]
		public TfsUnifier TfsUnifier
		{
			get { return fsu; }
			set { fsu = value; }
		}
		/// 
		///////////////////////////////////////////////////////////////////////

		// counters: tokens received, and generic/native outputs produced
		// (presumably diagnostic; written by derived classes -- TODO confirm)
		public int c_tok_in, c_generic_out, c_native_out;

		// number of calls made to UnifySection (updated atomically)
		public int c_unif;

		/// <summary>
		/// Unifies the outer slot with the inner slot, applying the parse restrictors derived from
		/// 'opts'. Returns the unified TFS, or null if unification fails. On success the result is
		/// tagged with this controller as its trace.
		/// </summary>
		public ArrayTfs UnifySection(TfsSlot ts_outer, ITfsSlot ts_inner, Tfs.Flags opts)
		{
			Interlocked.Increment(ref c_unif);
			var r = fsu.ftm.ParseRestrictors.FromTfsFlags(opts);
			var tfs = Unification.UnifySection(r, opts | r.opts, ts_outer, ts_inner);
#if UNIF_FAILURES
			// diagnostic re-run with failure reporting enabled (compiled out by default)
			if (tfs == null)
			{
				Unification.f_report_failures = true;
				Unification.UnifySection(fsu, 0, ts_outer.Tfs, ts_outer.ix1, ts_inner.Tfs, ts_inner.SlotIndex);
				Unification.f_report_failures = false;
			}
#endif
			if (tfs != null)
				tfs._set_trace(this);
			return tfs;
		}
	};


	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	/// <summary>
	/// Performs pre-parsing analysis of the tokens in a tokenization hypothesis, including lexical and irregular 
	/// stem lookup, application of morphological and lexical rules, filtering, etc. The hypothesis may be either
	/// single coverage (i.e. 'TokenizationHypothesis') or ad-hoc where token selection is deferred to the
	/// parsing phase (i.e. 'TokenizedString')
	/// 
	/// Find zero or more analysis stacks for each source token and add them to a bag. Note that we are mostly operating 
	/// from the surface form only, so the token 'tok' that is passed in is not referenced unless we need to check when 
	/// postulating a multi-word stem. Otherwise the analyzer doesn't care about how the tokens are laid out, 
	/// overlapping, or gapped in the source, and they are returned as a bag that is, in principle, unordered.
	/// 
	/// Morpholexical analysis happens in two passes. The surface-to-stem inbound pass ("downtrace") permutes affixation--
	/// regular or irregular--fanning downwards until one or more stems are reached. Then, for each discovered downtrace, 
	/// an outbound pass follows the downtrace in reverse checking the license requirements of the downtrace (aborting 
	/// if failing), interleaving non-affixing rules where possible, and permuting the skipping of downtrace elements. 
	/// Thus, at each node of the outbound pass, zero or more stacks fan out upwards.
	/// </summary>
	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	[ContentProperty("Tokens")]
	public sealed class MorphologyExecutor : MorphologyResources, IObserver<IWordToken>
	{
		public MorphologyExecutor(IRtParent parent, IActiveObj<IWordToken> toks)
			: base(parent, toks)
		{
		}

		///////////////////////////////////////////////////////////////////////
		/// <summary>
		/// The 'count' of IParseChartEdge items encapsulated in the LexicalAnalysis 
		/// enumerator is *not* necessarily the required size of the parse chart due 
		/// to overlapping tokens or multiple tokens for a given chart position; 
		/// use the ChartSize property for this.
		/// </summary>
		///
		public IActiveObj<IWordToken> Tokens
		{
			get { return t0; }
			set { t0 = value; }
		}
		///
		///////////////////////////////////////////////////////////////////////


		/// <summary>
		/// Snapshots the token source, computes the chart limit, and publishes every morpholexical
		/// analysis obtained by expanding each token. The subscription-based (push) path is
		/// currently compiled out in favor of a synchronous snapshot.
		/// </summary>
		protected override void start(IActiveObj<IWordToken> toks)
		{
			// block until the entry manager has finished its pending initializations
			// (presumably required before lexical lookup is valid -- TODO confirm)
			lex.em.WaitForRemainingInits();
#if false
			toks.Subscribe(this);
#else
			_tmp_toks = toks.ToArrayWait();
			c_tok_in = _tmp_toks.Length;
			// one past the highest token end index: the required chart width
			i_limit = _tmp_toks.Max(wt => wt.TokenSpan.EndIndex) + 1;
			for (int i = 0; i < c_tok_in; i++)
			{
				foreach (IChartObj<ChartSpan> co in ExpandAnalyses(new DownTraceSurface(this, _tmp_toks[i])))
					_item(co);
			}
			_remove_task();
#endif
		}
		// snapshot of the input tokens, captured in start()
		public IWordToken[] _tmp_toks;
		// one past the highest token end index (computed in start())
		public int i_limit;

		// push-mode entry point: expand a single incoming token under task bookkeeping.
		// Only reachable when the (currently disabled) subscription path in start() is enabled.
		void IObserver<IWordToken>.OnNext(IWordToken tok)
		{
			_add_task();

			Interlocked.Increment(ref c_tok_in);

			foreach (IChartObj<ChartSpan> co in ExpandAnalyses(new DownTraceSurface(this, tok)))
				_item(co);

			_remove_task();
		}

		void IObserver<IWordToken>.OnCompleted() { _remove_task(); }

		void IObserver<IWordToken>.OnError(Exception error) { _error(error); }

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// Each 'DownTrace' stack represents a sequence of licensed orthographic changes. These are permuted on the way 
		/// down from the surface towards the stem, recorded as an (inverted) tree as connected with other DownTraces.
		/// When unwinding the stack, non-affixing changes are further inserted into these trees, possibly multiplying 
		/// branches upwards at any point.
		/// 
		/// Enumerator calls serve as placeholders during the inbound (downtrace) for interleaving which may occur
		/// while outbound, since we can't predict such during the downtrace (there are no spelling changes to signal
		/// the need.)
		/// </summary>
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		public IEnumerable<IChartObj<ChartSpan>> ExpandAnalyses(DownTrace dt_in)
		{
			// for each discovered downtrace: expand outbound, then interleave non-affixing rules
			return dt_in.analyze().SelectMany(dt => multiply_outbound(dt.form, dt.OutboundExpand()));
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// An arbitrary number of non-affixing rules can interleave. As long as we find any, multiply the returned
		/// stacks recursively upwards.
		/// also return the stack without any non-affixing lexical rules applied
		/// todo: is this only valid if it matches the surface form?
		/// </summary>
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		IEnumerable<IChartObj<ChartSpan>> multiply_outbound(String form, IEnumerable<IChartObj<ChartSpan>> seq)
		{
			// recursion terminates when outbound_non_affixing yields no further applicable rules
			return seq.SelectMany(_in => multiply_outbound(form, outbound_non_affixing(form, _in)).Append(_in));
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// Return all analyses corresponding to any valid non-affixing lexical rules applied to the top of the specified 
		/// analysis. If no further transformation can be generated on this stack, return an empty sequence.
		/// </summary>
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		IEnumerable<IChartObj<ChartSpan>> outbound_non_affixing(String form, IChartObj<ChartSpan> pxo_in)
		{
			var lic = pxo_in.ChartLicense;
			var le = lic as LexicalEntry;
			var lr = lic as LexicalRule;

			// if the top of the stack is a lexical rule, restrict candidates to that rule's
			// compatible mothers; otherwise try every non-affixing lexical rule in the grammar
			foreach (var rule in lr != null ? lr.NonAffixingCompatibleMothers : lex.mph.non_affixing_lexrules)
			{
				Debug.Assert(lr == null || lr.IsCompatibleKeyMother(rule));

				var pxo = TryApplyLexicalRule(pxo_in, le, rule);
				if (pxo != null)
					yield return pxo;
			}
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// because each solution must draw on one lexical analysis ultimately originating from every lexical entry, 
		/// it seems likely that many of the lexical analyses will actually be needed. So perhaps there's no point in 
		/// using the packing restrictor here. It follows that we wouldn't analyze subsumptions (i.e., pack) in the 
		/// morphology section. As such, we save work in the unpacker because it recognizes (and conserves) non-packed 
		/// TFSes, and empirical controlled performance tests with the ERG strongly prefer the mode.
		///
		/// Applies 'rule' on top of the analysis 'pxo'; returns the new analysis, or null if the rule
		/// is not licensed or unification fails. 'le' may be null when the license is not a lexical entry.
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		internal IChartObj<ChartSpan> TryApplyLexicalRule(IChartObj<ChartSpan> pxo, LexicalEntry le, LexicalRule rule)
		{
			if (le != null && !le.IsLexRuleCompat(rule))
				return null;

			TfsSlot dtr = rule.DaughterNodes[0];

			// quick-check pre-filter: a true result is treated here as guaranteed unification
			// failure, so the rule is skipped (presumed polarity -- TODO confirm)
			if (qc != null && qc._QuickCheck(pxo.Tfs, rule.Expanded, dtr.out_mark))
				return null;

			ArrayTfs full;
			if ((full = UnifySection(dtr, pxo, Tfs.Flags.DeleteArgs)) == null)
				return null;
			// NOTE(review): redundant -- UnifySection already sets the trace on success; harmless
			full._set_trace(this);

			Debug.Assert(!full.SemanticsRestricted);

			var pxo_new = new pxo_unary(this, rule, full, pxo, null);
			pxo_new.EndTransact(AtomicSeqState.Available);
			return pxo_new;
		}
	};
}
