﻿using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Diagnostics;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;

using agree.schema;

using alib.Debugging;
using alib.Enumerable;

namespace agree
{
	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	/// Holds one EntrySet per TdlType category plus a case-insensitive name-to-Entry dictionary shared by derived managers.
	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	public abstract class EntryMgrEntrySets : has_grammar_base
	{
		/// <summary>
		/// Comparer used for entry-name lookups: ordinal, ignoring case.
		/// </summary>
		[DebuggerBrowsable(DebuggerBrowsableState.Never)]
		protected static StringComparer cmp_entry_name { get { return StringComparer.OrdinalIgnoreCase; } }

		public EntryMgrEntrySets(IRtParent parent)
			: base(parent)
		{
			var em = (EntryMgr)this;

			// Create one entry set per TDL category.
			_syntax_rules = new SyntaxRuleSet(em);
			_lexical_rules = new LexicalRuleSet(em);
			_lexical_entries = new LexicalEntrySet(em);
			_generic_lexical_entries = new GenericLexicalEntrySet(em);
			_start_symbols = new StartSymbolSet(em);
			_node_labels = new NodeLabelSet(em);
			_trigger_rules = new TriggerRuleSet(em);
			_token_mapping_rules = new TokenMappingRuleSet(em);
			_lexical_filtering_rules = new LexicalFilteringRuleSet(em);

			// Index each set by its TdlType ordinal so the typed indexer below is O(1).
			var sets = new EntrySet[Enum.GetValues(typeof(TdlType)).Length];
			sets[(int)TdlType.SyntaxRule] = _syntax_rules;
			sets[(int)TdlType.LexicalRule] = _lexical_rules;
			sets[(int)TdlType.LexicalEntry] = _lexical_entries;
			sets[(int)TdlType.GenericLexicalEntry] = _generic_lexical_entries;
			sets[(int)TdlType.StartSymbol] = _start_symbols;
			sets[(int)TdlType.NodeLabel] = _node_labels;
			sets[(int)TdlType.TriggerRule] = _trigger_rules;
			sets[(int)TdlType.TokenMappingRule] = _token_mapping_rules;
			sets[(int)TdlType.LexicalFilteringRule] = _lexical_filtering_rules;
			this.rges = sets;

			this.entry_dict = new Dictionary<String, Entry>(cmp_entry_name);
		}

		[DebuggerBrowsable(DebuggerBrowsableState.Never)]
		readonly EntrySet[] rges;

		/// <summary>The entry set holding entries of the given TDL category.</summary>
		public EntrySet this[TdlType _type] { get { return rges[(int)_type]; } }

		public SyntaxRuleSet _syntax_rules;
		public LexicalRuleSet _lexical_rules;
		public LexicalEntrySet _lexical_entries;
		public GenericLexicalEntrySet _generic_lexical_entries;
		public StartSymbolSet _start_symbols;
		public NodeLabelSet _node_labels;
		public TriggerRuleSet _trigger_rules;
		public TokenMappingRuleSet _token_mapping_rules;
		public LexicalFilteringRuleSet _lexical_filtering_rules;

		[DebuggerBrowsable(DebuggerBrowsableState.Never)]
		public Dictionary<String, Entry> entry_dict;

		/// <summary>All entries known to this manager, across every category.</summary>
		public ICollection<Entry> AllEntries { get { return entry_dict.Values; } }

		/// <summary>
		/// Look up an entry by name (case-insensitive). Returns null when the name is
		/// absent or when the dictionary has not been created.
		/// </summary>
		public Entry this[String s]
		{
			get
			{
				var d = entry_dict;
				if (d == null)
					return null;
				Entry e;
				d.TryGetValue(s, out e);
				return e;
			}
		}
	};

	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	/// Populates the entry dictionary and entry sets from TDL identifier-token-group streams, one stream per TdlType.
	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	public abstract class EntryMgrTdlLoader : EntryMgrEntrySets
	{
		public EntryMgrTdlLoader(IRtParent parent)
			: base(parent)
		{
		}

		/// <summary>
		/// Attaches an entry-creation subscriber to each of the nine token-group streams,
		/// all in parallel. Streams that were never assigned are skipped (CreateEntries
		/// tolerates null).
		/// </summary>
		protected void load_entry_groups()
		{
			Parallel.Invoke
			(
				() => CreateEntries(itd_rules),
				() => CreateEntries(itd_lexrules),
				() => CreateEntries(itd_lexent),
				() => CreateEntries(itd_generics),
				() => CreateEntries(itd_labels),
				() => CreateEntries(itd_triggers),
				() => CreateEntries(itd_tmrs),
				() => CreateEntries(itd_roots),
				() => CreateEntries(itd_lfrs)
			);
		}

		// One backing field + property pair per TdlType category; these are assigned by the
		// TDL loading machinery before load_entry_groups() runs.
		// NOTE(review): itd_lexent and itd_generics expose their backing fields publicly,
		// unlike the other seven — presumably an unintentional asymmetry; confirm before
		// tightening their accessibility.

		[DebuggerBrowsable(DebuggerBrowsableState.Never)]
		IActiveObj<IdentifierTokenGroup> itd_rules;
		public IActiveObj<IdentifierTokenGroup> SyntaxRuleTokenGroups
		{
			get { return this.itd_rules; }
			set { this.itd_rules = value; }
		}

		[DebuggerBrowsable(DebuggerBrowsableState.Never)]
		IActiveObj<IdentifierTokenGroup> itd_lexrules;
		public IActiveObj<IdentifierTokenGroup> LexicalRuleTokenGroups
		{
			get { return this.itd_lexrules; }
			set { this.itd_lexrules = value; }
		}

		[DebuggerBrowsable(DebuggerBrowsableState.Never)]
		public IActiveObj<IdentifierTokenGroup> itd_lexent;
		public IActiveObj<IdentifierTokenGroup> LexicalEntryTokenGroups
		{
			get { return this.itd_lexent; }
			set { this.itd_lexent = value; }
		}

		[DebuggerBrowsable(DebuggerBrowsableState.Never)]
		public IActiveObj<IdentifierTokenGroup> itd_generics;
		public IActiveObj<IdentifierTokenGroup> GenericLexicalEntryTokenGroups
		{
			get { return this.itd_generics; }
			set { this.itd_generics = value; }
		}

		[DebuggerBrowsable(DebuggerBrowsableState.Never)]
		IActiveObj<IdentifierTokenGroup> itd_labels;
		public IActiveObj<IdentifierTokenGroup> NodeLabelTokenGroups
		{
			get { return this.itd_labels; }
			set { this.itd_labels = value; }
		}

		[DebuggerBrowsable(DebuggerBrowsableState.Never)]
		IActiveObj<IdentifierTokenGroup> itd_triggers;
		public IActiveObj<IdentifierTokenGroup> TriggerRuleTokenGroups
		{
			get { return this.itd_triggers; }
			set { this.itd_triggers = value; }
		}

		[DebuggerBrowsable(DebuggerBrowsableState.Never)]
		IActiveObj<IdentifierTokenGroup> itd_tmrs;
		public IActiveObj<IdentifierTokenGroup> TokenMappingRuleTokenGroups
		{
			get { return this.itd_tmrs; }
			set { this.itd_tmrs = value; }
		}

		[DebuggerBrowsable(DebuggerBrowsableState.Never)]
		IActiveObj<IdentifierTokenGroup> itd_roots;
		public IActiveObj<IdentifierTokenGroup> StartSymbolTokenGroups
		{
			get { return this.itd_roots; }
			set { this.itd_roots = value; }
		}

		[DebuggerBrowsable(DebuggerBrowsableState.Never)]
		IActiveObj<IdentifierTokenGroup> itd_lfrs;
		public IActiveObj<IdentifierTokenGroup> LexicalFilteringRuleTokenGroups
		{
			get { return this.itd_lfrs; }
			set { this.itd_lfrs = value; }
		}


		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// Generates a name not yet present in entry_dict by appending "__1", "__2", ...
		/// to <paramref name="entry_name"/>. The caller must hold the entry_dict lock for
		/// the result to remain unused until it is inserted.
		/// </summary>
		String find_unused_entry_name(String entry_name)
		{
			entry_name += "__";
			int i = 1;
			String s_name;
			while (entry_dict.ContainsKey(s_name = entry_name + i.ToString()))
				i++;
			return s_name;
		}
		///
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////


		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// initialize Entry dictionary with all Entry symbol names.
		/// Subscribes to the given token-group stream; for each group, resolves the entry's
		/// type from its declared parents, creates the Entry, and registers it in entry_dict
		/// under its identifier text. Name collisions are handled per the #if policy below
		/// (default: keep all repeated entries under generated "__N" names).
		/// </summary>
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		void CreateEntries(IActiveObj<IdentifierTokenGroup> rg_tdg)
		{
			// null means no TDL of this category was configured; nothing to subscribe to.
			if (rg_tdg == null)
				return;

			rg_tdg.Subscribe(tdg =>
			{
				//if (tdg.f_append)
				//	TdlTokenizer.ErrorExit(id_tok, "Cannot append type information in an entry description.");

				var id_tok = tdg.constraints.identifier;

				// Unify all declared parents to obtain this entry's type before touching the dictionary.
				var tt_cur = get_entry_type(tdg);

				lock (entry_dict)
				{
					if (entry_dict.ContainsKey(id_tok.text))
					{
#if true				// keep all repeated entries
						id_tok.text = find_unused_entry_name(id_tok.text);
#elif true				// re-definition of an entry replaces previous entry
						entry_dict.Remove(id_tok.i_s);
#elif true				// re-definition of an entry is ignored; first version is retained
						continue;							
#else					// re-definition of an entry is an error
						TdlTokenizer.ErrorExit(id_tok, String.Format("Symbol {0} is already defined.", id_tok.i_s));
#endif
					}
				}

				// NOTE(review): the lock is released above and re-acquired below. A concurrent
				// subscriber could insert the same (possibly renamed) key in that window, making
				// the Add below throw ArgumentException — confirm whether identifier names can
				// collide across the parallel streams started by load_entry_groups().
				var e = create_entry_from_tdl_group(tdg, tt_cur);

				lock (entry_dict)
					entry_dict.Add(id_tok.text, e);
			},
			() =>
			{
				/// can read TDL hash now that all TDL has been forced through the pipeline
				// On stream completion: if the stream object exposes both its TdlType and its
				// TDL hash, propagate the hash to the corresponding entry set.
				ρρTdlHash ths;
				ρρTdlType tts;
				if ((tts = rg_tdg as ρρTdlType) != null && (ths = rg_tdg as ρρTdlHash) != null)
					this[tts.TdlType].update_hash(ths.TdlHash);
			});
		}
		///
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////


		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// Resolves the entry's type by unifying all of its declared parent types, starting
		/// from the top type. Exits with a tokenizer error if a parent is undefined or if
		/// the parents fail to unify.
		/// </summary>
		Type get_entry_type(IdentifierTokenGroup tdg)
		{
			var t_cur = td.ΔTop;

			foreach (TdlTok tok_par in tdg.κκParents)
			{
				Type t_par;
				if (!td.TryGetType(tok_par.text, out t_par))
					TdlTokenizer.ErrorExit(tok_par, String.Format("Entry description uses undefined parent type {0}", tok_par.text));

				if ((t_cur = tu.UnifyTypesFull(t_cur, t_par)) == null)
					TdlTokenizer.ErrorExit(tok_par, String.Format("Parent types {0} specified for entry description {1} do not unify",
						tdg.κκParents.Select(t => t.text).StringJoin(" "),
						tdg.constraints.identifier.text));
			}
			return t_cur;
		}
		///
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////


		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// Creates the concrete Entry subclass for the token group's TDL category and adds
		/// it to the matching entry set. Morphology subrules are accepted only on lexical
		/// rules; an unrecognized category throws.
		/// </summary>
		Entry create_entry_from_tdl_group(IdentifierTokenGroup tdg, Type tt_cur)
		{
			ConsDefs consdef = tdg.constraints;

			var mtdg = tdg as MorphologyIdentifierTokenGroup;
			if (mtdg != null && consdef.tdl_type != TdlType.LexicalRule)
				throw new Exception("Morphology subrules may only be defined on lexical rules.");

			Entry e;
			switch (consdef.tdl_type)
			{
				case TdlType.SyntaxRule:
					//_syntax_rules.AllocId();
					e = new SyntaxRule(g, tt_cur, consdef);
					_syntax_rules.Add(e);
					break;

				case TdlType.LexicalRule:
					//_lexical_rules.AllocId();
					// A MorphologyIdentifierTokenGroup carries morphological subrules and
					// produces a MorphologicalRule instead of a plain LexicalRule.
					if (mtdg == null)
						e = new LexicalRule(g, tt_cur, consdef);
					else
						e = new MorphologicalRule(g, tt_cur, mtdg.constraints, mtdg.morph_subrules);
					_lexical_rules.Add(e);
					break;

				case TdlType.LexicalEntry:
					//_lexical_entries.AllocId();
					e = new LexicalEntry(g, tt_cur, consdef);
					_lexical_entries.Add(e);
					break;

				case TdlType.GenericLexicalEntry:
					//_generic_lexical_entries.AllocId();
					e = new GenericLexicalEntry(g, tt_cur, consdef);
					_generic_lexical_entries.Add(e);
					break;

				case TdlType.StartSymbol:
					//_start_symbols.AllocId();
					e = new StartSymbol(g, tt_cur, consdef);
					_start_symbols.Add(e);
					break;

				case TdlType.NodeLabel:
					{
						var pn = tdg.κκParents;
						//_node_labels.AllocId();
						// Classify as label template vs. meta template according to the
						// configured parent type names; parentless entries (and everything
						// in simple-tree-display mode) default to label templates.
						var nlc = AgreeConfig.NodeLabels;
						if (pn.Count == 0 || nlc.SimpleTreeDisplay || pn.Any(tk => cmp_entry_name.Compare(tk.text, nlc.LabelTemplateType) == 0))
							e = new NodeLabelTemplate(g, tt_cur, consdef);
						else if (pn.Any(tk => cmp_entry_name.Compare(tk.text, nlc.MetaTemplateType) == 0))
							e = new NodeMetaTemplate(g, tt_cur, consdef);
						else
						{
							e = null;
							String msg = String.Format(
								"'{0}' must be either a label template derived from type '{1}' or a meta template derived from type '{2}'",
								consdef.identifier.text,
								nlc.LabelTemplateType,
								nlc.MetaTemplateType);
							TdlTokenizer.ErrorExit(consdef.identifier, msg);
						}
						_node_labels.Add(e);
					}
					break;

				case TdlType.TriggerRule:
					//_trigger_rules.AllocId();
					e = new TriggerRule(g, tt_cur, consdef);
					_trigger_rules.Add(e);
					break;

				case TdlType.TokenMappingRule:
					//_token_mapping_rules.AllocId();
					e = new TokenMappingRule(g, tt_cur, consdef);
					_token_mapping_rules.Add(e);
					break;

				case TdlType.LexicalFilteringRule:
					//_lexical_filtering_rules.AllocId();
					e = new LexicalFilteringRule(g, tt_cur, consdef);
					_lexical_filtering_rules.Add(e);
					break;

				default:
					throw alib.not.valid;
			}
			return e;
		}
		///
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	};
}
