﻿//#define SVD

using System;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Text;

using agree.configuration;
using agree.schema;

using alib;
using alib.BitArray;
using alib.Debugging;
using alib.Enumerable;
using alib.Hashing;
using alib.IO;
using alib.Math;
using alib.Matrix;

namespace agree
{
	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	/// 
	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	[DebuggerDisplay("{ToString(),nq}")]
	public sealed class FeatureInfo : IIdentity
	{
		/// sentinel instances: 'RootNode' stands in for the TFS root (feature index -1) and
		/// 'Invalid' for out-of-range lookups; neither has an introducing type
		public static readonly FeatureInfo[] None = alib.Collections.Collection<FeatureInfo>.None;
		public static readonly FeatureInfo RootNode = new FeatureInfo("(TFS root)", -1, null);
		public static readonly FeatureInfo Invalid = new FeatureInfo("(invalid)", int.MaxValue, null);

		public FeatureInfo(String feature, int i_feat, Type introduced_by)
		{
			this.i_feat = i_feat;
			this.feature = feature;
			this.introduced_by = introduced_by;

			// set a temporary value in case of use during initialization prior to maximal types becoming available
			this.OrderIndex = i_feat;
		}

		public int i_feat;						// dense feature index assigned during table finalization
		public String feature;					// feature name as authored (case preserved)
		public Type introduced_by;				// maximal introducing type, or null for the sentinels above
		public Type maximal_well_formed_type;	// assigned later by FeatMgr.set_maximal_well_formed_types
		public int OrderIndex;					// display rank; provisional (== i_feat) until maximal types are computed

		/// canonical upper-case rendering of the feature name
		public String FEATURE { get { return feature.ToUpper(); } }

#if DEBUG
		public override String ToString()
		{
			return String.Format("{0,3} {1,12} {2,27} {3,27} {4:X8}",
				i_feat,
				feature == null ? "(null)" : feature.ToUpper(),
				introduced_by == null ? "(null)" : introduced_by.Name,
				maximal_well_formed_type == null ? "(null)" : maximal_well_formed_type.Name,
				OrderIndex);
		}
#endif
		IIdentity IIdentity.Trace { get { return introduced_by; } }
	};

	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	/// required property init:
	///		TypeLattice
	///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
	public sealed class FeatMgr : has_grammar_base, IIdentList<FeatureInfo>, ISupportInitializeNotification,
		IIndexedHash<FeatureInfo>,
		IIndexedHash<String>,
		ρρParseRestrictors,
		ρρGenerationRestrictors
	{
		public FeatMgr(IRtParent parent)
			: base(parent)
		{
		}

		/// baseline restrictor (name suggests "restrict nothing"); created in setup_restrictors
		public Restrictor r_none;
		Restrictors _p_restrictors, _g_restrictors;

		/// read-only view of the parse-mode restrictor set built during EndInit; assignment is rejected
		public Restrictors ParseRestrictors
		{
			get { return _p_restrictors; }
			set { throw not.valid; }
		}

		/// read-only view of the generation-mode restrictor set built during EndInit; assignment is rejected
		public Restrictors GenerationRestrictors
		{
			get { return _g_restrictors; }
			set { throw not.valid; }
		}

		/// <summary>
		/// Map a feature name to its dense feature index, or -1 when the name is unknown.
		/// Lookup is case-insensitive (feat_map is built with OrdinalIgnoreCase in EndInit).
		/// </summary>
		public int GetFeatureIndex(String f)
		{
			FeatureInfo fi;
			if (feat_map.TryGetValue(f, out fi) && fi != null)
				return fi.i_feat;
			return -1;
		}

		/// <summary>
		/// Convert an array of feature names to an array of feature indexes, also computing a
		/// hash code 'hc' over the resulting sequence. If a name is not a known feature, the
		/// result is truncated at that point and 'hc' reflects the shorter length.
		/// A null input yields an empty array and hc == 0.
		/// </summary>
		public int[] GetFeatureArray(String[] rgs, out int hc)
		{
			if (rgs == null)
			{
				hc = 0;
				return alib.Collections.Collection<int>.None;
			}
			hc = rgs.Length;	// seed the hash with the (expected) element count

			int c = hc;
			int[] rg_fix = new int[c];
			for (int fix, i = 0; i < c; i++)
			{
				FeatureInfo fi;
				if (!feat_map.TryGetValue(rgs[i], out fi))
				{
					// unknown feature: truncate the result at 'i' elements
					alib.Array.arr.Resize(ref rg_fix, i);
					hc ^= c ^ i;	// swap the full-length seed 'c' out of the hash, replacing it with the truncated length 'i'
					break;
				}
				fix = fi.i_feat;
				rg_fix[i] = fix;
				Debug.Assert(fix.GetHashCode() == fix);
				hc ^= fix << ((i + 6) % 32);	// position-dependent mixing of each feature index
			}
			return rg_fix;
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// feature lookup tables, built by EndInit
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

		/// feature name -> FeatureInfo; case-insensitive (OrdinalIgnoreCase, see EndInit)
		public Dictionary<String, FeatureInfo> feat_map;

		/// dense table indexed by FeatureInfo.i_feat
		public FeatureInfo[] feat_arr;

		public FeatureInfo this[String s_feat]
		{
			get { return feat_map[s_feat]; }
		}
		/// tolerant indexer: -1 maps to the TFS-root sentinel, any other out-of-range
		/// value to the 'Invalid' sentinel (the uint cast folds both range checks into one)
		public FeatureInfo this[int i_feat]
		{
			get
			{
				if ((uint)i_feat >= feat_arr.Length)
					return i_feat == -1 ? FeatureInfo.RootNode : FeatureInfo.Invalid;
				return feat_arr[i_feat];
			}
		}
		public int this[FeatureInfo to_find] { get { return to_find.i_feat; } }

		public int Count { get { return feat_arr.Length; } }

		public IEnumerator<FeatureInfo> GetEnumerator() { return feat_arr.Enumerator(); }

		IEnumerator IEnumerable.GetEnumerator() { return GetEnumerator(); }

		/// compares feature indexes by display rank (OrderIndex); built in EndInit
		public IComparer<int> FeatureDisplayOrderComparer;
		public FeatureConfig[] rg_fc;					// all distinct feature configurations, indexed by fc_ix
		public FeatureConfig Empty;						// configuration with no appropriate features
		public FeatureConfig EmptyWithFeaturesBelow;	// like Empty, for types with features introduced below them

		/// per-type feature index arrays, parallel to bcpo.type_arr
		public int[][] rgrgfix_by_type;

		/// <summary>
		/// ISupportInitializeNotification completion: load feature introductions from the TDL
		/// type definitions, then build the feature tables, configurations, lookup maps, and
		/// restrictors. Returns early when the grammar declares no features.
		/// </summary>
		public void EndInit()
		{
			var map = load_tdl_types(bcpo.type_arr);

#if false
			var rgba = _get_preliminary_feature_configurations(type_arr, feat_arr);

			_optimize_feature_order(rgba, feat_arr);
#endif

			int c_feat = finalize_feature_tables(map);
			if (c_feat == 0)
				return;		// NOTE(review): feat_map/rg_fc/restrictors stay null in this case — confirm callers tolerate that

			this.rg_fc = _build_feature_configs(feat_arr);

			_report();	// compiled away unless the 'Report' symbol is defined

			// feature-name lookup is case-insensitive
			this.feat_map = feat_arr.ToDictionary(fi => fi.feature, fi => fi, StringComparer.OrdinalIgnoreCase);

			this.rgrgfix_by_type = bcpo.type_arr.Select(t => t.fc.rg_fix).ToArray();

			this.FeatureDisplayOrderComparer = new _feat_display_order_comparer(feat_arr);

			setup_restrictors();
		}

		/// <summary>
		/// Create the baseline restrictor and populate the parse/generation restrictor sets from
		/// configuration, converting configured feature names to feature indexes.
		/// </summary>
		void setup_restrictors()
		{
			r_none = new Restrictor(this);
			_p_restrictors = new Restrictors(this, this, "parse_restrictor", r_none);
			_g_restrictors = new Restrictors(this, this, "generation_restrictor", r_none);

			// deleted-daughters features are shared by both restrictor sets; note 'af' is a lazy
			// iterator (ConvertAll) and is enumerated once by each SetRestrictors call below
			var af = ConvertAll(AgreeConfig.Grammar.DeletedDaughters);

			_p_restrictors.SetRestrictors(af, ConvertAll(AgreeConfig.Parser.Chart.PackingRestrictors));

			_g_restrictors.SetRestrictors(af, ConvertAll(AgreeConfig.Generator.Chart.PackingRestrictors));
		}


		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// create a mapping of feature to introducing type, check for multiple introduction, and
		/// incidentally determine the count of distinct features
		/// 
		/// Validate appropriateness conditions and determine 
		/// maximal types for all features. Ensure the validity of the type hierarchy according to certain properties of 
		/// the formalism.
		/// References:
		/// Ann Copestake "The Compleat LKB" 1993, p. 93.
		/// Carroll, Copestake, Malouf, Oepen - LKB checktypes.lsp
		/// Ann Copestake "Implementing Typed Feature Structure Grammars" 2002
		/// </summary>
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		Dictionary<String, Type> load_tdl_types(Type[] types)
		{
			// feature name -> the type that introduced it (first mention wins)
			var mt_map = new Dictionary<String, Type>();

			foreach (Type t in types)
				if (t.m_consdefs != null)
				{
					foreach (TdlTokenList bfc in t.m_consdefs)
					{
						// the first token of each constraint definition names the feature
						TdlTok tk = bfc[0];
						String s_feat = null;
						if (tk.type_id == TdlTok.Type.AngleOpen)
							s_feat = AgreeConfig.Types.ListHead;  // foo := bar & < synsem, synsem >.   (Jacy)
						else if (tk.type_id != TdlTok.Type.Identifier)
							TdlTokenizer.ErrorExit(tk, "Expected an identifier");
						else
							s_feat = tk.Text;

						// a feature already introduced elsewhere may only be re-mentioned by types
						// related to the introducer; FastTest presumably checks the subtype relation
						// via the type bit vectors — TODO confirm
						Type max_type;
						if (!mt_map.TryGetValue(s_feat, out max_type))
							mt_map.Add(s_feat, t);
						else if (!t.bit_vec.FastTest(max_type.bit_vec))
							TdlTokenizer.ErrorExit(String.Format(
	"Feature '{0}' cannot be introduced by type '{1}'; it was already introduced by type '{2}'.",
								s_feat.ToUpper(),
								t.Name,
								max_type.Name));
					}
					// any type with constraint definitions has appropriate features; propagate
					// the features-below flag upwards once per type
					if ((t.m_flags & Type.Flags.HasAppropriateFeatures) == 0)
					{
						t.m_flags |= Type.Flags.HasAppropriateFeatures;
						t._set_has_features_below();
					}
				}
				else if (t.parents.Any(p => p.HasAppropriateFeatures))
					t.m_flags |= Type.Flags.HasAppropriateFeatures | Type.Flags.FeaturesBelow;
			return mt_map;
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// finalize feature reference tables
		/// </summary>
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		int finalize_feature_tables(Dictionary<String, Type> mt_map)
		{
			/// Assign a dense feature index to each distinct feature discovered during TDL
			/// loading, recording its maximal introducing type. Returns the feature count.
			/// Note: the enumeration order of 'mt_map' determines the initial numbering.
			this.feat_arr = new FeatureInfo[mt_map.Count];

			int i_feat = 0;
			foreach (KeyValuePair<String, Type> kvp in mt_map)
			{
				feat_arr[i_feat] = new FeatureInfo(kvp.Key, i_feat, kvp.Value);
				i_feat++;
			}
			return i_feat;
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// 
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		static BitArr[] _get_preliminary_feature_configurations(Type[] type_arr, FeatureInfo[] feat_arr)
		{
			/// For each type with appropriate features, accumulate the feature bits inherited
			/// from its parents into its own (maximal-introduction) bit set, and return the
			/// distinct bit sets observed.
			int c_feat = feat_arr.Length;
			var mfbt = _get_maximal_feature_bitarrs(type_arr, feat_arr);

			var distinct = new HashSet<BitArr>();
			foreach (Type t in type_arr)
			{
				if (!t.HasAppropriateFeatures)
				{
					Debug.Assert(t.m_consdefs == null);
					continue;
				}

				BitArr ba = mfbt[t._id] ?? (mfbt[t._id] = new BitArr(c_feat));
				foreach (Type par in t.parents)
					if (par.HasAppropriateFeatures)
						ba.OrEq(mfbt[par._id]);

				distinct.Add(ba);
			}
			return distinct.ToArray();
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// 
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// Build, for each type, a bit set (over feature indexes) of the features that the type
		/// itself introduces; entries remain null for types that introduce nothing.
		/// </summary>
		static BitArr[] _get_maximal_feature_bitarrs(Type[] type_arr, FeatureInfo[] feat_arr)
		{
			int c_feat = feat_arr.Length;
			BitArr[] mfbt = new BitArr[type_arr.Length];
			for (int i = 0; i < feat_arr.Length; i++)
			{
				FeatureInfo fi = feat_arr[i];
				if (fi.introduced_by != null)
					new_bitarr_if_null(ref mfbt[fi.introduced_by._id], c_feat)[i] = true;
			}
			return mfbt;
		}

		/// lazily allocate a 'c_feat'-bit array at the given slot, returning the (now non-null) array
		static BitArr new_bitarr_if_null(ref BitArr ba, int c_feat) { return ba ?? (ba = new BitArr(c_feat)); }

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// Add features to a maximal introduction table
		/// propagate from parents to children by traversing in topological order
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		FeatureConfig[] _build_feature_configs(FeatureInfo[] feat_arr)
		{
			int c_feat = feat_arr.Length;
			// NOTE(review): both are created with index 0; 'EmptyWithFeaturesBelow' is never
			// entered into 'pod', so only 'Empty' appears in the returned table — confirm intended
			Empty = new FeatureConfig(this, new BitArr(c_feat), 0);
			EmptyWithFeaturesBelow = new FeatureConfig(this, new BitArr(c_feat), 0);

			var mfbt = _get_maximal_feature_bitarrs(bcpo.type_arr, feat_arr);

			// capacity is a heuristic only; the dictionary grows as needed
			var pod = new Dictionary<BitArr, FeatureConfig>((int)(c_feat / 1.414));
			pod.Add(Empty.ba, Empty);

			Debug.Assert(Empty.fc_ix == 0);
			int fc_ix = 1;

			// per the banner above, bcpo.type_arr is traversed in topological order so each
			// parent's config ('par.fc') is final before any child consults it
			foreach (Type t in bcpo.type_arr)
			{
				FeatureConfig fc;
				if (!t.HasAppropriateFeatures)
				{
					Debug.Assert(t.m_consdefs == null);
					fc = t.HasFeaturesBelow ? EmptyWithFeaturesBelow : Empty;
				}
				else
				{
					BitArr ba;
					if ((ba = mfbt[t._id]) == null)
						ba = mfbt[t._id] = new BitArr(c_feat);
					foreach (Type par in t.parents)
					{
						par._set_has_features_below();
						if (par.HasAppropriateFeatures)
							ba.OrEq(par.fc.ba);
					}

					// de-duplicate configurations by bit-set value
					if (!pod.TryGetValue(ba, out fc))
						pod.Add(ba, fc = new FeatureConfig(this, ba, fc_ix++));
				}
				(t.fc = fc).c_refs++;	// count how many types share this configuration
			}

			// pack the distinct configurations into an array addressable by fc_ix
			FeatureConfig[] rg_fc = new FeatureConfig[pod.Count];
			foreach (FeatureConfig fc in pod.Values)
				rg_fc[fc.fc_ix] = fc;
			return rg_fc;
		}


		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// 
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// Compute the maximal well-formed type for every feature and then assign each
		/// feature's display OrderIndex according to the packed sort keys from set_max_wf.
		/// </summary>
		public void set_maximal_well_formed_types()
		{
			int c = feat_arr.Length;
			var keys = new ulong[c];
			var ixs = new int[c];

			for (int i = 0; i < c; i++)
			{
				ixs[i] = i;
				keys[i] = set_max_wf(ref feat_arr[i]);
			}

			// co-sort the index table by key, then record each feature's resulting rank
			Array.Sort<ulong, int>(keys, ixs);

			for (int rank = 0; rank < c; rank++)
				feat_arr[ixs[rank]].OrderIndex = rank;
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// 
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// Determine the maximal well-formed type for feature 'fi' and return a packed sort key:
		/// introducing-type id at bit 42, maximal-type id at bit 20, feature index in the low bits.
		/// Sentinel features (no introducing type) keep a null maximal type and key == i_feat.
		/// </summary>
		ulong set_max_wf(ref FeatureInfo fi)
		{
			ulong k = (uint)fi.i_feat;

			var T_intr = fi.introduced_by;
			if (T_intr != null)
			{
				/// (note that types can introduce feature(s) with *top*, and constrain nothing else)
				Edge.Flag f = 0;
				if ((T_intr.m_flags & Instance.Flags.HasLocalConstraints) != 0)
				{
					Tfs exp = T_intr.Expanded;
					Debug.Assert(exp._top_edge.Mark == 1);
					int nm;
					f = exp.TryGetFlagsMark(fi.i_feat, 1, out nm);
				}
				k |= (ulong)T_intr._id << 42;

				// a zero constraint flag means the feature's value is unconstrained, i.e. *top*
				var T_max = fi.maximal_well_formed_type = f == 0 ? td.ΔTop : tu.GetEdgeType(f);
				k |= (ulong)T_max._id << 20;

#if false
				if (f == 0)
					Console.Out.WriteLineColor("{0} introduces feature $cyan {1}$ with $red no constraint.", T_intr.Name, fi.feature.ToUpper());
				else
					Console.Out.WriteLineColor("{0} introduces feature $cyan {1}$ with constraint $yellow {2}.", T_intr.Name, fi.feature.ToUpper(), Edge.TypeInfoFromFlag(tm, f));
#endif
			}
			return k;
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// singular value decomposition of the feature-usage mapping orients the high-dimensional feature space
		/// in the direction of greatest variation, optimizing the co-location of related features
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		static void _optimize_feature_order(BitArr[] configs, FeatureInfo[] feat_arr)
		{
			// NOTE(review): active only when the SVD symbol is defined (the #define at the top
			// of the file is commented out), so this currently compiles to an empty method
#if SVD
			int c_feat = feat_arr.Length;
			int c_cfgs = configs.Length;
			double[,] a = new double[c_cfgs, c_feat];
			for (int i = 0; i < c_cfgs; i++)
				foreach (int i_feat in configs[i].OnesPositions())
					a[i, i_feat] = 1;

			double[] w = null;			/// singular values
			double[,] _u = null;		/// left singular vectors (not computed)
			double[,] _vt = null;		/// right singular vectors
			if (!alglib.svd.rmatrixsvd(a, c_cfgs, c_feat, 0, 1, 2, ref w, ref _u, ref _vt))
				return;		/// probably non-convergence; see 'maxitr'

			/// we need the optimal 1-dimensional ordering, so just use the first dimension. Because we are only taking
			/// one row, there's no need to multiply it by the first singular value.
			Array.Sort(_vt.Row(0), feat_arr);

			/// re-assign feature IDs according to the new ordering
			for (int i = 0; i < c_feat; i++)
				feat_arr[i].i_feat = i;
#endif
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// 
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

		/// <summary>
		/// Orders feature indexes by their display rank (FeatureInfo.OrderIndex).
		/// </summary>
		sealed class _feat_display_order_comparer : IComparer<int>
		{
			public _feat_display_order_comparer(FeatureInfo[] rgfi)
			{
				this.feat_arr = rgfi;
			}
			readonly FeatureInfo[] feat_arr;

			public int Compare(int ifeat_1, int ifeat_2)
			{
				// use CompareTo rather than subtraction: 'a - b' can overflow for extreme
				// OrderIndex values (e.g. the int.MaxValue sentinel in FeatureInfo.Invalid)
				return feat_arr[ifeat_1].OrderIndex.CompareTo(feat_arr[ifeat_2].OrderIndex);
			}
		};

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// IIndexedHash(FeatureInfo)
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

		/// <summary>
		/// Lazily map a sequence of feature names to feature indexes (-1 for unknown names).
		/// A null input yields an empty sequence.
		/// </summary>
		public IEnumerable<int> ConvertAll(IEnumerable<String> feats)
		{
			if (feats == null)
				yield break;
			// foreach (unlike the previous hand-rolled GetEnumerator loop) disposes the enumerator
			foreach (String f in feats)
				yield return GetFeatureIndex(f);
		}

		/// <summary>
		/// Materialize FeatureInfo records for a sequence of feature indexes.
		/// Note: 'items' is enumerated twice (once by _Count(), once to fill the array).
		/// </summary>
		public FeatureInfo[] ConvertTo(IEnumerable<int> items)
		{
			FeatureInfo[] arr = new FeatureInfo[items._Count()];
			int i = 0;
			// foreach disposes the enumerator; the previous manual loop did not
			foreach (int fix in items)
				arr[i++] = feat_arr[fix];
			return arr;
		}

		/// <summary>
		/// Extract the feature indexes from a sequence of FeatureInfo records.
		/// Note: 'items' is enumerated twice (once by _Count(), once to fill the array).
		/// </summary>
		public int[] ConvertFrom(IEnumerable<FeatureInfo> items)
		{
			int[] arr = new int[items._Count()];
			int i = 0;
			// foreach disposes the enumerator; the previous manual loop did not
			foreach (FeatureInfo fi in items)
				arr[i++] = fi.i_feat;
			return arr;
		}

		/// ICollection support: copying is unimplemented; the instance is not synchronized
		public void CopyTo(Array array, int index) { throw not.impl; }
		public bool IsSynchronized { get { return false; } }
		public Object SyncRoot { get { return this; } }

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// IIndexedHash<String>
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

		int IIndexedHash<String>.this[String s_feat]
		{
			// name -> index; -1 when the feature is unknown
			get
			{
				FeatureInfo fi;
				return feat_map.TryGetValue(s_feat, out fi) ? fi.i_feat : -1;
			}
		}
		String IReadOnlyList<String>.this[int i_feat]
		{
			// index -> name; null when out of range (no sentinel mapping here, unlike the public int indexer)
			get { return i_feat < 0 || i_feat >= feat_arr.Length ? null : feat_arr[i_feat].feature; }
		}

		/// <summary>
		/// Map a sequence of feature indexes to their feature names.
		/// Note: 'items' is enumerated twice (once by _Count(), once to fill the array).
		/// </summary>
		String[] IIndexedHash<String>.ConvertTo(IEnumerable<int> items)
		{
			String[] arr = new String[items._Count()];
			int i = 0;
			// foreach disposes the enumerator; the previous manual loop did not
			foreach (int fix in items)
				arr[i++] = feat_arr[fix].feature;
			return arr;
		}

		/// <summary>
		/// Map a sequence of feature names to feature indexes (-1 for unknown names).
		/// Note: 'items' is enumerated twice (once by _Count(), once to fill the array).
		/// </summary>
		int[] IIndexedHash<String>.ConvertFrom(IEnumerable<String> items)
		{
			int[] arr = new int[items._Count()];
			int i = 0;
			// foreach disposes the enumerator; the previous manual loop did not
			foreach (String s in items)
			{
				FeatureInfo fi;
				arr[i++] = feat_map.TryGetValue(s, out fi) ? fi.i_feat : -1;
			}
			return arr;
		}

		/// lazily yield each feature name in index order
		IEnumerator<String> IEnumerable<String>.GetEnumerator()
		{
			foreach (FeatureInfo fi in feat_arr)
				yield return fi.feature;
		}

#if AUTOTUNE
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		///	<summary>
		/// Re-tune the feature unification order based on observed running failure counts
		/// </summary>
		/// NOTE(review): this disabled block references 'pod' (a local of _build_feature_configs),
		/// 'type_arr', and Task (System.Threading.Tasks is not imported here) — it appears stale
		/// relative to the current code and would need updating before AUTOTUNE could be enabled.
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

		public int[][] rgrgfix_autotune;

		public void Autotune() { RetuneAll(); }
		public void RetuneAll()
		{
			Nop.CodeCoverage();
			Task.Factory.StartNew(() =>
			{
#if false
				/// experiment with putting (only) the most failed path (i.e. SYNSEM) _last_, under the idea that it
				/// is already handled by quick-check.
				int ifeat_max_fail = -1;
				if (g.qcs != null)
					ifeat_max_fail = feat_arr.ArgMax(fi => fi.c_failures).i_feat;
#endif

				foreach (FeatureConfig fc in pod.Values)
				{
					if (fc.rg_fix.Length > 1)
					{
						/// note: depends on atomic publishing
						fc.rg_fix_autotune = fc.rg_fix
												.OrderByDescending(fix =>
													//fix == ifeat_max_fail ? -1 : 
													feat_arr[fix].c_failures)
												.ToArray();
					}
				}
				rgrgfix_autotune = type_arr.Select(t => t.fc.rg_fix_autotune).ToArray();
			});
		}
#endif

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// 
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// Print diagnostics about the computed feature configurations: per-config feature
		/// counts, moduli, reference counts, and aggregate slot-usage/waste statistics.
		/// Calls to this method are compiled away unless the 'Report' symbol is defined.
		/// </summary>
		[Conditional("Report")]
		public void _report()
		{
			int c_used = 0;				// total feature slots actually needed
			int c_local_waste = 0;		// slots allotted per each config's own modulus
			int c_maximal_waste = 0;	// slots allotted per the worst descendant config's modulus

			foreach (var tg in bcpo.type_arr.GroupBy(z => z.fc))
			{
				FeatureConfig fc = tg.Key;
				// the $color tokens are markup interpreted by WriteLineColor
				Console.Out.WriteLineColor("#{0,-3} feat: {1,-2} mod: $red {2,-2}$ [$cyan {3}$ ]",
					fc.fc_ix,
					fc.rg_fix.Length,
					fc.modulus,
					fc.ModOrder.Select(i => i == -1 ? "__" : feat_arr[i].i_feat.ToString() + "/" + feat_arr[i].feature.ToUpper()).StringJoin(" ")
					//fc.rg_fix.Select(i => feat_arr[i].feature.ToUpper()).StringJoin(" ")

					);
				Console.Out.WriteLineColor("\tused by $yellow {0}$ types", fc.c_refs);

				int dcd = fc.ChildConfigs._Count();
				int dc = fc.ChildConfigs.Sum(q => q.c_refs);
				int max_slots = fc.modulus;
				if (dc > 0)
				{
					Console.Out.WriteLineColor("\t{0} additional descendant types use $green {1}$ other distinct feature configs", dc, dcd);

					//var min_feat_child = desc.ArgMin(q => q.fc.rg_fix.Length);
					//Console.WriteLine("\tdescendant '{0}' has {1} features", min_feat_child.Name, min_feat_child.fc.rg_fix.Length);
					//var max_feat_child = desc.ArgMax(q => q.fc.rg_fix.Length);
					//Console.WriteLine("\tdescendant '{0}' has {1} features", max_feat_child.Name, max_feat_child.fc.rg_fix.Length);

					foreach (var cc in fc.ChildConfigs.OrderBy(q => q.rg_fix.Length))
						Console.Out.WriteLine("\t\t{0}", cc.ToString());
					max_slots = fc.ChildConfigs.Max(q => q.modulus);
				}
				Console.Out.WriteLineColor("\tmaximum slots $red {0}$ ", max_slots);

				c_used += fc.rg_fix.Length * fc.c_refs;
				c_local_waste += fc.modulus * fc.c_refs;
				c_maximal_waste += max_slots * fc.c_refs;

				Console.WriteLine();
			}

			// emit the feature list in a form that can be pasted back into code
			Console.WriteLine("{ " + feat_arr.Select(fi => "\"" + fi.feature.ToUpper() + "\"").StringJoin(", ") + " };");

			Console.Out.WriteLineColor("c_feat:                              $yellow {0,9:##0}", feat_arr.Length);
			Console.Out.WriteLineColor("c_fc:                                $yellow {0,9:##0}", rg_fc.Length);
			Console.Out.WriteLineColor("min config:                          $yellow {0,9:##0}", rg_fc.Min(q => q.rg_fix.Length));
			Console.Out.WriteLineColor("avg config:                          $yellow {0,9:0.000}", rg_fc.Average(q => q.rg_fix.Length));
			Console.Out.WriteLineColor("weighted avg config:                 $yellow {0,9:0.000}", rg_fc.WeightedAverage(q => q.rg_fix.Length, q => q.c_refs));
			Console.Out.WriteLineColor("max config:                          $yellow {0,9:###}", rg_fc.Max(q => q.rg_fix.Length));

			// NOTE YOU CAN ELIMINATE ZEROS BY USING ###
			Console.WriteLine();
			Console.Out.WriteLineColor("min config:                          $yellow {0,9:##0}", rg_fc.Min(q => q.modulus));
			Console.Out.WriteLineColor("avg config:                          $yellow {0,9:0.000}", rg_fc.Average(q => q.modulus));
			Console.Out.WriteLineColor("weighted avg config:                 $yellow {0,9:0.000}", rg_fc.WeightedAverage(q => q.modulus, q => q.c_refs));
			Console.Out.WriteLineColor("max config:                          $yellow {0,9:###}", rg_fc.Max(q => q.modulus));
			Console.Out.WriteLineColor("total actual need:                   $yellow {0,9:#,###}", c_used);
			Console.Out.WriteLineColor("local waste:                         $yellow {0,9:#,###} {1,8:0.000}%", c_local_waste, (c_local_waste - c_used) * 100.0 / c_used);
			Console.Out.WriteLineColor("maximal (during unification?) waste: $yellow {0,9:#,###} {1,8:0.000}%", c_maximal_waste, c_maximal_waste * 100.0 / c_used);

			// detail the single configuration with the largest modulus
			Console.WriteLine();
			var worst = rg_fc.ArgMax(q => q.modulus);
			Console.Out.WriteLineColor("#{0,-3} feat: {1,-2} mod: $red {2,-2}$ [$cyan {3}$ ]",
				worst.fc_ix,
				worst.rg_fix.Length,
				worst.modulus,
				worst.ModOrder.Select(i => i == -1 ? "__" : feat_arr[i].i_feat.ToString() + "/" + feat_arr[i].feature.ToUpper()).StringJoin(" "));

			// list all features, highlighting those that participate in the worst config
			Console.WriteLine();
			foreach (var fi in feat_arr)
			{
				if (worst.ba[fi.i_feat])
					Console.Out.WriteColor("$cyan " + fi.i_feat.ToString() + "/" + fi.feature.ToUpper() + " ");
				else
					Console.Write(fi.i_feat.ToString() + "/" + fi.feature.ToUpper() + " ");
			}
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// check root coverage of all expanded TFSes
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		void _root_coverage_report()
		{
			// skip the first type; presumably the *top* type — TODO confirm
			foreach (Type t in bcpo.type_arr.Skip(1))
			{
				if (!((ArrayTfs)t.Expanded).IsRootCovered)
				{
					// collect the feature indexes present at the TFS root (mark == 1);
					// note feat_arr[ate.i_feat].i_feat is an identity mapping unless features
					// have been renumbered — NOTE(review): confirm the indirection is intended
					BitArr ba = new BitArr(feat_arr.Length,
								((ArrayTfs)t.Expanded).entries
								.Where(ate => ate.mark == 1)
								.Select(ate => feat_arr[ate.i_feat].i_feat));

					// first line: all features of the type's config; second line: only those
					// actually covered at the root, space-padded to keep the columns aligned
					Console.WriteLine("{0,40} {1}", t.Name,
						 t.fc.rg_fix
								.Select(i => feat_arr[i].feature.ToUpper())
								.StringJoin(" "));
					Console.WriteLine(new String(' ', 40) + " {0}", t.fc.rg_fix
								.Select(i => ba[i] ? feat_arr[i].feature.ToUpper() : new String(' ', feat_arr[i].feature.Length))
								.StringJoin(" "));
				}
			}
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		///
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// Debug dump of type definitions, expanded type feature structures, and trigger-rule
		/// entry feature structures to the given files (each may be null to skip that dump).
		/// NOTE(review): the comparison steps below read hard-coded filenames ("fs-dump.txt",
		/// "fs-dump-cons-fill.txt", "fs-dump-ent.txt", "fs-dump-ent-oldway.txt") regardless of
		/// the parameters passed in — confirm this matches how the method is invoked.
		/// </summary>
		void _dump_all_feature_structures(String def_file, String fs_file, String fs_ent_file)
		{
			if (def_file != null)
			{
				using (StreamWriter sw = new StreamWriter(def_file, false))
				{
					Tfs.MonospaceFormatter mf = new Tfs.MonospaceFormatter(this, false);
					foreach (Type t in bcpo.type_arr)
					{
						String s_tdl;
						if (t.HasAppropriateFeatures && !String.IsNullOrWhiteSpace(s_tdl = mf.Format(t.Definition)))
							sw.WriteLine("====== " + s_tdl);
					}
				}
			}

			if (fs_file != null)
			{
				using (StreamWriter sw = new StreamWriter(fs_file, false))
				{
					Tfs.MonospaceFormatter mf = new Tfs.MonospaceFormatter(this);
					foreach (Type t in bcpo.type_arr)
					{
						if (t.HasAppropriateFeatures)
						{
							sw.WriteLine("====== {0}", t.Name);

							String s_tdl = mf.Format(t.Expanded);

							sw.WriteLine(s_tdl);
						}
					}
				}

				// regression check: compare against a previously captured dump
				using (StreamReader sr1 = new StreamReader("fs-dump.txt"))
				using (StreamReader sr2 = new StreamReader("fs-dump-cons-fill.txt"))
					if (sr1.BaseStream.ContentsEquals(sr2.BaseStream))
						Console.WriteLine("fs-dump ok.");
					else
						Console.WriteLine("fs-dump != fs-dump-cons-fill");
			}

			if (fs_ent_file != null)
			{
				using (StreamWriter sw = new StreamWriter(fs_ent_file, false))
				{
					Tfs.MonospaceFormatter mf = new Tfs.MonospaceFormatter(this);
					foreach (Entry e in em.AllEntries.OfType<TriggerRule>())
					{
						sw.WriteLine("====== {0}", e.Name);

						String s_tdl = mf.Format(e.Expanded);

						sw.WriteLine(s_tdl);
					}
				}

				// regression check: compare against a previously captured dump
				using (StreamReader sr1 = new StreamReader("fs-dump-ent.txt"))
				using (StreamReader sr2 = new StreamReader("fs-dump-ent-oldway.txt"))
					if (sr1.BaseStream.ContentsEquals(sr2.BaseStream))
						Console.WriteLine("fs-dump-ent ok.");
					else
						Console.WriteLine("fs-dump-ent != fs-dump-ent-oldway");
			}
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		///
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// Build an HTML summary of the loaded grammar: type counts (authored vs. GLB) and
		/// entry counts per rule category, drawn from bcpo and the entry manager.
		/// </summary>
		public String GetInfo()
		{
			StringBuilder sb = new StringBuilder();

			sb.AppendFormat("<b>Information for '{0}':</b><br />", Name);
			sb.Append("<br />");
			sb.AppendFormat("Number of authored types: <span style='color:#008000;'>{0:#,#}</span><br />", bcpo.c_authored_types);
			sb.AppendFormat("Number of GLB types: <span style='color:#008000;'>{0:#,#}</span><br />", bcpo.type_arr.Length - bcpo.c_authored_types);
			sb.AppendFormat("Total types: <span style='color:#008000;'>{0:#,#}</span><br />", bcpo.type_arr.Length);
			sb.Append("<br />");
			sb.AppendFormat("Number of grammar rules: <span style='color:#008000;'>{0:#,#}</span><br />", this.em.AllEntries.Count(e => e is SyntaxRule));
			// note: exact-type match here (not 'is'), presumably to exclude LexicalRule subclasses — TODO confirm
			sb.AppendFormat("Number of lexical rules: <span style='color:#008000;'>{0:#,#}</span><br />", this.em.AllEntries.Count(e => e.GetType() == typeof(LexicalRule)));
			sb.AppendFormat("Number of inflection rules: <span style='color:#008000;'>{0:#,#}</span><br />", this.em.AllEntries.Count(e => e is MorphologicalRule));
			sb.AppendFormat("Number of start symbols: <span style='color:#008000;'>{0:#,#}</span><br />", this.em.AllEntries.Count(e => e is StartSymbol));
			sb.AppendFormat("Number of node labels: <span style='color:#008000;'>{0:#,#}</span><br />", this.em.AllEntries.Count(e => e is NodeLabel));
			//sb.AppendFormat("Number of lexicon entries: <span style='color:#008000;'>{0:#,#}</span><br />", this.lex.Count);
			sb.Append("<br />");
			//sb.AppendFormat("Number of edges: <span style='color:#008000;'>{0:#,#}</span><br />", loadtray.PoolMarkCount);

			return sb.ToString();
		}
	};
}
