﻿//#define SYSTEM_DRAWING
//#define OB_OPT
//#define CHECK_REDUNDANT_LINKS

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
#if SYSTEM_DRAWING
using System.Drawing;
#endif

using alib.BitArray;
using alib.Collections;
using alib.Collections.ReadOnly;
using alib.Debugging;
using alib.Enumerable;
using alib.String;
using alib;

namespace agree
{
	[DebuggerDisplay("{ToString(),nq}")]
	public struct type_parents
	{
		public Type type;
		public Type[] parents;

		/// <summary>
		/// Pairs a type with a snapshot of its parent types. The parent sequence is
		/// used directly when it is already an array; otherwise it is materialized.
		/// </summary>
		public type_parents(Type t, IEnumerable<Type> parents)
		{
			this.type = t;
			Type[] arr = parents as Type[];
			this.parents = arr != null ? arr : parents.ToArray();
		}

		/// <summary>Debugger display: parent count followed by the type's text.</summary>
		public override string ToString()
		{
			return parents.Length + " " + type.ToString();
		}
	};

	public sealed partial class TypeUtils : has_grammar_base
	{
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// Check authored type hierarchy for redundant parent-child relationships.
		/// DELPH-IN grammars seem to be generally well-behaved in this regard, so the check is currently not
		/// performed as a matter of course on the supplied hierarchy. Note that this version does not do
		/// large bit operations, instead examining the Types' parent/child HashSets.
		/// </summary>
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		[DebuggerDisplay("{ToString(),nq}")]
		class covering_child
		{
			// the child type whose code bits are being tracked
			[DebuggerBrowsable(DebuggerBrowsableState.Never)]
			public Type ch;
			// bit positions (from the child's Ait-Kaci code) which this child still covers
			[DebuggerBrowsable(DebuggerBrowsableState.RootHidden)]
			public HashSet<int> cov = new HashSet<int>();

			public override String ToString()
			{
				return "child " + ch.Name + " covers " + cov.Count + " bits: " + cov.StringJoin(" ");
			}
		};

		[DebuggerDisplay("{ix}  covered by {covered_by.Count}: {ToString(),nq}")]
		struct index_coverage
		{
			// a single bit position within the Ait-Kaci code
			[DebuggerBrowsable(DebuggerBrowsableState.Never)]
			public int ix;
			// the set of children whose codes currently cover that bit
			[DebuggerBrowsable(DebuggerBrowsableState.RootHidden)]
			public HashSet<covering_child> covered_by;

			public override String ToString()
			{
				var names = covered_by.Select(cc => cc.ch.Name);
				return names.StringJoin(" ");
			}
		};

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// Check that every subsumption relationship implied by the Ait-Kaci bit codes is also present in the
		/// types' recorded ancestor sets. For each pair of types whose bit vectors intersect, the intersection
		/// must correspond either to one of the two operands (direct subsumption) or to a registered GLB type;
		/// in each case the subsumed type(s) must list the subsumer among their ancestors. Each missing edge is
		/// reported to the console once; the total count of missing links is returned.
		/// NOTE(review): indexing 'rghs' by '_id' assumes each type's '_id' equals its position in 'types_in'
		/// (this invariant is asserted in _verify_array_partial_order) — confirm for all callers.
		/// </summary>
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		int check_missing_links(Type[] types_in)
		{
			int c_missing = 0;
			using (TimingReport tr = new TimingReport(Console.Out, "Check for missing lattice edges"))
			{
				// per-type ancestor sets, indexed by type '_id'
				HashSet<Type>[] rghs = types_in.Select(t => t.AllAncestors).ToArray();

				for (int i = 0; i < types_in.Length; i++)
				{
					Type t0 = types_in[i];
					List<Type> errors = null;
					Type upper_error = null;
					// compare t0 against every type which follows it in the array
					for (int j = types_in.Length - 1; j > i; j--)
					{
						Type t1 = types_in[j];

						// AND the Ait-Kaci codes; a null result apparently signals an empty
						// intersection (no common lower bound) — TODO confirm AndWithHash contract
						BitArr ba = t0.bit_vec.AndWithHash(t1.bit_vec);
						if (ba == null)
							continue;
						else if (ba.OnesCount == t0.bit_vec.OnesCount)
						{
							// t0's code subsumed by a later entry's code would violate the
							// expected physical ordering of 'types_in'
							throw new Exception();
						}
						else if (ba.OnesCount == t1.bit_vec.OnesCount)
						{
							// t1 is subsumed by t0, so t0 must appear among t1's ancestors
							if (!rghs[t1._id].Contains(t0))
							{
								if (upper_error == null)
								{
									upper_error = t0;
									errors = new List<Type>();
								}
								errors.Add(t1);
							}
						}
						else if (ba.OnesCount != 0)
						{
							// proper intersection: it must name a registered GLB type, and that
							// GLB must have both t0 and t1 among its ancestors
							Type glb;
							if (!bcpo.code_dict.TryGetValue(ba, out glb))
								throw new Exception("missing GLB");
							var hs = rghs[glb._id];
							if (!hs.Contains(t0))
							{
								if (upper_error == null)
								{
									upper_error = t0;
									errors = new List<Type>();
								}
								errors.Add(glb);
							}
							if (!hs.Contains(t1))
							{
								if (upper_error == null)
								{
									upper_error = t1;
									errors = new List<Type>();
								}
								errors.Add(glb);
							}
						}
						else
							throw new Exception();
					}
					if (upper_error != null)
					{
						c_missing += errors.Count;

						/// prevent further reporting of this problem
						foreach (var t in errors)
							rghs[t._id].Add(upper_error);

						Type lower_error = errors.RemoveLast();
						Console.Write("error: cannot reach {0} from {1}", upper_error.Name, lower_error.Name);
						if (errors.Count > 0)
							Console.Write(" and {0} other subsumed types", errors.Count);
						Console.WriteLine();
					}
				}
			}
			return c_missing;
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// Examine Ait-Kaci code bits. If there's only one child which covers a parent bit, then the child is required, so 
		/// deduct all of the child's code bits from the set of bits required to cover the parent. Redundant links are
		/// uncovered during this process as contributing coverage which is no longer necessary.
		/// Each redundancy found is reported to 'sw'; returns true if any redundant link was detected.
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		bool check_redundant_links(System.IO.TextWriter sw, IEnumerable<Type> types)
		{
			int c_edges_processed = 0;
			bool f_any = false;
			foreach (Type p in types)
			{
				//if (p == ((TypeMgr)this).TopType)
				//    continue;
				//Debug.Assert((p.Children.Length == 0) == p.IsLeaf);
				//if (p.IsLeaf)
				//    continue;

				int c_children = p.Children.Length;

				// leaf types have no child links to check
				if (c_children == 0)
					continue;
				// the cached child count must agree with the actual child array
				if (p.c_children != c_children)
					throw new Exception();
				c_edges_processed += c_children;

				/// the key data structure is a list of (bit-position, set-of-covering-children) mappings, where 
				/// a 'covering-child' is a shared reference type which keeps track of the bit positions it 
				/// is (still) covering. We become interested in cases where the set-of-covering-children has
				/// more than one member, in particular, whether a member which becomes the sole remaining
				/// covering child is covering, at that moment, only bits which are being eliminated.
				var coverage_map = p.Children
									.SelectMany(c =>
									{
										var cc = new covering_child { ch = c };
										cc.cov.UnionWith(c.bit_vec.OnesPositions());
										return cc.cov.Select(ix => new { ix = ix, cc = cc });
									})
									.GroupBy(a => a.ix, a => a.cc)
									.Select(g => new index_coverage { ix = g.Key, covered_by = g.ToHashSet() })
									.ToList();

				// repeatedly scan for bits covered by exactly one child; each such child is a
				// required link, and anything it subsumes becomes a redundancy candidate
				for (int i = 0; i < coverage_map.Count; )
				{
					var g = coverage_map[i];
					if (g.covered_by.Count == 1)
					{
						// 'ch_upper' is the sole child covering bit 'g.ix', so the link to it is required
						var ch_upper = g.covered_by.First();
						Debug.Assert(ch_upper.cov.Count > 0);

						// redundancy reporting is exception-driven: the first redundant sibling found
						// below aborts the scan for this parent via RedundantGraphLinksException
						try
						{
							coverage_map.RemoveAll(ix_cov =>
							{
#if false
								foreach (var ch_lower in ix_cov.covered_by)
								{
									if (ch_lower == ch_upper)
										return true;

									if (ch_lower.ch.m_code.IsSubsumedBy(ch_upper.ch.m_code))
									{
										String s_l = String.Format("'{0} := {1}'", ch_lower.ch.Name, p.Name);
										String s_u = String.Format("'{0} := {1}'", ch_upper.ch.Name, p.Name);
										String msg = String.Format("Redundant: {0,44}, already covered by {1}.",
											s_l,
											s_u);
										throw new RedundantGraphLinksException(msg);
									}
								}
								return false;
#else
								ix_cov.covered_by.RemoveWhere(ch_lower =>
								{
									if (ch_lower == ch_upper)
										return true;

									// another child subsumed by the required child makes its direct
									// link from 'p' redundant; build a detailed report and bail out
									if (ch_lower.ch.IsSubsumedBy(ch_upper.ch))
									{
										//if (ch_lower.ch.i_parents.Contains(p))
										//    if (ch_lower.ch.i_parents.Contains(ch_upper.ch))
										//        Nop.X();
										//    else
										//        Nop.X();
										//else
										//    Nop.X();
										String s_l = String.Format("'{0} := {1}'", ch_lower.ch.Name, p.Name);
										//String s_u = String.Format("'{0} := {1}'", ch_upper.ch.Name, p.Name);
										//String msg = String.Format("Redundant: {0,44}, already covered by {1}.",
										//    s_l,
										//    s_u);
										String msg = String.Format("Redundant: {0}, already covered by:\r\n", s_l);

										// enumerate the alternate parent-chain paths which witness the redundancy
										bool f_started = false;
										foreach (var sp in find_paths_between(ch_lower.ch, p).Skip(1))
										{
											msg += "\t  " + sp + "\r\n";
											f_started = true;
										}

										var iz = find_paths_between(ch_lower.ch, ch_upper.ch);
										if (f_started && iz.Any())
											msg += "\tand\r\n";
										foreach (var sp in iz)
										{
											msg += "\t  " + sp + "\r\n";
											f_started = true;
										}

										iz = find_paths_between(ch_upper.ch, ch_lower.ch);
										if (f_started && iz.Any())
											msg += "\tand\r\n";
										foreach (var sp in iz)
										{
											msg += "\t  " + sp + "\r\n";
											f_started = true;
										}

										iz = find_paths_between(ch_upper.ch, p);
										if (f_started && iz.Any())
											msg += "\tand\r\n";
										foreach (var sp in iz)
										{
											msg += "\t  " + sp + "\r\n";
											f_started = true;
										}

										msg += String.Format("\tparents of {0}:\t\n", ch_lower.ch.Name);
										foreach (var cp in ch_lower.ch.parents)
										{
											msg += "\t  " + cp.Name + "\r\n";
										}

										Type tu = UnifyTypesFull(ch_lower.ch, ch_upper.ch);
										msg += "\tunification:\r\n";
										if (tu == null)
											msg += "\t  none!\r\n";
										else
											msg += String.Format("\t  {0} = {1} & {2}\r\n", tu.Name, ch_lower.ch.Name, ch_upper.ch.Name);

										//msg += "\tjoint children:\r\n";
										//foreach (var jc in ch_lower.ch.AllDescendants.Intersect(ch_upper.ch.AllDescendants))
										//{
										//    msg += String.Format("\t  {0}\r\n", jc.Name);
										//}


										throw new RedundantGraphLinksException(msg);
									}
									return false;
								});
#endif
								return ix_cov.covered_by.Count == 0;
							});
							// coverage sets have changed; rescan from the start
							i = 0;
						}
						catch (RedundantGraphLinksException ex)
						{
							// record and report the redundancy, then move on to the next parent type
							f_any = true;
							sw.WriteLine(ex.Message);
							break;
						}
					}
					else
						i++;
				}
			}
			sw.WriteLine("Processed {0} child edges", c_edges_processed);
			return f_any;
		}

		/// <summary>
		/// Enumerate every parent-chain path from 'lower' up to 'seek_upper', each rendered
		/// as a ':='-separated chain beginning with 'lower's own name.
		/// </summary>
		static IEnumerable<String> find_paths_between(Type lower, Type seek_upper)
		{
			return fpb2(lower, seek_upper).Select(tail => lower.Name + " := " + tail);
		}
		/// <summary>
		/// Depth-first walk up the parent links of 'lower', yielding one ':='-separated name
		/// chain per distinct path which reaches 'seek_upper'.
		/// </summary>
		static IEnumerable<String> fpb2(Type lower, Type seek_upper)
		{
			foreach (var parent in lower.parents)
			{
				if (parent == seek_upper)
				{
					// direct hit: the path ends here
					yield return parent.Name;
					continue;
				}
				// recurse through this parent, prefixing its name onto each discovered tail
				foreach (String tail in fpb2(parent, seek_upper))
					yield return parent.Name + " := " + tail;
			}
		}


		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// Several facets of the lattice partial order should agree:
		/// 1. the physical order in the sequence, should agree with...
		/// 2. the index of that physical order as stored in the type...
		/// 3. a subsumption relationship obtained from ANDing the Ait-Kaci bits...
		/// 4. the m_level notation stored in the type
		/// 5. (optional) order by the descending number of set (1s) bits, (which places all leaf nodes at the end)...
		/// Throws on any hard violation; softer discrepancies are counted and reported to the console.
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		void _verify_array_partial_order(IEnumerable<Type> ie)
		{
			using (var tr = new TimingReport(Console.Out, "Verify type array topology"))
			{
				Type[] arr = ie as Type[] ?? ie.ToArray();
				int c_prob = 0;
				int c_top = 0;
				for (int i = 0; i < arr.Length; i++)
				{
					Type upper = arr[i];

					// each type's stored index must match its physical position
					if (upper._id != i)
						throw new Exception();

					// the distinguished TOP type must occupy exactly the configured top id
					if ((upper == td.ΔTop) != (upper._id == configuration.TypesConfig.TopId))
						throw new Exception();

					if (upper._id == configuration.TypesConfig.TopId)
					{
						// there must be only one TOP, and its code must have every bit set
						if (++c_top > 1)
							throw new Exception();
						if (!upper.bit_vec.IsAllOnes)
							throw new Exception();
					}

					// a type is a leaf (no children) exactly when its code has a single set bit
					if ((upper.bit_vec.OnesCount == 1) != (upper.c_children == 0))
						throw new Exception();

					// a type which owns a bit position must have that bit set in its own code
					if (upper.bit_num >= 0 && !upper.bit_vec[upper.bit_num])
						throw new Exception();

#if M_LEVEL
					// pairwise checks: level ordering, ones-count ordering within a level,
					// and subsumption vs. level consistency
					for (int j = i + 1; j < arr.Length; j++)
					{
						Type lower = arr[j];

						if (upper.m_level > lower.m_level)
						{
							Console.WriteLine("L {0,-25} ix: {1,4} level: {2:X4}  >  {3,-25} ix: {4,4} level: {5:X4} {6}",
								upper.Name, i, upper.m_level,
								lower.Name, j, lower.m_level,
								code_dict.ContainsKey(upper.m_code & lower.m_code) ? "code" : "no-code");
							c_prob++;
						}

						if (upper.m_level == lower.m_level && upper.m_code.OnesCount < lower.m_code.OnesCount)
							throw new Exception();

						if (upper.m_code.IsSubsumedBy(lower.m_code))
						{
							if (upper.m_level == lower.m_level)
								throw new Exception();

							Console.WriteLine("C {0,-25} ix: {1,4} level: {2:X4}  [  {3,-25} ix: {4,4} level: {5:X4} {6}",
								lower.Name, j, lower.m_level,
								upper.Name, i, upper.m_level,
								code_dict.ContainsKey(upper.m_code & lower.m_code) ? "code" : "no-code");
							c_prob++;
						}
					}
#endif
				}
				LatticeSummaryReport(arr);
				Console.WriteLine(Environment.NewLine + "{0} problems.", c_prob == 0 ? "no" : c_prob.ToString());
			}
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// <summary>
		/// Print a per-level summary of the type lattice to the console: one row per 'm_level' group showing
		/// the type count, index range, GLB count, parent-edge count, constraint counts and code-bit range,
		/// followed by a totals row. The body is compiled only when M_LEVEL is defined; otherwise this method
		/// is a no-op.
		/// </summary>
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		public void LatticeSummaryReport(ICollection<Type> arr)
		{
			//String xx1 = " info    Level   #Types     from  ix  to   GLBs  pedges  #cnstr  #atom  bits-min-max";
			//String xx2 = "-----  -------  -------     ----    ----   ----  ------  ------  -----  ----    ----";
			//SysObj.Instance.TransactionStatus(this,"Type lattice and partial order summary".PadCenter(xx1.Length, ' ').TrimEnd());
			//SysObj.Instance.TransactionStatus(this, xx1);
			//SysObj.Instance.TransactionStatus(this, xx2);

#if M_LEVEL
			int lev = 0;
			int pedges_tot = 0;
			int cglb_tot = 0;
			int cst_tot = 0;

			// group the types by level, preserving each type's array index for the range columns
			foreach (var g in arr.Select((t, ix) => new { t, ix }).GroupBy(a => a.t.m_level))
			{
				int c_glb = g.Count(a => a.t.IsGlb);
				cglb_tot += c_glb;

				int pe = g.Sum(a => a.t.i_parents.Count);
				pedges_tot += pe;

				int cst = g.Count(a => a.t.HasAppropriateFeatures);
				cst_tot += cst;

				int c_lev = g._Count();

				Console.WriteLine("{0,5} {1,8:X}     {2,4}     {3,4} .. {4,4}  {5,5}  {6,6}  {7,6}  {8,5}  {9,4} .. {10,4}",
					lev == 0 ? TopType.Name : g.Key == int.MaxValue ? "leaf" : lev.ToString(),
					g.Key,
					c_lev,
					g.Min(a => a.ix),
					g.Max(a => a.ix),
					c_glb == 0 ? "" : c_glb.ToString(),
					pe,
					cst == 0 ? "" : cst.ToString(),
					(c_lev - cst) == 0 ? "" : (c_lev - cst).ToString(),
					g.Min(a => a.t.m_code.OnesCount),
					g.Max(a => a.t.m_code.OnesCount)
					);

				lev++;
			}
			// totals row
			Console.WriteLine("-----           -------                    ----  ------  ------  -----  ----    ----");
			Console.WriteLine("{0,5} {1,8:X}     {2,4}     {3,4}    {4,4}  {5,5}  {6,6}  {7,6}  {8,5}  {9,4}    {10,4}",
				lev,
				"",
				arr.Count,
				"",
				"",
				cglb_tot,
				pedges_tot,
				cst_tot,
				arr.Count - cst_tot,
				"",
				"");
#endif
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// build a new partial order from the type's parent lists, and verify that new partial order. Restore the
		/// previous levels back when complete. Compiled only when M_LEVEL is defined; otherwise a no-op.
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		void _validate_lattice_partial_order(IEnumerable<Type> ie)
		{
#if M_LEVEL
			// save each type's current m_level, zeroing it in the process
			var sav_lev = ie.ToDictionary(t => t, t => Interlocked.Exchange(ref t.m_level, 0));
			//new_child_gen++;
			foreach (Type t in ie)
				t.FindLevel();
			_verify_array_partial_order(ie);
			// restore the saved levels
			foreach (var kvp in sav_lev)
				kvp.Key.m_level = kvp.Value;
#endif
		}

		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/// Write a three-line console picture of a type and its neighborhood: its parents (centered) above,
		/// the new type itself in the middle, and its children below. Lines wider than the console buffer are
		/// written unpadded. Compiled only when M_LEVEL is defined; otherwise a no-op.
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		static void _console_dump(IEnumerable<Type> upper, Type newtype, IEnumerable<Type> lower)
		{
#if M_LEVEL
			int bufwide = Console.BufferWidth - 1;
			// "level-name" labels; parent/child names are truncated to 20 characters
			String ps = upper.Select(t => String.Format("{0}-{1}", t.m_level, t.Name.Left(20))).StringJoin(" ");
			String ms = String.Format("{0}-{1}", newtype.m_level, newtype.Name);
			String cs = lower.Select(t => String.Format("{0}-{1}", t.m_level, t.Name.Left(20))).StringJoin(" ");

			if (ps.Length > bufwide)
				Console.WriteLine(ps);
			else
				Console.WriteLine(ps.PadCenter(bufwide, ' ').TrimEnd());
			Console.WriteLine(ms.PadCenter(bufwide, ' ').TrimEnd());
			if (cs.Length > bufwide)
				Console.WriteLine(cs);
			else
				Console.WriteLine(cs.PadCenter(bufwide, ' ').TrimEnd());
			Console.WriteLine();
#endif
		}

		/// <summary>
		/// Write the current lattice dump to 'lattice-latest.txt', first preserving any previous dump as
		/// 'lattice-prev.txt', then archive a timestamped copy of the new dump.
		/// </summary>
		void WriteLattice()
		{
			if (System.IO.File.Exists("lattice-latest.txt"))
				System.IO.File.Copy("lattice-latest.txt", "lattice-prev.txt", true);
			using (System.IO.StreamWriter sw = new System.IO.StreamWriter("lattice-latest.txt"))
			{
				WriteLattice(sw);
				sw.BaseStream.Flush();
			}
			// 'HH' (24-hour clock) rather than 'hh': with 'hh' and no AM/PM designator, a morning and an
			// afternoon run produce the same archive filename and the later copy silently overwrites the
			// earlier one (overwrite: true).
			System.IO.File.Copy("lattice-latest.txt", String.Format("lattice-{0:yyyyMMdd\\-HHmmss}.txt", DateTime.Now), true);
		}

		/// <summary>
		/// Write a text dump of the lattice to 'sw': a total parent-edge count, then one entry per type in
		/// 'bcpo.type_arr' showing its index, bit assignment (or TOP/GLB marker), code-bit count, parent count
		/// and name, followed by its parents ordered by descendant count.
		/// </summary>
		void WriteLattice(System.IO.StreamWriter sw)
		{
			sw.WriteLine("{0} edges", bcpo.type_arr.Sum(t => t.parents.Count));
			sw.WriteLine();

			for (int i = 0; i < bcpo.type_arr.Length; i++)
			{
				Type t = bcpo.type_arr[i];
				// (fix: a stray 't.Name' argument was previously passed here; the format string only
				// consumes {0}, so removing it leaves the output byte-identical)
				sw.Write("{0,4}", i);
				if (t.bit_num >= 0)
					sw.Write(" bit:{0,-4}", t.bit_num);
				else if (t.bit_num == -1)
					sw.Write(" TOP");		// bit_num -1 is rendered as the top type
				else if (t.bit_num == -2)
					sw.Write(" GLB");		// bit_num -2 is rendered as a computed GLB type
				else
					sw.Write(" ????");
				sw.WriteLine(" cbits:{0,-4} cpar:{1} {2}", t.bit_vec.OnesCount, t.parents.Count, t.Name);
				foreach (var p in t.parents.OrderBy(x => x, Type.Compare.DescendantCount.Reverse))
					sw.WriteLine("\t{0,-4} {1}", p._id, p.Name);
			}
		}
	};

	public sealed partial class TypeLattice : has_grammar_base, _ICollection<Type>, _IList<Type>
	{
		//[Conditional("DEBUG")]
		/// <summary>
		/// Debugging aid which summarizes the type hierarchy. Currently inert: the parent-edge count is
		/// computed but its reporting is commented out, and the edge-bit budget calculation below is
		/// excluded via '#if false'.
		/// </summary>
		public void _dbg_display_type_hierarchy_info()
		{
			//Task.Factory.StartNew(() =>
			{
				// computed but currently unreported; see the commented-out TransactionStatus call
				int c_cp_edges = type_arr.Sum(t => t.parents.Count);
				//SysObj.Instance.TransactionStatus(this, "types {0} closed {1} glb {2} cpe {3}",
				//	c_authored_types,
				//	type_arr.Length,
				//	type_arr.Length - c_authored_types,
				//	c_cp_edges);
			}

#if false
			/// calculate how many bits will be used for stringid/typeid
			int c_f_bits = 32 - alib.Bits._bitarray_ext.HighestOne((int)Edge.Flag.LowestFlagValue);
			int c_t_bits = alib.Bits._bitarray_ext.HighestOne(type_arr.Length) + 1;
			int c_s_bits = alib.Bits._bitarray_ext.HighestOne(strings.Count) + 1;
			int c_ts_bits = Math.Max(c_t_bits, c_s_bits);

			if ((Edge.Flag)((1 << c_ts_bits) - 1) > Edge.Flag.MultiIdMask)
				throw new TdlException("Too many types ({0}) or strings ({1}) defined in the type hierarchy.", c_authored_types, strings.Count);

			var fconfigs = type_arr
							.Select(t => new { type = t, features = t.fc })		//anon types are class, not struct
							.GroupBy(a => a.features, a => a.type)
							.ToArray();

			Console.Write("{0} types ({1}b), {2} strings ({3}b)",
				c_authored_types,
				c_t_bits,
				strings.Count,
				c_s_bits);

			if (feat_arr != null)
				Console.Write(", {0} feat in {1} configurations (max feat/type {2}). -> edge bits: flags:{3} fconfigs:{4} sid/tid: {5}",
					feat_arr.Length,
					fconfigs.Length,
					fconfigs.Max(grp => grp.Key.Count),
					c_f_bits,
					32 - c_f_bits - c_ts_bits,
					c_ts_bits);
			Console.WriteLine();
#endif
		}

		/// <summary>
		/// Scratchpad of lattice diagnostics. Everything here is currently disabled: the LATTICE_TESTING
		/// regions (a BCPO re-embedding benchmark loop) are compiled out, and the remaining diagnostic
		/// calls are commented out. Kept for ad-hoc use during lattice development.
		/// </summary>
		void various_reports()
		{
#if LATTICE_TESTING
			// snapshot the current name->Type map and each type's resettable state so the
			// benchmark loop below can rebuild the lattice repeatedly from the same input
			var xx = new Dictionary<String, Type>(this);
			var xx1 = base.Values.ToDictionary(kvp => kvp.Name, kvp =>
				new
				{
					kvp.c_children,
					flags = kvp.m_flags & (Instance.Flags.HasConstraints | Instance.Flags.TopType),
					parents = kvp.Parents.ToArray(),
					kvp.m_id,
				});
#endif

			//int c_missing = check_missing_links(type_arr);
			//if (c_missing != 0)
			//{
			//    Console.Write("total missing {0}", c_missing);
			//    Environment.Exit(0);
			//}

			//check_redundant_links(Console.Out, type_arr);

			//_verify_array_partial_order(type_arr);

			//WriteLattice();

			//LatticeSummaryReport(type_arr);

#if LATTICE_TESTING
			System.Threading.Thread.Sleep(300);
			Stopwatch swz = new Stopwatch();
			int j;

			//BitArr.IgnoreHintThreshold = 18;
			//while (true)
			{
				double tms = 0;

				//if (++BitArr.IgnoreHintThreshold == 20)
				//    BitArr.IgnoreHintThreshold = 7;
				//if (BitArr.IgnoreHintThreshold == 12)
				//    BitArr.IgnoreHintThreshold = 18;
				//else
				//    BitArr.IgnoreHintThreshold = 12;

				// 40 timed iterations: reset every type to its snapshot state, rebuild the
				// dictionary, and re-run the BCPO embedding
				for (j = 0; j < 40; j++)
				{
					foreach (var t in xx.Values)
					{
						t.m_id = xx1[t.Name].m_id;
						t.m_flags = xx1[t.Name].flags;
						t.m_consdefs = null;
						t.m_level = 0;
						t.m_bit = 0;
						t.m_code = null;
						t.c_children = xx1[t.Name].c_children;
						t.i_parents = new HashSet<Type>(xx1[t.Name].parents);
					}
					this.Clear();
					code_dict = null;
					type_arr = null;
					c_authored_types = 0;
					foreach (var kvp in xx)
						this.Add(kvp.Key, kvp.Value);

					c_sort = c_tot = 0;
					swz.Reset();
					swz.Start();
					base.EmbedBcpo();
					swz.Stop();

					Double etm = swz.Elapsed.TotalMilliseconds;
					tms += etm;
					Console.WriteLine("{0,2} {1:0.000} {2} {3:0.00}",
						j,
						etm,
						type_arr.Sum(t => t.Parents.Count),
						(double)c_sort * 100.0 / c_tot);
				}
				// mean milliseconds per embedding
				Console.WriteLine("{0:0.000}", tms / j);
				//Console.WriteLine("bat:{0,-2} {1:0.000}", BitArr.IgnoreHintThreshold, tms / j);
				//Console.WriteLine("AND fail {0:0.000}", BitArr.c_null * 100.0 / BitArr.c_tot);
				//Console.WriteLine("bitarr: {0:0.000}", BitArr.sw0.Elapsed.TotalMilliseconds);
			}
			Environment.Exit(0);
#endif

		}

#if LATTICE_TESTING
		// benchmark counters updated during EmbedBcpo runs (see various_reports)
		public static int c_sort;
		public static int c_tot;
#endif

		// Explicit _ICollection<Type>/_IList<Type> surface: this class does not support mutation,
		// copying, or synchronization queries through these members, so each one deliberately throws.
		public void Add(Type item) { throw not.impl; }
		public void Clear() { throw not.impl; }
		public bool Contains(Type item) { throw not.impl; }
		public void CopyTo(Type[] array, int arrayIndex) { throw not.impl; }
		public bool IsReadOnly { get { throw not.impl; } }
		public bool Remove(Type item) { throw not.impl; }
		public void CopyTo(System.Array array, int index) { throw not.impl; }
		public bool IsSynchronized { get { throw not.impl; } }
		public object SyncRoot { get { throw not.impl; } }


#if SYSTEM_DRAWING
		/// <summary>
		/// Render the lattice's Ait-Kaci code matrix as a bitmap: one row per type, one column per code
		/// bit, with a 2-pixel white border. Each set bit is drawn as a black pixel surrounded by a soft
		/// darkened halo (via dpdp2).
		/// </summary>
		protected Bitmap write_lattice_image(Type[] arr)
		{
			int code_size = arr[0].m_code.Count;
			// +4 in each dimension leaves the 2-pixel border on every side
			Bitmap bm = new Bitmap(code_size + 4, arr.Length + 4, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
			using (Graphics gx = Graphics.FromImage(bm))
			{
				gx.FillRectangle(Brushes.White, new Rectangle(0, 0, arr[0].m_code.Count + 4, arr.Length + 4));
			}
			Color black = Color.FromArgb(0, 0, 0);
			for (int i = 0; i < arr.Length; i++)
			{
				int y = i + 2;
				Type t = arr[i];
				foreach (int bp in t.m_code.OnesPositions())
				{
					int x = bp + 2;
					bm.SetPixel(x, y, black);

					// darkened halo around the set-bit pixel
					dpdp2(bm, x, y, .8);
				}
			}
			return bm;
		}

		/// <summary>
		/// Darken the 8 pixels surrounding (x, y): diagonal neighbors by the full 'damp'
		/// factor, edge-adjacent neighbors by the lighter 'damp * .7' factor.
		/// </summary>
		static void dpdp1(Bitmap bm, int x, int y, double damp)
		{
			for (int dy = -1; dy <= 1; dy++)
			{
				for (int dx = -1; dx <= 1; dx++)
				{
					if (dx == 0 && dy == 0)
						continue;		// skip the center pixel itself
					bool diagonal = dx != 0 && dy != 0;
					DarkenPx(bm, x + dx, y + dy, diagonal ? damp : damp * .7);
				}
			}
		}

		/// <summary>
		/// Darken a two-ring halo around (x, y): the inner 8-neighbor ring via dpdp1, then the outer
		/// ring at distance 2, with corners darkened by the full factor and edges by 'damp * .8'.
		/// NOTE(review): dpdp1 is invoked twice with identical arguments, so the inner ring is darkened
		/// twice — possibly intentional (stronger inner falloff), possibly a duplicated line; confirm
		/// before changing.
		/// </summary>
		static void dpdp2(Bitmap bm, int x, int y, double damp)
		{
			dpdp1(bm, x, y, damp);

			dpdp1(bm, x, y, damp);

			// outer ring, distance 2: top row
			DarkenPx(bm, x - 2, y - 2, damp);
			DarkenPx(bm, x - 1, y - 2, damp * .8);
			DarkenPx(bm, x + 0, y - 2, damp * .8);
			DarkenPx(bm, x + 1, y - 2, damp * .8);
			DarkenPx(bm, x + 2, y - 2, damp);

			// left and right columns
			DarkenPx(bm, x - 2, y - 1, damp * .8);
			DarkenPx(bm, x + 2, y - 1, damp * .8);

			DarkenPx(bm, x - 2, y + 0, damp * .8);
			DarkenPx(bm, x + 2, y + 0, damp * .8);

			DarkenPx(bm, x - 2, y + 1, damp * .8);
			DarkenPx(bm, x + 2, y + 1, damp * .8);

			// bottom row
			DarkenPx(bm, x - 2, y + 2, damp);
			DarkenPx(bm, x - 1, y + 2, damp * .8);
			DarkenPx(bm, x + 0, y + 2, damp * .8);
			DarkenPx(bm, x + 1, y + 2, damp * .8);
			DarkenPx(bm, x + 2, y + 2, damp);

		}

		/// <summary>
		/// Scale each RGB channel of the pixel at (x, y) toward black by the 'damp' factor.
		/// </summary>
		static void DarkenPx(Bitmap bm, int x, int y, double damp)
		{
			Color c = bm.GetPixel(x, y);
			byte r = (byte)(c.R * damp);
			byte g = (byte)(c.G * damp);
			byte b = (byte)(c.B * damp);
			bm.SetPixel(x, y, Color.FromArgb(r, g, b));
		}
#endif
	}
};