﻿#if NOt
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;

namespace micwav
{
	class oldcode
	{
	}
		private void WriteFileTrailer(int SampleCount)
		{
			bw.Seek(4, SeekOrigin.Begin);
			bw.Write((int)(SampleCount + 36));
			bw.Seek(40, SeekOrigin.Begin);
			bw.Write(SampleCount);
			bw.Close();
		}
// Drains whatever the capture buffer has produced so far, appends it to the
// open WAV file, and finishes the file by patching the header lengths.
private void WriteFileChunks()
		{
			int captureCursor, readCursor = 0;
			TheCaptureBuffer.GetCurrentPosition(out captureCursor, out readCursor);

			// Hangover from the threaded implementation; always starts at 0 here.
			int startOffset = 0;

			// Bytes available between our offset and the device's read cursor,
			// allowing for circular-buffer wrap-around.
			int bytesToLock = readCursor - startOffset;
			if (bytesToLock < 0)
				bytesToLock += CaptureBuffSize;

			// Only consume whole notification-sized chunks.
			bytesToLock -= bytesToLock % NotifySize;
			if (bytesToLock == 0)
				return;

			byte[] grabbed = (byte[])TheCaptureBuffer.Read(startOffset, typeof(byte), DirectX.LockFlag.None, bytesToLock);
			bw.Write(grabbed, 0, grabbed.Length);

			// NOTE(review): named "SampleCount" downstream but it is a byte count.
			int totalBytes = grabbed.Length;

			// Advance and wrap the offset (not strictly required for a single drain).
			startOffset = (startOffset + grabbed.Length) % CaptureBuffSize;

			WriteFileTrailer(totalBytes);
		}

	/// <summary>
	/// Halts capture and flushes any remaining audio to disk.
	/// WriteFileChunks also writes the trailer and closes the file.
	/// </summary>
	public void StopRecording()
		{
			TheCaptureBuffer.Stop();
			WriteFileChunks();
		}

		// Dead verification helper: wraps `capturedData` in an in-memory WAV
		// container and checks that WaveFileReader (NAudio) decodes each 16-bit
		// sample to the same normalized float as a manual conversion.
		// NOTE(review): `capturedData` and `waveFormat` are not declared in this
		// method — presumably fields on the enclosing class; confirm before reviving.
		void unkStuff()
		{

			MemoryStream ms = new MemoryStream();//capturedData);

			// RIFF/WAVE four-character chunk tags.
			char[] Riff = { 'R', 'I', 'F', 'F' };
			char[] Wave = { 'W', 'A', 'V', 'E' };
			char[] Fmt = { 'f', 'm', 't', ' ' };
			char[] Data = { 'd', 'a', 't', 'a' };
			short padding = 1; // WAV audio-format tag field; 1 = PCM
			int formatLength = 0x10; // fmt chunk payload size for plain PCM
		//	int length = 0; // fill this in later!
			short shBytesPerSample = 2; // changing the WaveFormat recording parameters will impact on this
			// see referenced blog posts for more details

			// Emit a complete 44-byte PCM WAV header followed by the raw data.
			var bw = new BinaryWriter(ms);
			bw.Write(Riff);
			bw.Write(capturedData.Length + 36);//length);
			bw.Write(Wave);
			bw.Write(Fmt);
			bw.Write(formatLength);
			bw.Write(padding);
			bw.Write(waveFormat.Channels);
			bw.Write(waveFormat.SamplesPerSecond);
			bw.Write(waveFormat.AverageBytesPerSecond);
			bw.Write(shBytesPerSample);
			bw.Write(waveFormat.BitsPerSample);

			bw.Write(Data);
			bw.Write(capturedData.Length);//(int)0); // update sample later
			ms.Write(capturedData, 0, capturedData.Length);

			// Rewind and decode with WaveFileReader, comparing frame by frame.
			ms.Position = 0;
			WaveFileReader r = new WaveFileReader(ms);

			int i = 0;
			while (true)
			{
				float[] dd = r.ReadNextSampleFrame();
				if (dd == null)
					break; // end of stream

				byte b1 = capturedData[i];
				byte b2 = capturedData[i+1];

				// Manual little-endian 16-bit decode of the same sample.
				short s = BitConverter.ToInt16(capturedData, i);
//				ushort us = BitConverter.ToUInt16(capturedData, i);


				// Normalize to [-1, 1) the same way the reader is expected to.
				float a = s / 32768f;
				//float a2 = us / 32768f;

				i += 2;

				//byte[] value = new byte[2];
				//int read = Read(value, 0, 2);
				//if (read < 2)
				//    return false;
				//sampleValue = (float)BitConverter.ToInt16(value, 0) / 32768f;
				//return true;
				if (dd[0] != a)
					throw new Exception("err"); // decoder disagrees with manual conversion

				Console.WriteLine("AA: " + dd.Length + ", " + dd[0]);
			}
		}		


				// NOTE(review): orphaned fragment — these statements sit outside any
				// method and would not compile; the whole file is disabled via #if NOt.
				// (translated) what we can do is fix ONE SINGLE outlier of up to 5? samples.
			if (!fixupDone && false) // `&& false` permanently disables this branch
			{
				fixupDone = true;

				int diff = totSamplesGen - totSamples;
				if (Math.Abs(diff) > 4)
				{
					Log("diff too high " + diff);

					GenEntry h1 = highs[0];
					GenEntry h2 = highs[1];

					GenEntry l1 = lows[0];
					GenEntry l2 = lows[1];

					// Try to merge a single-occurrence entry into its partner,
					// checking highs before lows.
					bool aga = false;
					if (h1.SampleOccurances.Count == 1)
					{
						aga = NewMethod(diff, h1, h2, highs);
					}
					else if (h2.SampleOccurances.Count == 1)
					{
						aga = NewMethod(diff, h2, h1, highs);
					}
					else if (l1.SampleOccurances.Count == 1)
					{
						aga = NewMethod(diff, l1, l2, lows);
					}
					else if (l2.SampleOccurances.Count == 1)
					{
						aga = NewMethod(diff, l2, l1, lows);
					}


					// (translated) Too complicated... just check whether there are any
					// single stragglers we can take; could well work recursively.
					// (translated) Can merge single stragglers up to 4-5 samples away?
					if (aga)
						goto doAgain; //TODO: (translated) if we cut too much then never mind...
				}
			}




				//if (i == 1) // skip synch LOW
				//{
				//    s.GeneralizedSamples = s.Samples;
				//    Log(string.Format("{0} for {1} RAW samples.", s.Value ? "H" : "L", s.Samples, s.Samples));
				//}
				//else
//				{

		/// <summary>
		/// Tries to absorb a single-occurrence entry of <paramref name="h1"/> into a
		/// nearby occurrence bucket of <paramref name="h2"/> (keys within 4 of each
		/// other), removing h1 from <paramref name="list"/> on success.
		/// NOTE(review): <paramref name="diff"/> is unused — kept for interface
		/// compatibility. The commented-out copy below used a threshold of 5.
		/// </summary>
		private bool NewMethod(int diff, GenEntry h1, GenEntry h2, List<GenEntry> list)
		{
			KeyValuePair<int, int> lone = GetOneEntry(h1.SampleOccurances);
			if (lone.Value != 1)
				return false; // only merge entries that occurred exactly once

			foreach (var candidate in h2.SampleOccurances)
			{
				// Close enough (<= 4 samples apart) to merge the straggler in.
				if (Math.Abs(candidate.Key - lone.Key) <= 4)
				{
					list.Remove(h1);
					h2.SampleOccurances[lone.Key] += lone.Value;
					return true;
				}
			}

			return false;
		}




		/// <summary>
		/// Folds every sample-occurrence bucket of <paramref name="gother"/> into
		/// <paramref name="ge"/>, repoints the lookup table at the merged entry,
		/// and invalidates the donor.
		/// </summary>
		private void Merge(GenEntry ge, GenEntry gother, Hashtable<int, GenEntry> ht)
		{
			foreach (var bucket in gother.SampleOccurances)
			{
				ge.SampleOccurances[bucket.Key] += bucket.Value;
				// Future lookups for this key now resolve to the merged entry.
				ht[bucket.Key] = ge;
			}

			// Mark the donor as consumed.
			gother.SampleOccurances = null;
		}


		/// <summary>
		/// Returns the first entry of the table (callers use this on tables known
		/// to hold exactly one entry). Throws if the table is empty.
		/// </summary>
		private KeyValuePair<int, int> GetOneEntry(Hashtable<int, int> hashtable)
		{
			foreach (KeyValuePair<int, int> entry in hashtable)
			{
				return entry;
			}

			throw new Exception("err");
		}

	
	



	//if (!fixupDone)
			//{
			//    fixupDone = true;

			//    int diff = totSamplesGen - totSamples;
			//    if (Math.Abs(diff) > 4)
			//    {
			//        Log("diff too high " + diff);

			//        GenEntry h1 = highs[0];
			//        GenEntry h2 = highs[1];

			//        GenEntry l1 = lows[0];
			//        GenEntry l2 = lows[1];

			//        bool aga = false;
			//        if (h1.SampleOccurances.Count == 1)
			//        {
			//            aga = NewMethod(diff, h1, h2, highs);
			//        }
			//        else if (h2.SampleOccurances.Count == 1)
			//        {
			//            aga = NewMethod(diff, h2, h1, highs);
			//        }
			//        else if (l1.SampleOccurances.Count == 1)
			//        {
			//            aga = NewMethod(diff, l1, l2, lows);
			//        }
			//        else if (l2.SampleOccurances.Count == 1)
			//        {
			//            aga = NewMethod(diff, l2, l1, lows);
			//        }
		

			//        // For komplisert... bare sjekk om det er noen enkeltslengere vi kan ta, kan godt jobbe recursivt.
			//        // Kan merge enkeltslenger inntil 4-5 samples unna?
			//        if (aga)
			//            goto doAgain; //TODO: hvis vi kuttet for mye så drit å det...
			//    }
			//}



		//private bool NewMethod(int diff, GenEntry h1, GenEntry h2, List<GenEntry> list)
		//{
		//    KeyValuePair<int, int> en1 = GetOneEntry(h1.SampleOccurances);
		//    if (en1.Value == 1) // 1 occ
		//        foreach (var kv1 in h2.SampleOccurances)
		//        {
		//            int diff1 = kv1.Key - en1.Key;
		//            if (Math.Abs(diff1) <= 5)
		//            {
		//                // den kan merges

		//                list.Remove(h1);
		//                h2.SampleOccurances[en1.Key] += en1.Value;

		//                return true;
		//            }
		//        }
		//    return false;
		//}

		//private KeyValuePair<int, int> GetOneEntry(Hashtable<int, int> hashtable)
		//{
		//    foreach (var kv in hashtable)
		//        return kv;

		//    throw new Exception("err");
		//}

	// NOTE(review): orphaned fragment — statements outside any method; the file
	// is disabled via #if NOt. Logic: snapshot generalized values, reduce the
	// "highs" to a single entry, recompute totals, and keep the result only if
	// it improved without undershooting the raw sample count.
	Hashtable<Sample, int> backupGenVal = new Hashtable<Sample,int>();
				// Snapshot so we can roll back if the reduction makes things worse.
				foreach (var sam in samples)
					backupGenVal.Add(sam, sam.GeneralizedSamples);

				while (CountGenEntrys(highs) > 1) // try with 1 high
				{
					if (!Reduce(highs, b))
						b++;
				}


				// Recompute totals with the reduced generalization.
				int newTotSamples = 0;
				int newTotSamplesGen = 0;
				calced = new HashSet<GenEntry>();
				for (int i = 0; i < samples.Count; i++)
				{
					Sample s = samples[i];

					if (i == 1) // skip synch LOW
					{
						s.GeneralizedSamples = s.Samples;
						
					}
					else
					{
						if (s.Value)
							CalcOccurance(highs, s, calced);
						else
							CalcOccurance(lows, s, calced);
					}

					newTotSamples += s.Samples;
					newTotSamplesGen += s.GeneralizedSamples;
				}

				// Raw sample count must be invariant under generalization.
				if (newTotSamples != totSamples)
					throw new Exception("err");

				// recalc values. if better now
				bool better = false;
				if (newTotSamplesGen < totSamplesGen && newTotSamplesGen >= totSamples)
					better = true;
				else
				{
					// (translated) undo it
				}

				if (!better)
				{
					//restore
					foreach (var kv in backupGenVal)
						kv.Key.GeneralizedSamples = kv.Value;
				}
				else
				{
					Log("better");
					// Adopt the new generalization and log the per-sample values.
					totSamples = 0; totSamplesGen = 0;
					for (int i = 0; i < samples.Count; i++)
					{
						Sample s = samples[i];
						Log(string.Format("{0} for {1} gen. samples.", s.Value ? "H" : "L", s.GeneralizedSamples, s.Samples));

						totSamples += s.Samples;
						totSamplesGen += s.GeneralizedSamples;
					}
					Log(string.Format("TotSamples {0}, TotGENSamples {1}", totSamples, totSamplesGen));
				}
		/// <summary>
		/// Opens the target file and emits a canonical 44-byte PCM WAV header.
		/// The two length fields (RIFF chunk size, data chunk size) are written
		/// as zero placeholders and patched later by WriteFileTrailer.
		/// </summary>
		private void WriteFileHeader(string WavFilename, DirectX.WaveFormat WaveFormat)
		{
			bw = new BinaryWriter(new FileStream(WavFilename, FileMode.Create));

			// Four-character chunk tags.
			char[] riffTag = { 'R', 'I', 'F', 'F' };
			char[] waveTag = { 'W', 'A', 'V', 'E' };
			char[] fmtTag = { 'f', 'm', 't', ' ' };
			char[] dataTag = { 'd', 'a', 't', 'a' };

			short audioFormat = 1;       // WAV format tag; 1 = PCM (was named "padding")
			int fmtChunkSize = 0x10;     // 16-byte fmt payload for plain PCM
			int riffSizePlaceholder = 0; // fill this in later (WriteFileTrailer)
			short blockAlign = 2;        // changing the WaveFormat recording parameters will impact on this
			// see referenced blog posts for more details

			bw.Write(riffTag);
			bw.Write(riffSizePlaceholder);
			bw.Write(waveTag);

			bw.Write(fmtTag);
			bw.Write(fmtChunkSize);
			bw.Write(audioFormat);
			bw.Write(WaveFormat.Channels);
			bw.Write(WaveFormat.SamplesPerSecond);
			bw.Write(WaveFormat.AverageBytesPerSecond);
			bw.Write(blockAlign);
			bw.Write(WaveFormat.BitsPerSample);

			bw.Write(dataTag);
			bw.Write((int)0); // data chunk size, patched later
		}

}
#endif