﻿Imports NAudio.Wave
Imports NAudio.CoreAudioApi
Imports System.Threading
Imports System.Runtime.InteropServices

Namespace Audio.Outputs
	''' <summary>
	''' Support for playback using Wasapi
	''' </summary>
	Public Class WasapiOut
		'Implements IWavePlayer
		Implements IWavePosition
		' FIX: the class already implements the full dispose pattern (Dispose(), Dispose(Boolean),
		' GC.SuppressFinalize) but never declared IDisposable, so "Using" blocks would not compile.
		Implements IDisposable

		Private audioClient As AudioClient
		Private ReadOnly mmDevice As MMDevice
		Private ReadOnly shareMode As AudioClientShareMode
		Private renderClient As AudioRenderClient
		Private sourceProvider As IWaveProvider
		Private latencyMilliseconds As Integer
		Private bufferFrameCount As Integer
		Private bytesPerFrame As Integer
		Private ReadOnly isUsingEventSync As Boolean
		Private frameEventWaitHandle As EventWaitHandle
		Private readBuffer As Byte()
		Private m_playbackState As PlaybackState
		Private _playThread As Thread
		Private outputFormat As WaveFormat
		Private dmoResamplerNeeded As Boolean
		' Captured at construction so PlaybackStopped can be posted back to the creating thread's context.
		Private ReadOnly syncContext As SynchronizationContext

		''' <summary>
		''' Playback Stopped
		''' </summary>
		Public Event PlaybackStopped As EventHandler(Of StoppedEventArgs) 'Implements IWavePlayer.PlaybackStopped

		''' <summary>
		''' WASAPI Out using default audio endpoint
		''' </summary>
		''' <param name="shareMode">ShareMode - shared or exclusive</param>
		''' <param name="latency">Desired latency in milliseconds</param>
		''' <remarks>
		''' NOTE(review): this overload passes useEventSync:=True, whereas the equivalent
		''' NAudio constructor defaults to False — confirm this difference is intentional.
		''' </remarks>
		Public Sub New(shareMode As AudioClientShareMode, latency As Integer)
			Me.New(GetDefaultAudioEndpoint(), shareMode, True, latency)
		End Sub

		''' <summary>
		''' WASAPI Out using default audio endpoint
		''' </summary>
		''' <param name="shareMode">ShareMode - shared or exclusive</param>
		''' <param name="useEventSync">true if sync is done with event. false use sleep.</param>
		''' <param name="latency">Desired latency in milliseconds</param>
		Public Sub New(shareMode As AudioClientShareMode, useEventSync As Boolean, latency As Integer)
			Me.New(GetDefaultAudioEndpoint(), shareMode, useEventSync, latency)
		End Sub

		''' <summary>
		''' Creates a new WASAPI Output
		''' </summary>
		''' <param name="device">Device to use</param>
		''' <param name="shareMode">ShareMode - shared or exclusive</param>
		''' <param name="useEventSync">true if sync is done with event. false use sleep.</param>
		''' <param name="latency">Desired latency in milliseconds</param>
		Public Sub New(device As MMDevice, shareMode As AudioClientShareMode, useEventSync As Boolean, latency As Integer)
			Me.audioClient = device.AudioClient
			Me.mmDevice = device
			Me.shareMode = shareMode
			Me.isUsingEventSync = useEventSync
			Me.latencyMilliseconds = latency
			Me.syncContext = SynchronizationContext.Current
		End Sub

		''' <summary>
		''' Returns the default render endpoint (Console role).
		''' </summary>
		''' <exception cref="NotSupportedException">Thrown on pre-Vista Windows, where WASAPI does not exist.</exception>
		Private Shared Function GetDefaultAudioEndpoint() As MMDevice
			If Environment.OSVersion.Version.Major < 6 Then
				Throw New NotSupportedException("WASAPI supported only on Windows Vista and above")
			End If
			Dim enumerator = New MMDeviceEnumerator()
			Return enumerator.GetDefaultAudioEndpoint(DataFlow.Render, Role.Console)
		End Function

		''' <summary>
		''' Background playback loop: pre-fills the device buffer, starts the audio client,
		''' then keeps topping the buffer up until the state leaves Playing/Paused.
		''' Always raises PlaybackStopped (with any captured exception) on exit.
		''' </summary>
		Private Sub PlayThread()
			Dim resamplerDmoStream As ResamplerDmoStream = Nothing
			Dim playbackProvider As IWaveProvider = Me.sourceProvider
			Dim exception As Exception = Nothing
			Try
				If Me.dmoResamplerNeeded Then
					resamplerDmoStream = New ResamplerDmoStream(sourceProvider, outputFormat)
					playbackProvider = resamplerDmoStream
				End If

				' Fill a whole buffer before starting the stream.
				bufferFrameCount = audioClient.BufferSize
				' FIX: use integer division (\). "/" is floating-point division in VB and
				' only worked via an implicit narrowing conversion (fails under Option Strict On).
				bytesPerFrame = outputFormat.Channels * outputFormat.BitsPerSample \ 8
				readBuffer = New Byte(bufferFrameCount * bytesPerFrame - 1) {}
				FillBuffer(playbackProvider, bufferFrameCount)

				' Create WaitHandle for sync (only waited on when isUsingEventSync is set).
				Dim waitHandles = New WaitHandle() {frameEventWaitHandle}

				audioClient.Start()

				While m_playbackState <> PlaybackState.Stopped
					' If using Event Sync, wait for notification from AudioClient; otherwise sleep half the latency.
					Dim indexHandle As Integer = 0
					If isUsingEventSync Then
						indexHandle = WaitHandle.WaitAny(waitHandles, 3 * latencyMilliseconds, False)
					Else
						Thread.Sleep(latencyMilliseconds \ 2)
					End If

					' If still playing and the wait did not time out
					If m_playbackState = PlaybackState.Playing AndAlso indexHandle <> WaitHandle.WaitTimeout Then
						' See how much buffer space is available.
						Dim numFramesPadding As Integer = 0
						If isUsingEventSync Then
							' In exclusive mode, always ask the max = bufferFrameCount = audioClient.BufferSize
							numFramesPadding = If((shareMode = AudioClientShareMode.[Shared]), audioClient.CurrentPadding, 0)
						Else
							numFramesPadding = audioClient.CurrentPadding
						End If
						Dim numFramesAvailable As Integer = bufferFrameCount - numFramesPadding
						If numFramesAvailable > 10 Then
							' see https://naudio.codeplex.com/workitem/16363
							FillBuffer(playbackProvider, numFramesAvailable)
						End If
					End If
				End While
				' Let the last submitted buffer drain before stopping the client.
				Thread.Sleep(latencyMilliseconds \ 2)
				audioClient.[Stop]()
				If m_playbackState = PlaybackState.Stopped Then
					audioClient.Reset()
				End If
			Catch e As Exception
				exception = e
			Finally
				If resamplerDmoStream IsNot Nothing Then
					resamplerDmoStream.Dispose()
				End If
				RaisePlaybackStopped(exception)
			End Try
		End Sub

		''' <summary>
		''' Raises PlaybackStopped, marshalling to the captured synchronization context when one exists.
		''' </summary>
		''' <param name="e">Exception that ended playback, or Nothing for a normal stop.</param>
		Private Sub RaisePlaybackStopped(e As Exception)
			If Me.syncContext Is Nothing Then
				RaiseEvent PlaybackStopped(Me, New StoppedEventArgs(e))
			Else
				syncContext.Post(Sub() RaiseEvent PlaybackStopped(Me, New StoppedEventArgs(e)), Nothing)
			End If
		End Sub

		''' <summary>
		''' Reads up to frameCount frames from the playback provider into the render client's buffer.
		''' A zero-byte read marks the stream finished by setting the state to Stopped.
		''' </summary>
		Private Sub FillBuffer(playbackProvider As IWaveProvider, frameCount As Integer)
			Dim buffer As IntPtr = renderClient.GetBuffer(frameCount)
			Dim readLength As Integer = frameCount * bytesPerFrame
			Dim read As Integer = playbackProvider.Read(readBuffer, 0, readLength)
			If read = 0 Then
				m_playbackState = PlaybackState.Stopped
			End If
			Marshal.Copy(readBuffer, 0, buffer, read)
			' Only release the whole frames actually read; a short read truncates to frame boundary.
			Dim actualFrameCount As Integer = read \ bytesPerFrame
			renderClient.ReleaseBuffer(actualFrameCount, AudioClientBufferFlags.None)
		End Sub

		''' <summary>
		''' Gets the current position in bytes from the wave output device.
		''' (n.b. this is not the same thing as the position within your reader
		''' stream)
		''' </summary>
		''' <returns>Position in bytes, or 0 when stopped</returns>
		Public Function GetPosition() As Long Implements IWavePosition.GetPosition
			If m_playbackState = NAudio.Wave.PlaybackState.Stopped Then
				Return 0
			End If
			Return CLng(audioClient.AudioClockClient.AdjustedPosition)
		End Function

		''' <summary>
		''' Gets a instance indicating the format the hardware is using.
		''' </summary>
		Public ReadOnly Property OutputWaveFormat() As WaveFormat Implements IWavePosition.OutputWaveFormat
			Get
				Return outputFormat
			End Get
		End Property

#Region "IWavePlayer Members"

		''' <summary>
		''' Begin Playback. Starts the play thread when stopped, or simply resumes when paused.
		''' </summary>
		Public Sub Play() 'Implements IWavePlayer.Play
			If m_playbackState <> PlaybackState.Playing Then
				If m_playbackState = PlaybackState.Stopped Then
					_playThread = New Thread(AddressOf PlayThread)
					' Set the state before starting the thread so the loop condition sees Playing.
					m_playbackState = PlaybackState.Playing
					_playThread.Start()
				Else
					' Paused -> Playing: the existing play thread picks the state change up.
					m_playbackState = PlaybackState.Playing
				End If
			End If
		End Sub

		''' <summary>
		''' Stop playback and flush buffers. Blocks until the play thread has exited.
		''' </summary>
		Public Sub [Stop]() 'Implements IWavePlayer.Stop
			If m_playbackState <> PlaybackState.Stopped Then
				m_playbackState = PlaybackState.Stopped
				_playThread.Join()
				_playThread = Nothing
			End If
		End Sub

		''' <summary>
		''' Stop playback without flushing buffers
		''' </summary>
		Public Sub Pause() 'Implements IWavePlayer.Pause
			If m_playbackState = PlaybackState.Playing Then
				m_playbackState = PlaybackState.Paused
			End If
		End Sub

		''' <summary>
		''' Initialize for playing the specified wave stream. Negotiates a hardware
		''' format (falling back through Float32/Int24/Int16, then 16-bit stereo) and
		''' enables the DMO resampler when the source format is not directly supported.
		''' </summary>
		''' <param name="waveProvider">IWaveProvider to play</param>
		''' <exception cref="NotSupportedException">No supported hardware format could be found.</exception>
		Public Sub Init(waveProvider As IWaveProvider) 'Implements IWavePlayer.Init
			' FIX: widen before multiplying; Integer * Integer could overflow for very large latencies.
			Dim latencyRefTimes As Long = CLng(latencyMilliseconds) * 10000
			outputFormat = waveProvider.WaveFormat
			' first attempt uses the WaveFormat from the WaveStream
			Dim closestSampleRateFormat As WaveFormatExtensible = Nothing
			If Not audioClient.IsFormatSupported(shareMode, outputFormat, closestSampleRateFormat) Then
				' Use closestSampleRateFormat (in shared mode it usually equals audioClient.MixFormat)
				' See documentation : http://msdn.microsoft.com/en-us/library/ms678737(VS.85).aspx
				' They say : "In shared mode, the audio engine always supports the mix format"
				' The MixFormat is more likely to be a WaveFormatExtensible.
				If closestSampleRateFormat Is Nothing Then
					Dim correctSampleRateFormat As WaveFormat = audioClient.MixFormat

					If Not audioClient.IsFormatSupported(shareMode, correctSampleRateFormat) Then
						' Check from best format to worst format ( Float32, Int24, Int16 )
						Dim bestToWorstFormats As WaveFormatExtensible() = {New WaveFormatExtensible(outputFormat.SampleRate, 32, outputFormat.Channels), New WaveFormatExtensible(outputFormat.SampleRate, 24, outputFormat.Channels), New WaveFormatExtensible(outputFormat.SampleRate, 16, outputFormat.Channels)}

						For i As Integer = 0 To bestToWorstFormats.Length - 1
							correctSampleRateFormat = bestToWorstFormats(i)
							If audioClient.IsFormatSupported(shareMode, correctSampleRateFormat) Then
								Exit For
							End If
							correctSampleRateFormat = Nothing
						Next

						' If still null, then test on the PCM16, 2 channels
						If correctSampleRateFormat Is Nothing Then
							' Last chance (Thanks WASAPI)
							correctSampleRateFormat = New WaveFormatExtensible(outputFormat.SampleRate, 16, 2)
							If Not audioClient.IsFormatSupported(shareMode, correctSampleRateFormat) Then
								Throw New NotSupportedException("Can't find a supported format to use")
							End If
						End If
					End If
					outputFormat = correctSampleRateFormat
				Else
					outputFormat = closestSampleRateFormat
				End If

				' just check that we can make it.
				Using New ResamplerDmoStream(waveProvider, outputFormat)
				End Using
				Me.dmoResamplerNeeded = True
			Else
				dmoResamplerNeeded = False
			End If
			Me.sourceProvider = waveProvider

			' If using EventSync, setup is specific with shareMode
			If isUsingEventSync Then
				' Init Shared or Exclusive
				If shareMode = AudioClientShareMode.[Shared] Then
					' With EventCallBack and Shared, both latencies must be set to 0
					audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, 0, 0, outputFormat, Guid.Empty)

					' Get back the effective latency from AudioClient.
					' FIX: integer division (\) to truncate like the C# original; "/" + CInt
					' would perform floating division and banker's rounding.
					latencyMilliseconds = CInt(audioClient.StreamLatency \ 10000)
				Else
					' With EventCallBack and Exclusive, both latencies must be equal
					audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, latencyRefTimes, outputFormat, Guid.Empty)
				End If

				' Create the Wait Event Handle
				frameEventWaitHandle = New EventWaitHandle(False, EventResetMode.AutoReset)
				audioClient.SetEventHandle(frameEventWaitHandle.SafeWaitHandle.DangerousGetHandle())
			Else
				' Normal setup for both share modes
				audioClient.Initialize(shareMode, AudioClientStreamFlags.None, latencyRefTimes, 0, outputFormat, Guid.Empty)
			End If

			' Get the RenderClient
			renderClient = audioClient.AudioRenderClient
		End Sub

		''' <summary>
		''' Playback State
		''' </summary>
		Public ReadOnly Property PlaybackState() As PlaybackState 'Implements IWavePlayer.PlaybackState
			Get
				Return m_playbackState
			End Get
		End Property

		''' <summary>
		''' Volume (master volume of the endpoint device, 0.0 to 1.0)
		''' </summary>
		''' <exception cref="ArgumentOutOfRangeException">Value outside 0.0 - 1.0.</exception>
		Public Property Volume() As Single 'Implements IWavePlayer.Volume
			Get
				Return mmDevice.AudioEndpointVolume.MasterVolumeLevelScalar
			End Get
			Set(value As Single)
				If value < 0 Then
					Throw New ArgumentOutOfRangeException("value", "Volume must be between 0.0 and 1.0")
				End If
				If value > 1 Then
					Throw New ArgumentOutOfRangeException("value", "Volume must be between 0.0 and 1.0")
				End If
				mmDevice.AudioEndpointVolume.MasterVolumeLevelScalar = value
			End Set
		End Property

#End Region

#Region "IDisposable Members"

		''' <summary>
		''' Dispose
		''' </summary>
		Public Sub Dispose() Implements IDisposable.Dispose
			Dispose(True)
			GC.SuppressFinalize(Me)
		End Sub

		''' <summary>
		''' Stops playback and releases the audio client and event handle.
		''' </summary>
		''' <param name="IsDisposing">True when called from Dispose(); False from a finalizer.</param>
		Protected Overridable Sub Dispose(IsDisposing As Boolean)
			If IsDisposing Then
				If audioClient IsNot Nothing Then
					[Stop]()
					audioClient.Dispose()
					audioClient = Nothing
					renderClient = Nothing
				End If
				' FIX: dispose the wait handle only AFTER [Stop]() has joined the play thread
				' (the thread may be blocked in WaitHandle.WaitAny on it), and guard against
				' Nothing — the handle is only created when event sync is used and Init was called.
				If frameEventWaitHandle IsNot Nothing Then
					frameEventWaitHandle.Dispose()
					frameEventWaitHandle = Nothing
				End If
			End If
		End Sub

#End Region
	End Class
End Namespace
