﻿#include "pch.h"

#include "AudioFX.h"
#include "AudioInterfaceActivator.h"
#include <set>
#include <list>


using namespace GuitarFX_Audio;
using namespace Platform;
using namespace Windows::Media::Devices;

// MMCSS task class name passed to MFLockSharedWorkQueue (see InitWorkQ) to get
// a work queue whose threads run at audio scheduling priority.
PCWSTR WORK_QUEUE_CLASS = L"Audio";

#pragma region Startup/Shutdown

/*
*	Initializes the library for use
*	
*	Parameters: none
*	
*	Returns: void
*	
*	Throws: Platform::COMException if Media foundation startup is not successsful
*/
void AudioFX::Startup()
{
	HRESULT hr = MFStartup(MF_VERSION);
	if (FAILED(hr))
		throw ref new Platform::COMException(hr);
}
/*
*	Shuts down the library
*
*	Parameters: none
*
*	Returns: void
*
*	Throws: Platform::COMException if Media foundation shutdown is not successsful
*/
void AudioFX::Shutdown()
{
	HRESULT hr = MFShutdown();
	if (FAILED(hr))
		throw ref new Platform::COMException(hr);

}

#pragma endregion

#pragma region Contructor and Destructor code

/*
*	Constructs an AudioFX for the given endpoint ids and pre-activated WASAPI clients.
*	Real setup (formats, buffers, work queue, stream init) is deferred to InitializeAsync.
*
*	Parameters:
*		captureDeviceId / renderDeviceId - endpoint ids the clients were activated from
*		captureClient / renderClient     - activated IAudioClient2 interfaces; Initialize()
*		                                   is called later in InitCapture/InitRender
*/
AudioFX::AudioFX(Platform::String ^captureDeviceId, Platform::String ^renderDeviceId, ComPtr<IAudioClient2> captureClient, ComPtr<IAudioClient2> renderClient) :
m_State(AudioFXStateEnum::UnInitialized),
m_CaptureCallback(this, &AudioFX::OnCaptureCallback),	// MF async callback for capture events
m_ProcessDataCallback(this, &AudioFX::OnProcessDataCallback),	// MF async callback for processing/render
m_CaptureDeviceId(captureDeviceId),
m_RenderDeviceId(renderDeviceId),
m_CaptureClient(captureClient),
m_RenderClient(renderClient),
m_MasterVolume(1.0f),	// unity gain by default
m_DelayFeedback(0.5f),	// fraction of the delayed signal mixed back in
m_DelayFrameOffset(-24000),	// negative frame offset into render history; presumably 0.5 s at 48 kHz — confirm (recomputed in InitializeAsync)
m_hCaptureCallbackEvent(INVALID_HANDLE_VALUE),
m_dwMFAudioWorkQueueId(0),
m_pCaptureFormat(NULL)
{
	// All members are set up in the initializer list; nothing to do here
}

/*
*	Destructor: releases the COM interfaces, the mix format memory, the capture
*	event handle and the MF work queue lock. Moves state to Stopping first so
*	in-flight callbacks (which check State) stop scheduling further work.
*
*	NOTE(review): pending MF work items are not cancelled here; callers are
*	expected to call Stop() before destruction — confirm.
*/
AudioFX::~AudioFX()
{
	State = AudioFXStateEnum::Stopping;

	// Drop COM references
	m_Capture = nullptr;
	m_Render = nullptr;
	m_CaptureClient = nullptr;
	m_RenderClient = nullptr;
	m_CaptureCallbackResult = nullptr;

	// The format was allocated by GetMixFormat (CoTaskMemAlloc)
	if (m_pCaptureFormat != NULL)
	{
		CoTaskMemFree(m_pCaptureFormat);
		m_pCaptureFormat = NULL;
	}

	// FIX: only close a real handle. The member starts as INVALID_HANDLE_VALUE
	// and InitObjects() may never have run (or CreateEventEx may have returned NULL).
	if (m_hCaptureCallbackEvent != INVALID_HANDLE_VALUE && m_hCaptureCallbackEvent != NULL)
	{
		CloseHandle(m_hCaptureCallbackEvent);
		m_hCaptureCallbackEvent = INVALID_HANDLE_VALUE;
	}

	// FIX: only unlock the shared work queue if InitWorkQ() actually locked one
	// (the id member stays 0 until MFLockSharedWorkQueue succeeds).
	if (m_dwMFAudioWorkQueueId != 0)
	{
		MFUnlockWorkQueue(m_dwMFAudioWorkQueueId);
		m_dwMFAudioWorkQueueId = 0;
	}
}

#pragma endregion

#pragma region Initialization methods

static Platform::String ^PropKey_RawProcessingSupported = L"System.Devices.AudioDevice.RawProcessingSupported";

/*
	Asynchronously determines whether the given audio endpoint supports raw
	(unprocessed) streams by querying its RawProcessingSupported device property.
	Resolves to false when the property is absent.
*/
task<bool> AudioFX::SupportsRawStream(Platform::String ^deviceId)
{
	auto requestedProperties = ref new Platform::Collections::Vector<Platform::String ^>();
	requestedProperties->Append(PropKey_RawProcessingSupported);

	auto deviceInfoOp = Windows::Devices::Enumeration::DeviceInformation::CreateFromIdAsync(deviceId, requestedProperties);

	return create_task(deviceInfoOp).then(
		[](Windows::Devices::Enumeration::DeviceInformation ^deviceInfo) -> bool
	{
		// Property missing => raw mode not supported
		if (!deviceInfo->Properties->HasKey(PropKey_RawProcessingSupported))
			return false;

		return safe_cast<bool>(deviceInfo->Properties->Lookup(PropKey_RawProcessingSupported));
	});
}

/*
*	Factory: asynchronously activates IAudioClient2 on the capture endpoint,
*	then on the render endpoint, and constructs an AudioFX wrapping both.
*	The returned instance is UnInitialized; call InitializeAsync before Start.
*/
Windows::Foundation::IAsyncOperation<AudioFX ^> ^AudioFX::CreateAsync(Platform::String ^captureDeviceId, Platform::String ^renderDeviceId)
{
	return create_async(
		[captureDeviceId, renderDeviceId]()-> task<AudioFX ^>
	{
		// Activate the capture client first...
		return CAudioInterfaceActivator::ActivateAsync(captureDeviceId->Data()).then(
			[captureDeviceId, renderDeviceId](ComPtr<IAudioClient2> captureClient)

		{
			// ...then the render client; the capture client rides along in the capture list
			return CAudioInterfaceActivator::ActivateAsync(renderDeviceId->Data()).then(
				[captureDeviceId, renderDeviceId, captureClient](ComPtr<IAudioClient2> renderClient)
				
			{
				return ref new AudioFX(captureDeviceId, renderDeviceId, captureClient, renderClient);
			}
			);
		}
		);
	}
	);
}


/*
	Creates the kernel objects used by the pipeline: currently just the event
	(flags = 0 => auto-reset, initially non-signaled) that WASAPI signals when
	capture data is available; MFPutWaitingWorkItem waits on it (see Start).
	Throws via CHECK_AND_THROW_WIN32 on failure.
*/
void AudioFX::InitObjects()
{
	// Create event object for capture callback wait
	m_hCaptureCallbackEvent = ::CreateEventEx(NULL, NULL, 0, EVENT_ALL_ACCESS);
	CHECK_AND_THROW_WIN32(m_hCaptureCallbackEvent == NULL);	// CreateEventEx returns NULL (not INVALID_HANDLE_VALUE) on failure

}


/*
	Locks the shared MF work queue for the WORK_QUEUE_CLASS MMCSS class, points
	both async callbacks at that queue, and pre-creates the IMFAsyncResult
	objects that are reused for every capture/processing work item.
	NOTE(review): the comment below says "Pro Audio" but WORK_QUEUE_CLASS is
	"Audio" — confirm which MMCSS class is intended.
*/
void AudioFX::InitWorkQ()
{
	// Get the Pro Audio work queue identifier
	DWORD dwTaskId = 0;
	HRESULT hr = MFLockSharedWorkQueue(WORK_QUEUE_CLASS, WORK_QUEUE_BASE_PRIORITY, &dwTaskId, &m_dwMFAudioWorkQueueId);

	CHECK_AND_THROW(hr);

	// Now set the IMFAsyncCallback parameter for the queue id to place the callbacks
	// into the right high priority queue
	m_CaptureCallback.SetQueueID(m_dwMFAudioWorkQueueId);
	m_ProcessDataCallback.SetQueueID(m_dwMFAudioWorkQueueId);

	// Create the IAsyncResults (no object/state attached; reused on every submission)
	hr = MFCreateAsyncResult(nullptr, &m_CaptureCallback, nullptr, &m_CaptureCallbackResult);
	CHECK_AND_THROW(hr);

	hr = MFCreateAsyncResult(nullptr, &m_ProcessDataCallback, nullptr, &m_ProcessCallbackResult);
	CHECK_AND_THROW(hr);

}

/*
Initialize the give WASAPI audio client stream with given parameters. 
If requested periodicity is not aligned then calculate the aligned value and retry
*/
/*
	Initializes the given WASAPI audio client as a shared-mode stream with the
	given format and flags. If WASAPI rejects the default period with
	AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED, converts the engine's proposed buffer
	size (in frames) into a 100-ns period and retries once with it.

	Returns: HRESULT of the last Initialize (or GetBufferSize) call.
*/
HRESULT AudioFX::InitAudioClient(const ComPtr<IAudioClient2> audioClient, const WAVEFORMATEX *pFormat, const DWORD streamFlags)
{
	// First attempt: shared mode stream with the default device period (0/0 durations)
	HRESULT hr = audioClient->Initialize(
		_AUDCLNT_SHAREMODE::AUDCLNT_SHAREMODE_SHARED,
		streamFlags,
		0,
		0,
		pFormat,
		NULL);

	if (hr == AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED)
	{
		UINT32 nFrames = 0;
		hr = audioClient->GetBufferSize(&nFrames);
		CHECK_RESULT(hr, exit);	// FIX: was unchecked — don't compute a period from a stale frame count

		// Calculate the period equal to the duration of the proposed buffer size
		// (frames -> seconds -> 100 ns units, rounded to nearest)
		REFERENCE_TIME alignedBufferSize = (REFERENCE_TIME) (1e7 * double(nFrames) / double(pFormat->nSamplesPerSec) + 0.5);

		hr = audioClient->Initialize(
			_AUDCLNT_SHAREMODE::AUDCLNT_SHAREMODE_SHARED,
			streamFlags,
			alignedBufferSize,
			alignedBufferSize,
			pFormat,
			NULL);
	}
	CHECK_RESULT(hr, exit);
exit:
	return hr;
}

/*
	Configures and initializes the capture-side WASAPI stream and retrieves the
	IAudioCaptureClient service used to read data. `options` selects raw vs.
	default (processed) mode. The stream is event driven, using the event
	created in InitObjects(). Throws COMException via CHECK_AND_THROW on failure.
*/
void AudioFX::InitCapture(AUDCLNT_STREAMOPTIONS options)
{

	// Set the capture client options
	AudioClientProperties captureProperties = {
		sizeof(AudioClientProperties),
		FALSE,	// Capture stream is not hardware accelerated
		AudioCategory_Other,	// Capture stream supports only "Other" category
		options
	};
	
	HRESULT hr = m_CaptureClient->SetClientProperties(&captureProperties);
	CHECK_AND_THROW(hr);

	// Event-driven, non-persistent stream using the mix format queried in InitializeAsync
	hr = InitAudioClient(m_CaptureClient, m_pCaptureFormat, AUDCLNT_STREAMFLAGS_EVENTCALLBACK | AUDCLNT_STREAMFLAGS_NOPERSIST);
	CHECK_AND_THROW(hr);

	// WASAPI signals this event when a capture buffer is ready (see OnCaptureCallback)
	hr = m_CaptureClient->SetEventHandle(m_hCaptureCallbackEvent);
	CHECK_AND_THROW(hr);

	// Retrieve the IAudioCaptureClient interface
	hr = m_CaptureClient->GetService(__uuidof(IAudioCaptureClient), &m_Capture);
	CHECK_AND_THROW(hr);

}

/*
	Configures and initializes the render-side WASAPI stream and retrieves the
	IAudioRenderClient service used to submit output frames. `options` selects
	raw vs. default (processed) mode. Throws COMException via CHECK_AND_THROW.
*/
void AudioFX::InitRender(AUDCLNT_STREAMOPTIONS options)
{
	// Describe the render stream to the audio engine before initializing it
	AudioClientProperties renderProperties = {
		sizeof(AudioClientProperties),
		FALSE,	// No hardware offload for the output stream
		AudioCategory_ForegroundOnlyMedia,
		options
	};

	HRESULT result = m_RenderClient->SetClientProperties(&renderProperties);
	CHECK_AND_THROW(result);

	// The render stream is initialized with the *capture* mix format;
	// InitializeAsync asserts the two endpoints share a sample rate
	result = InitAudioClient(m_RenderClient, m_pCaptureFormat, AUDCLNT_STREAMFLAGS_NOPERSIST);
	CHECK_AND_THROW(result);

	result = m_RenderClient->GetService(__uuidof(IAudioRenderClient), &m_Render);
	CHECK_AND_THROW(result);
}

void AudioFX::InitBuffers()
{
	// Allocate enough buffer to keep captured frames for processing
	UINT32 nCaptureBufferFrames = 0;
	
	HRESULT hr = m_CaptureClient->GetBufferSize(&nCaptureBufferFrames);
	CHECK_AND_THROW(hr);

	m_CaptureBuffer.Resize(nCaptureBufferFrames * 8 * m_pCaptureFormat->nChannels);	// Get 8 times the buffer

	m_RenderBuffer.Resize((int) ((MAX_DELAY_HNS * m_pCaptureFormat->nSamplesPerSec / 10000000) + nCaptureBufferFrames * 8) * m_pCaptureFormat->nChannels); // Allocate maximum delay + 8 times the buffer to leave enough room
}

/*
	Asynchronously initializes the effect chain: queries both mix formats,
	validates the float / shared-sample-rate assumptions, then sets up the
	event, the MF work queue, both WASAPI streams and the sample buffers on a
	background task. Valid only from the UnInitialized state; leaves the object
	in the Ready state on success.

	Throws: COMException(E_NOT_VALID_STATE) if already initialized;
	        COMException (via CHECK_AND_THROW) on any WASAPI/MF failure.
*/
Windows::Foundation::IAsyncAction ^AudioFX::InitializeAsync()
{
	if (State != AudioFXStateEnum::UnInitialized)	// Cannot initialize if in any other state as uninitialized
		throw ref new Platform::COMException(E_NOT_VALID_STATE);


	HRESULT hr = m_CaptureClient->GetMixFormat(&m_pCaptureFormat);
	CHECK_AND_THROW(hr);	// FIX: was unchecked; the format is dereferenced immediately below

	// Convert the delay duration (100 ns units) to a negative frame offset into render history
	m_DelayFrameOffset = (int) (-m_DelayTime.Duration * m_pCaptureFormat->nSamplesPerSec / 10000000);

	WAVEFORMATEX *pRenderFormat = NULL;
	hr = m_RenderClient->GetMixFormat(&pRenderFormat);
	CHECK_AND_THROW(hr);	// FIX: was unchecked

	// ******* IMPORTANT *******
	// For the simplicity this sample does not do any data or sample rate conversion. Thus it expects to have same sample rates for 
	// capture and render and use float format. The sample rates can be changed through control panel (Audio Devices)

	// Assuming that the mix format uses float (32-bit samples)
	assert(m_pCaptureFormat->wBitsPerSample == 32);

	// Assuming that the render sample rate is same as capture. 
	assert(pRenderFormat->nSamplesPerSec == m_pCaptureFormat->nSamplesPerSec);

	// Release memory, we are using the capture device mix format structure
	CoTaskMemFree(pRenderFormat);

	return create_async(
		[this]()  ->task<void>
	{
		// PPL task operator&& joins the two task<bool> results into a task<std::vector<bool>>
		return (SupportsRawStream(m_CaptureDeviceId) && SupportsRawStream(m_RenderDeviceId)).then([this](std::vector<bool> rawSupport)
		{

			InitObjects();
			InitWorkQ();

			// Request raw stream if supported
			InitCapture(rawSupport[0] == true ? AUDCLNT_STREAMOPTIONS::AUDCLNT_STREAMOPTIONS_RAW : AUDCLNT_STREAMOPTIONS::AUDCLNT_STREAMOPTIONS_NONE);
			InitRender(rawSupport[1] == true ? AUDCLNT_STREAMOPTIONS::AUDCLNT_STREAMOPTIONS_RAW : AUDCLNT_STREAMOPTIONS::AUDCLNT_STREAMOPTIONS_NONE);

			InitBuffers();

			State = AudioFXStateEnum::Ready;

		},task_continuation_context::use_arbitrary());
	});
}

#pragma endregion

#pragma region Control methods

/*
	Starts the render and capture clients and arms the first waiting work item
	on the capture event, kicking off the callback cycle. Valid only from the
	Ready state; leaves the object in the Started state.

	Throws: COMException(E_NOT_VALID_STATE) if not Ready;
	        COMException (via CHECK_AND_THROW) on any WASAPI/MF failure.
*/
void AudioFX::Start()
{
	if (State != AudioFXStateEnum::Ready)
		throw ref new Platform::COMException(E_NOT_VALID_STATE);

	State = AudioFXStateEnum::Starting;

	// Reset gap detection; OnCaptureCallback treats 0 as "first buffer after start"
	m_u64ExpectedCapturePosition = 0;

	// Start render client
	HRESULT hr = m_RenderClient->Start();
	CHECK_AND_THROW(hr);

	// Start capture client
	hr = m_CaptureClient->Start();
	CHECK_AND_THROW(hr);

	// Add work queue waiting task for capture
	hr = MFPutWaitingWorkItem(m_hCaptureCallbackEvent, CAPTURE_TASK_PRIORITY, m_CaptureCallbackResult.Get(), &m_CaptureCallbackKey);
	CHECK_AND_THROW(hr);	// FIX: removed a duplicated CHECK_AND_THROW(hr) on the same value

	State = AudioFXStateEnum::Started;
}

void AudioFX::Stop()
{
	if (State != AudioFXStateEnum::Started)
		throw ref new Platform::COMException(E_NOT_VALID_STATE);
	State = AudioFXStateEnum::Stopping;

	// Cancel pending operations
	HRESULT hr = MFCancelWorkItem(m_CaptureCallbackKey);
	CHECK_AND_LOG(hr);

	hr = MFCancelWorkItem(m_ProcessDataCallbackKey);
	CHECK_AND_LOG(hr);

	hr = m_CaptureClient->Stop();
	CHECK_AND_THROW(hr);

	hr = m_RenderClient->Stop();
	CHECK_AND_THROW(hr);

	State = AudioFXStateEnum::Ready;
}

#pragma endregion

#pragma region Capture processing


/*
	Capture callback, run on the MF work queue each time WASAPI signals
	m_hCaptureCallbackEvent. Drains one buffer from the capture endpoint into
	m_CaptureBuffer (prepending a gap of silence for any missed frames),
	re-arms the waiting work item, and queues a processing task when new
	frames arrived. Returns S_OK without doing anything when not Started.
*/
HRESULT AudioFX::OnCaptureCallback(IMFAsyncResult *pResult)
{
	if (State == AudioFXStateEnum::Started)
	{	
		HRESULT hr = S_OK;
		UINT64 u64DevicePosition = 0, u64QPC = 0;
		DWORD dwFlags = 0;
		UINT32 nFramesAvailable = 0;	// stays 0 if GetBuffer fails; also read after the exit: label
		LPBYTE pCaptureData = NULL;
		hr = m_Capture->GetBuffer(&pCaptureData, &nFramesAvailable, &dwFlags, &u64DevicePosition, &u64QPC);
		CHECK_RESULT(hr, exit);

		// Flush the render buffer on a reported glitch. The double-negated test is
		// equivalent to: (DATA_DISCONTINUITY flag set) && (u64DevicePosition != 0),
		// i.e. ignore the discontinuity flag on the very first buffer after start.
		if (!((dwFlags & _AUDCLNT_BUFFERFLAGS::AUDCLNT_BUFFERFLAGS_DATA_DISCONTINUITY)== 0 || u64DevicePosition == 0))
		{
			m_RenderBuffer.Flush();
		}


		if (nFramesAvailable != 0)
		{
			UINT32 nGapFrames = 0;

			// If device position is not matching the expected position and this is not the first call after start
			// Then we are missing frames - insert silence to the capture buffer
			if (u64DevicePosition != m_u64ExpectedCapturePosition && m_u64ExpectedCapturePosition != 0)
			{
				nGapFrames = (UINT32) (u64DevicePosition - m_u64ExpectedCapturePosition);
			}

			// Counts are in samples (frames * channels)
			m_CaptureBuffer.InsertData(pCaptureData, nFramesAvailable * m_pCaptureFormat->nChannels, nGapFrames * m_pCaptureFormat->nChannels);

			m_u64ExpectedCapturePosition = u64DevicePosition + nFramesAvailable;
		}

		hr = m_Capture->ReleaseBuffer(nFramesAvailable);
		CHECK_RESULT(hr, exit);

	exit:

		// Schedule the next capture callback event (done even after an error so
		// the capture loop keeps running; failures here are only logged)
		hr = MFPutWaitingWorkItem(m_hCaptureCallbackEvent, CAPTURE_TASK_PRIORITY, m_CaptureCallbackResult.Get(), &m_CaptureCallbackKey);
		CHECK_AND_LOG(hr);

		// Schedule a processing capture data processing task
		if (nFramesAvailable != 0)
		{
			hr = MFPutWorkItemEx2(m_dwMFAudioWorkQueueId, PROCESS_TASK_PRIORITY, m_ProcessCallbackResult.Get());
			CHECK_AND_LOG(hr);
		}

		return hr;
	}
	else
		return S_OK;	// Stopping/stopped: do nothing and do not re-arm the wait

}

#pragma endregion

#pragma region Processing and Render

/*
	This method is executed when capture data is to be processed asynchronously at realtime priority thread.
	Unpack the state variable passed in containing capture data and pass it on to ProcessData for further processing
*/
/*
	This method is executed when capture data is to be processed asynchronously at realtime priority thread.
	Consumes all unread capture samples, mixes each with a delayed, attenuated
	copy of previously produced output (m_DelayFeedback scales the fed-back
	signal — a feedback delay effect), scales by m_MasterVolume, hard-clips to
	[-1.0, 1.0], appends the result to the render buffer, then submits as many
	frames to the WASAPI render client as its device buffer has room for.
	Sample format is float32 (asserted in InitializeAsync); "samples" counts
	are frames * channels throughout.
*/
HRESULT AudioFX::OnProcessDataCallback(IMFAsyncResult *pResult)
{
	HRESULT hr = S_OK;

	// If the state is anything else than started then do nothing as likely the component is being stopped or shut down
	if (State == AudioFXStateEnum::Started)
	{
		unsigned dataStartIndex = 0, nDataSamples = 0;
		m_CaptureBuffer.GetReadBuffer(dataStartIndex, nDataSamples);
		// g_LogChannel->LogValuePair("Process data", nDataSamples);
		m_CaptureBuffer.ReleaseReadBuffer(nDataSamples);	// Consume all the unread data

		if (nDataSamples != 0)
		{
			auto capturePtr = m_CaptureBuffer.GetPtrAt(dataStartIndex);
			auto renderPtr = m_RenderBuffer.GetReadPtr();
			std::vector<float> outData(nDataSamples);

			// Outer loop advances one frame (nChannels samples) per iteration
			for (unsigned sampleIndex = 0; sampleIndex < nDataSamples; sampleIndex += m_pCaptureFormat->nChannels)
			{
				for (unsigned channel = 0; channel < m_pCaptureFormat->nChannels; channel++)
				{
					// m_DelayFrameOffset is negative, so this indexes backwards into
					// already-rendered history behind the read pointer — presumably
					// valid by the MAX_DELAY_HNS sizing in InitBuffers; relies on the
					// (unseen) ring buffer class keeping that history addressable
					float outValue = m_MasterVolume * (capturePtr[sampleIndex + channel] + 
										renderPtr[m_DelayFrameOffset + sampleIndex + channel] * m_DelayFeedback); // Mix with saved samples
					
					// Hard-clip to the valid float audio range
					if (outValue > 1.0)
						outValue = 1.0;
					if (outValue < -1.0)
						outValue = -1.0;

					outData[sampleIndex + channel] = outValue;
				}
			}

			m_RenderBuffer.InsertData(outData.data(), nDataSamples, 0);	// Using frames in units
		}

		// Now render as many samples available but not more than render buffer available
		
		UINT32 nPadding = 0;
		m_RenderClient->GetCurrentPadding(&nPadding);	// frames already queued in the device buffer

		UINT32 nBuffer = 0;
		m_RenderClient->GetBufferSize(&nBuffer);


		unsigned renderDataStartIndex = 0, nBufferSampleCount = 0;
		m_RenderBuffer.GetReadBuffer(renderDataStartIndex, nBufferSampleCount);

		// Cap at the free device-buffer space, converted from frames to samples
		UINT32 nSamplesToRender = std::min<unsigned>(nBufferSampleCount, (nBuffer - nPadding)* m_pCaptureFormat->nChannels);

		m_RenderBuffer.ReleaseReadBuffer(nSamplesToRender);

		if (nSamplesToRender > 0)
		{
			UINT32 nFramesToRender = nSamplesToRender / m_pCaptureFormat->nChannels;

			LPBYTE pRenderData;
			m_Render->GetBuffer(nFramesToRender, &pRenderData);

			m_RenderBuffer.CopyData(pRenderData, renderDataStartIndex, nSamplesToRender);

			m_Render->ReleaseBuffer(nFramesToRender, 0);
		}

	}

	return hr;
}


#pragma endregion

