#include "pch.h"
#include "VideoSampleProcessor.h"

using namespace Microsoft::Media::FragmentedMediaSink;

// Builds a video sample processor for the given fragment controllers and input type.
// - fragmentControllers: per-quality fragment controllers; all are expected to share
//   the same fragment size (the first one's duration is used).
// - spInputMediaType: input video media type; MF_MT_FRAME_RATE drives the default
//   sample duration, MF_MT_FRAME_SIZE drives the resolution-matching HACK below.
// - inputStreamIndex: stream index forwarded to the base-class Initialize.
VideoSampleProcessor::VideoSampleProcessor(IVector<IFragmentController^>^ fragmentControllers, IMFMediaType* spInputMediaType, unsigned int inputStreamIndex)
	:ISampleProcessor()
	, _currentFragmentDuration(0)
	, _currentFragmentOffset(0)
	, _fragmentEndTime(0)
	, _nextSampleTime(0)
	, _isFragmentInitialized(false)
	, _samplesInFragment(0)
	, _fragmentIndex(0)
	, _maxFragmentDuration(0)   // ensure a defined value even if the controller list is empty
	, _defaultSampleDuration(0) // ensure a defined value even if no frame rate is available
{
	// find the fragment duration (all controllers share it; take the first)
	if (fragmentControllers->Size > 0)
	{
		_maxFragmentDuration = fragmentControllers->GetAt(0)->FragmentSize.Duration;
	}

	// calculate the default sample duration (100ns units) based on the frame rate;
	// guard num > 0 to avoid dividing by zero on a malformed media type
	UINT32 num, denom;
	HRESULT hr = MFGetAttributeRatio(spInputMediaType, MF_MT_FRAME_RATE, &num, &denom);
	if (SUCCEEDED(hr) && num > 0)
	{
		_defaultSampleDuration = llroundl((long double)10000000 * denom / num);
		// adjust the fragment duration to be a multiple of the sample duration
		// (only meaningful when the sample duration did not round down to zero)
		if (_defaultSampleDuration > 0)
		{
			_maxFragmentDuration = _defaultSampleDuration * llroundl((long double)_maxFragmentDuration / _defaultSampleDuration);
		}
	}

	if (SUCCEEDED(hr))
	{
		// HACK: video frame rate can be anything as long as it's different from output framerate. 
		// If we don't do this, fragments with matching resolutions will have different _fragmentDurations from those that do not.
		if (fragmentControllers->Size > 1)
		{
			UINT32 width, height;
			hr = MFGetAttributeSize(spInputMediaType, MF_MT_FRAME_SIZE, &width, &height);

			if (SUCCEEDED(hr))
			{
				int matchedResolutions = 0;
				int unmatchedResolutions = 0;
				for each (IFragmentController^ fragmentController in fragmentControllers)
				{
					// dynamic_cast yields nullptr for non-video properties; count those
					// as unmatched instead of dereferencing a null handle
					VideoEncodingProperties^ videoEncodingProperties = dynamic_cast<VideoEncodingProperties^>(fragmentController->EncodingProperties);
					if (videoEncodingProperties != nullptr &&
						(width == videoEncodingProperties->Width || height == videoEncodingProperties->Height))
					{
						matchedResolutions++;
					}
					else
					{
						unmatchedResolutions++;
					}
				}

				// mixed matched/unmatched resolutions: double the frame rate so all
				// fragments disagree with the output rate in the same way
				if (matchedResolutions > 0 && unmatchedResolutions > 0)
				{
					hr = MFSetAttributeRatio(spInputMediaType, MF_MT_FRAME_RATE, num * 2, denom);
				}
			}
		}
	}

	ISampleProcessor::Initialize(fragmentControllers, spInputMediaType, inputStreamIndex);
}

// Nothing to release here; all owned state is cleaned up by the base class / members.
VideoSampleProcessor::~VideoSampleProcessor()
{
}

// Processes one incoming video sample: normalizes its duration, opens a fragment if
// needed, then either writes the sample (retimed to the fragment timeline), crops it
// at the fragment boundary and closes the fragment, or drops it when its rounded
// duration collapses to zero. Returns the first failing HRESULT, or S_OK.
HRESULT VideoSampleProcessor::ProcessSample(_In_ IMFSample* spSample)
{
	if (spSample == nullptr)
	{
		return E_POINTER;
	}

	long long sampleTime;
	HRESULT hr = spSample->GetSampleTime(&sampleTime);

	long long sampleDuration = 0;
	if (SUCCEEDED(hr))
	{
		hr = spSample->GetSampleDuration(&sampleDuration);
		if (SUCCEEDED(hr))
		{
			if (sampleDuration <= 0) // Surface RT has sample duration of zero.
			{
				sampleDuration = _defaultSampleDuration;
			}
			if (sampleDuration <= 0)
			{
				// No usable duration from the sample or the media type; the rounding
				// below would divide by zero (undefined behavior), so fail instead.
				hr = E_UNEXPECTED;
			}
		}
	}

	if (SUCCEEDED(hr) && !_isFragmentInitialized)
	{
		hr = InitializeFragments();
	}

	if (SUCCEEDED(hr))
	{
		long long sampleEndTime = sampleTime + sampleDuration;

		// Snap the sample's end time to a whole number of sample durations past
		// _nextSampleTime so the output timeline stays gap- and overlap-free.
		long long desiredSampleDuration = sampleEndTime - _nextSampleTime;
		long long roundedSampleDuration = sampleDuration * llroundl((long double)(desiredSampleDuration) / sampleDuration);
		long long roundedSampleEndTime = _nextSampleTime + roundedSampleDuration;

		if (sampleEndTime >= _fragmentEndTime || roundedSampleEndTime >= _fragmentEndTime)
		{
			// crop end of sample to end at end of fragment, then close the fragment
			hr = spSample->SetSampleTime(_nextSampleTime);

			if (SUCCEEDED(hr))
				hr = spSample->SetSampleDuration(_fragmentEndTime - _nextSampleTime);

			if (SUCCEEDED(hr))
				hr = WriteSample(spSample);

			if (SUCCEEDED(hr))
				hr = TerminateFragments();
		}
		else if (roundedSampleDuration > 0) // else drop sample
		{
			hr = spSample->SetSampleTime(_nextSampleTime);

			if (SUCCEEDED(hr))
				hr = spSample->SetSampleDuration(roundedSampleDuration);

			if (SUCCEEDED(hr))
				hr = WriteSample(spSample);
		}
	}

	return hr;
}

// Forwards a stream tick to the base processor, rebased onto the
// current fragment's local timeline.
HRESULT VideoSampleProcessor::SendTick(_In_ long long timeStamp)
{
	const long long fragmentRelativeTime = timeStamp - _currentFragmentOffset;
	return ISampleProcessor::SendTick(fragmentRelativeTime);
}

// Writes a sample into the current fragment: records where the next sample should
// start, rebases the sample time onto the fragment-local timeline, and hands it to
// the base processor. Returns the first failing HRESULT, or S_OK.
HRESULT VideoSampleProcessor::WriteSample(_In_ IMFSample* spSample)
{
	long long sampleTime;
	HRESULT hr = spSample->GetSampleTime(&sampleTime);

	long long sampleDuration;
	if (SUCCEEDED(hr))
	{
		hr = spSample->GetSampleDuration(&sampleDuration);
	}

	if (SUCCEEDED(hr))
	{
		// Track the expected start of the next sample and the running fragment length.
		_nextSampleTime = sampleTime + sampleDuration;
		_currentFragmentDuration = _nextSampleTime - _currentFragmentOffset;

		// Rebase the sample onto the fragment-local timeline before writing it.
		hr = spSample->SetSampleTime(sampleTime - _currentFragmentOffset);

		if (SUCCEEDED(hr))
		{
			// BUGFIX: propagate the base processor's result instead of discarding it,
			// so a downstream write failure is no longer reported as success.
			hr = ISampleProcessor::ProcessSample(spSample);
		}

		// The sample was accounted for in this fragment's timeline either way.
		_samplesInFragment++;
	}

	return hr;
}

// Opens a new fragment in the base processor and, on success, resets the
// per-fragment bookkeeping: the new fragment starts at _nextSampleTime and
// ends _maxFragmentDuration later.
HRESULT VideoSampleProcessor::InitializeFragments()
{
	const HRESULT hr = ISampleProcessor::InitializeFragments(_fragmentIndex);
	if (FAILED(hr))
	{
		return hr;
	}

	_currentFragmentDuration = 0;
	_currentFragmentOffset = _nextSampleTime;
	_fragmentEndTime = _currentFragmentOffset + _maxFragmentDuration;
	_isFragmentInitialized = true;

	return hr;
}

// Closes the current fragment via the base processor, which reports back the new
// fragment offset and next sample time. The per-fragment counters are reset
// unconditionally so the next sample always opens a fresh fragment.
HRESULT VideoSampleProcessor::TerminateFragments()
{
	const HRESULT hr = ISampleProcessor::TerminateFragments(&_currentFragmentOffset, &_nextSampleTime);

	++_fragmentIndex;
	_samplesInFragment = 0;
	_isFragmentInitialized = false;

	return hr;
}
}