#include "ntddk.h"
#include "AsmCall.h"
#include "ia32_types.h"
#include "Util.h"
#include "common.h"
#include "Vmm.h"
#include "vm.h"
#include "ept.h"

// Reports whether this binary was compiled without the DBG (checked-build)
// flag, i.e. as a free/release build.
bool IsReleaseBuild() {
#if defined(DBG)
	const bool is_release = false;
#else
	const bool is_release = true;
#endif
	return is_release;
}

// Reports whether this binary targets AMD64 (as opposed to 32-bit x86).
bool IsX64() {
#if defined(_AMD64_)
	const bool is_x64 = true;
#else
	const bool is_x64 = false;
#endif
	return is_x64;
}


// Sets the lock bit (and the VMXON-outside-SMX enable bit) of the
// IA32_FEATURE_CONTROL MSR on the current processor. Intended to run once
// per CPU via UtilForEachProcessor.
// @param context  unused.
// @return STATUS_SUCCESS when the MSR is (or ends up) locked.
NTSTATUS SetLockBitForEachProcessor(void *context) {
	UNREFERENCED_PARAMETER(context);

	Ia32FeatureControlMsr dtFeatureControlMsr;
	Asm_rdmsr((ULONG32)Msr::kIa32FeatureControl, &dtFeatureControlMsr.all);
	if (dtFeatureControlMsr.fields.lock)
	{
		// BIOS (or a previous run) already locked the MSR; nothing to do.
		KdPrint(("Has been lock!\n"));
		return STATUS_SUCCESS;
	}

	// Enable VMXON outside SMX *before* locking: per Intel SDM Vol.3C 23.7,
	// locking with the enable bit clear disables VMXON until the next reset.
	dtFeatureControlMsr.fields.enable_vmxon = 1;
	dtFeatureControlMsr.fields.lock = 1;
	Asm_wrmsr((ULONG32)Msr::kIa32FeatureControl, dtFeatureControlMsr.all);

	// Read back to confirm the write actually took effect.
	Asm_rdmsr((ULONG32)Msr::kIa32FeatureControl, &dtFeatureControlMsr.all);
	if (dtFeatureControlMsr.fields.lock)
	{
		return STATUS_SUCCESS;
	}
	else {
		KdPrint(("lock false!\n"));
		return STATUS_UNSUCCESSFUL;
	}
}

bool VmIsSupportEPT() {
	Ia32VmxEptVpidCapMsr dtVmxEptVpidCap;
	Asm_rdmsr((ULONG32)Msr::kIa32VmxEptVpidCap, &dtVmxEptVpidCap.all);
	if (!dtVmxEptVpidCap.fields.support_page_walk_length4 ||
		!dtVmxEptVpidCap.fields.support_write_back_memory_type ||
		!dtVmxEptVpidCap.fields.support_invept ||
		!dtVmxEptVpidCap.fields.support_single_context_invept ||
		!dtVmxEptVpidCap.fields.support_all_context_invept ||
		!dtVmxEptVpidCap.fields.support_invvpid ||
		!dtVmxEptVpidCap.fields.support_individual_address_invvpid ||
		!dtVmxEptVpidCap.fields.support_single_context_invvpid ||
		!dtVmxEptVpidCap.fields.support_all_context_invvpid ||
		!dtVmxEptVpidCap.fields.support_single_context_retaining_globals_invvpid) {
		KdPrint(("EPT Setting: %p \n", (ULONG64)dtVmxEptVpidCap.all));
		return false;
	}
	else {
		return true;
	}
}

bool VmIsWhisperInstalled() {
	ULONG32 szArrary[4];
	Asm_cpuid(1, szArrary);
	CpuFeaturesEcx cpuEcx;
	cpuEcx.all = szArrary[2];
	if (cpuEcx.fields.not_used == 0)
	{
		KdPrint(("[Whisper]CPU not used!\n"));
		return false;
	}
	Asm_cpuid('LLLL', szArrary);
	return szArrary[0] == 'GDoG';
}

// Builds the 4KB VMX MSR bitmap controlling which RDMSR instructions cause
// VM-exits. Layout (Intel SDM 24.6.9): bytes 0-1023 = read bitmap for MSRs
// 0x0-0x1fff, bytes 1024-2047 = read bitmap for MSRs 0xc0000000-0xc0001fff;
// the write bitmaps (bytes 2048-4095) stay zeroed, so writes never exit.
// Returns the bitmap page, or NULL on allocation failure. Caller owns it.
void *BuildMsrBitMap() {
	PVOID msr_bitmap = (PVOID)ExAllocatePoolWithTag(NonPagedPool, PAGE_SIZE, kHyperPlatformCommonPoolTag);
	if (msr_bitmap == NULL)
	{
		return 0;
	}

	RtlZeroMemory(msr_bitmap, PAGE_SIZE);
	UCHAR *bitmap_read_low = (UCHAR *)msr_bitmap;
	UCHAR *bitmap_read_high = (UCHAR *)msr_bitmap + 1024;
	// Start by intercepting every read; bits are cleared selectively below.
	RtlFillMemory(bitmap_read_low, 1024, 0xff);
	RtlFillMemory(bitmap_read_high, 1024, 0xff);

	RTL_BITMAP bitmap_read_low_header = {};
	RtlInitializeBitMap(&bitmap_read_low_header, (PULONG)bitmap_read_low, 1024 * 8);

	// Do not intercept IA32_MPERF (0xe7) and IA32_APERF (0xe8).
	RtlClearBits(&bitmap_read_low_header, 0xe7, 2);
	ULONG64 nTempRead;
	// Probe low MSRs: when RDMSR faults here the bit is cleared, so the
	// guest's read of that (invalid) MSR raises #GP natively instead of
	// VM-exiting; MSRs that read fine keep causing exits.
	// NOTE(review): only 0x0-0xfff is probed although the low bitmap covers
	// 0x0-0x1fff — confirm whether the upper half is intentionally left set.
	for (ULONG nIdex = 0; nIdex < 0x1000; nIdex++)
	{
		__try {
			Asm_rdmsr(nIdex, &nTempRead);
		}
		__except (EXCEPTION_EXECUTE_HANDLER) {
			RtlClearBits(&bitmap_read_low_header, nIdex, 1);
		}
	}

	RTL_BITMAP bitmap_read_high_header;
	RtlInitializeBitMap(&bitmap_read_high_header, (PULONG)bitmap_read_high, 1024 * 8);

	// Pass through IA32_GS_BASE (0xc0000101) and IA32_KERNEL_GS_BASE
	// (0xc0000102): bit index = MSR - 0xc0000000.
	RtlClearBits(&bitmap_read_high_header, 0x101, 2);

	return msr_bitmap;
}

// Allocates and initializes the pair of VMX I/O bitmaps as one two-page
// buffer (bitmap A: ports 0x0000-0x7fff, bitmap B: ports 0x8000-0xffff).
// Every bit is left clear, so no I/O instruction causes a VM-exit.
// Returns the buffer, or 0 on allocation failure. Caller owns both pages.
PVOID VmpBuildIoBitMap() {
	PVOID io_bitmaps = (PVOID)ExAllocatePoolWithTag(NonPagedPool, PAGE_SIZE * 2, kHyperPlatformCommonPoolTag);
	if (!io_bitmaps)
	{
		return 0;
	}

	RtlZeroMemory(io_bitmaps, PAGE_SIZE * 2);

	UCHAR *io_bitmap_a = (UCHAR *)io_bitmaps;
	UCHAR *io_bitmap_b = io_bitmap_a + PAGE_SIZE;
	RtlFillMemory(io_bitmap_a, PAGE_SIZE, 0);
	RtlFillMemory(io_bitmap_b, PAGE_SIZE, 0);

	RTL_BITMAP bitmap_a_header = {};
	RtlInitializeBitMap(&bitmap_a_header, (PULONG)io_bitmap_a, PAGE_SIZE * 8);

	RTL_BITMAP bitmap_b_header = {};
	RtlInitializeBitMap(&bitmap_b_header, (PULONG)io_bitmap_b, PAGE_SIZE * 8);

	return io_bitmaps;
}

// Per-processor de-virtualization routine. Issues the terminate hypercall to
// leave VMX operation, clears CR4.VMXE and releases this processor's
// management data.
// @param context  unused.
// @return STATUS_SUCCESS when the hypercall succeeded.
NTSTATUS VmpStopVm(void *context) {
	// Ask the VMM to stop and hand back the address of its ProcessorData.
	ProcessorData *processor_data = nullptr;
	if (Asm_VmCall(HypercallNumber::kTerminateVmm, &processor_data)) {
		return STATUS_UNSUCCESSFUL;
	}

	// After vmxoff there is no reason to keep CR4.VMXE set.
	Cr4 cr4 = { Asm_readCr4() };
	cr4.fields.vmxe = false;
	Asm_WriteCr4(cr4.all);

	VmpFreeProcessorData(processor_data);
	return STATUS_SUCCESS;
}

// Drops one reference on the SharedProcessorData and frees it (plus the MSR
// and I/O bitmaps) when the last processor lets go. io_bitmap_a and
// io_bitmap_b are a single two-page allocation, so freeing io_bitmap_a
// releases both.
void VmpFreeSharedData(ProcessorData *processor_data) {
	if (!processor_data->shared_data) {
		return;
	}

	if (InterlockedDecrement(&processor_data->shared_data->reference_count) !=
		0) {
		return;
	}

	// Fixed: this was a bare string-literal statement with no effect
	// (leftover from a removed logging macro); make it an actual log line.
	KdPrint(("Freeing shared data...\n"));
	if (processor_data->shared_data->io_bitmap_a) {
		ExFreePoolWithTag(processor_data->shared_data->io_bitmap_a,
			kHyperPlatformCommonPoolTag);
	}
	if (processor_data->shared_data->msr_bitmap) {
		ExFreePoolWithTag(processor_data->shared_data->msr_bitmap,
			kHyperPlatformCommonPoolTag);
	}
	ExFreePoolWithTag(processor_data->shared_data, kHyperPlatformCommonPoolTag);
}

// Releases everything owned by one processor's ProcessorData: VMM stack,
// VMCS/VMXON regions, scratch read buffer, EPT structures, its share of the
// SharedProcessorData, and finally the structure itself. Safe on nullptr and
// on partially-initialized data (every field is checked).
void VmpFreeProcessorData(ProcessorData *processor_data) {
	if (!processor_data) {
		return;
	}
	if (processor_data->vmm_stack_limit) {
		UtilFreeContiguousMemory(processor_data->vmm_stack_limit);
	}
	if (processor_data->vmcs_region) {
		ExFreePoolWithTag(processor_data->vmcs_region, kHyperPlatformCommonPoolTag);
	}
	if (processor_data->vmxon_region) {
		ExFreePoolWithTag(processor_data->vmxon_region,
			kHyperPlatformCommonPoolTag);
	}
	// Fixed leak: pReadBuffer is allocated with MmAllocateContiguousMemory in
	// VmpInitializeVm but was never released here.
	if (processor_data->pReadBuffer) {
		MmFreeContiguousMemory(processor_data->pReadBuffer);
	}
	if (processor_data->ept_data) {
		EptTermination(processor_data->ept_data);
	}

	VmpFreeSharedData(processor_data);

	ExFreePoolWithTag(processor_data, kHyperPlatformCommonPoolTag);
}

// Puts this processor into VMX root operation: applies the CR0/CR4 fixed-bit
// requirements from the IA32_VMX_CR{0,4}_FIXED{0,1} MSRs, stamps the VMXON
// region with the VMCS revision identifier, executes VMXON, then flushes
// EPT/VPID-derived caches. Returns false if VMXON fails.
bool VmpEnterVmxMode(ProcessorData *processor_data) {
	// CR0: FIXED0 holds must-be-1 bits, FIXED1 holds allowed-1 bits.
	// AND with FIXED1 clears forbidden bits, OR with FIXED0 sets required ones.
	Cr0 cr0Fixed0, cr0Fixed1;
	Asm_rdmsr((ULONG32)Msr::kIa32VmxCr0Fixed0, &cr0Fixed0.all);
	Asm_rdmsr((ULONG32)Msr::kIa32VmxCr0Fixed1, &cr0Fixed1.all);
	Cr0 cr0;
	cr0.all = Asm_readCr0();
	cr0.all &= cr0Fixed1.all;
	cr0.all |= cr0Fixed0.all;
	Asm_WriteCr0(cr0.all);

	// Same fixed-bit treatment for CR4 (this also sets CR4.VMXE, which is a
	// must-be-1 bit for VMX operation).
	Cr4 cr4fixed0, cr4fixed1;
	Asm_rdmsr((ULONG32)Msr::kIa32VmxCr4Fixed0, &cr4fixed0.all);
	Asm_rdmsr((ULONG32)Msr::kIa32VmxCr4Fixed1, &cr4fixed1.all);
	Cr4 cr4;
	cr4.all = Asm_readCr4();
	cr4.all &= cr4fixed1.all;
	cr4.all |= cr4fixed0.all;
	Asm_WriteCr4(cr4.all);

	// The VMXON region must begin with the revision id from IA32_VMX_BASIC.
	Ia32VmxBasicMsr vmx_basic_msr;
	Asm_rdmsr((ULONG32)Msr::kIa32VmxBasic, &vmx_basic_msr.all);
	processor_data->vmxon_region->revision_identifier = vmx_basic_msr.fields.revision_identifier;

	// VMXON takes the *physical* address of the region.
	ULONG_PTR vmxon_region_pa = UtilPaFromVa(processor_data->vmxon_region);
	if (Asm_Vmxon((void *)vmxon_region_pa))
	{
		KdPrint(("[Whisper]Vmxon error !\n"));
		return false;
	}

	// Invalidate all EPT and VPID cached translations so no stale mappings
	// survive from a previous VMX session.
	UtilInveptGlobal();
	UtilInvvipAllContext();
	return true;
}

// Makes this processor's VMCS region current: stamps the revision identifier
// from IA32_VMX_BASIC, clears it with VMCLEAR and loads it with VMPTRLD.
// Returns false when either VMX instruction reports failure.
bool VmpInitializeVmcs(ProcessorData *processor_data) {
	// A VMCS must begin with the revision id reported by IA32_VMX_BASIC.
	Ia32VmxBasicMsr basic_msr;
	Asm_rdmsr((ULONG32)(Msr::kIa32VmxBasic), &basic_msr.all);
	processor_data->vmcs_region->revision_identifier = basic_msr.fields.revision_identifier;

	// Both instructions take the physical address of the region.
	void *vmcs_region_pa = (void *)(ULONG64)UtilPaFromVa(processor_data->vmcs_region);

	if (Asm_VmClear(vmcs_region_pa))
	{
		KdPrint(("[Whisper]VmClear error!\n"));
		return false;
	}

	if (Asm_Vmptrld(vmcs_region_pa))
	{
		KdPrint(("[Whisper]Vmptrld error!\n"));
		return false;
	}

	return true;
}

// Adjusts a requested VMX control value against its capability MSR: the MSR's
// high dword gives the allowed-1 bits and its low dword the must-be-1 bits
// (Intel SDM A.3). Returns the request with forbidden bits cleared and
// required bits forced on.
ULONG VmpAdjustControlValue(ULONG32 nMsrIndex, ULONG request_value) {
	ULARGE_INTEGER capability;
	Asm_rdmsr(nMsrIndex, &capability.QuadPart);

	ULONG adjusted = request_value;
	adjusted &= capability.HighPart;  // drop bits the CPU cannot set
	adjusted |= capability.LowPart;   // force bits the CPU requires
	return adjusted;
}

// Converts a segment selector into the VMCS access-rights field format.
// A null selector yields a descriptor marked "unusable", as the VMCS
// guest-state checks require.
ULONG VmpGetSegmentAccessRight(USHORT segment_selector) {
	VmxRegmentDescriptorAccessRight access_right = {};

	if (!segment_selector) {
		access_right.fields.unusable = true;
		return access_right.all;
	}

	// LAR places the attribute bits starting at bit 8 of its result.
	const SegmentSelector ss = { segment_selector };
	access_right.all = static_cast<ULONG>(AsmLoadAccessRightsByte(ss.all) >> 8);
	access_right.fields.reserved1 = 0;
	access_right.fields.reserved2 = 0;
	access_right.fields.unusable = false;
	return access_right.all;
}

// Returns a pointer to the descriptor the selector indexes within the given
// descriptor table (GDT or LDT base address).
SegmentDescriptor *VmxGetSegmentDescriptor(ULONG_PTR descriptor_table_base, USHORT segment_selector) {
	const SegmentSelector ss = { segment_selector };
	const ULONG_PTR offset = ss.fields.index * sizeof(SegmentDescriptor);
	return (SegmentDescriptor *)(descriptor_table_base + offset);
}

// Reassembles the linear base address scattered across a segment descriptor.
// On x64, descriptors with the S flag clear (system descriptors such as
// TSS/LDT) are 16 bytes long and carry base bits 63:32 in an extra dword.
ULONG_PTR VmxGetSegmentBaseByDescriptor(SegmentDescriptor *segment_descriptor) {
	// base_high -> bits 31:24, base_mid -> bits 23:16, base_low -> bits 15:0.
	ULONG_PTR base_high = segment_descriptor->fields.base_high << (6 * 4);
	ULONG_PTR base_middle = segment_descriptor->fields.base_mid << (4 * 4);
	ULONG_PTR base_low = segment_descriptor->fields.base_low;

	// Mask to 32 bits; the upper half is merged below when applicable.
	ULONG_PTR base = (base_high | base_middle | base_low) & MAXULONG;

	if (IsX64() && !segment_descriptor->fields.system)
	{
		// 16-byte system descriptor: merge in the upper 32 bits of the base.
		base |= (((ULONG64)(((SegmentDesctiptorX64 *)segment_descriptor)->base_upper32)) << 32);
	}

	return base;
}

// Resolves the linear base address of the segment named by a selector.
// A selector with the TI bit set is looked up in the LDT, whose own base is
// first resolved from the GDT via the LDTR selector. A null selector
// resolves to base 0.
ULONG_PTR VmxGetSegmentBase(ULONG_PTR gdt_base, USHORT segment_selector) {
	const SegmentSelector ss = { segment_selector };
	if (!ss.all)
	{
		return 0;
	}

	if (!ss.fields.ti)
	{
		// TI clear: the selector references the GDT directly.
		return VmxGetSegmentBaseByDescriptor(VmxGetSegmentDescriptor(gdt_base, segment_selector));
	}

	// TI set: resolve the LDT's own base from the GDT first, then index the
	// LDT with the original selector.
	SegmentDescriptor *ldt_descriptor = VmxGetSegmentDescriptor(gdt_base, Asm_readldtr());
	const ULONG_PTR ldt_base = VmxGetSegmentBaseByDescriptor(ldt_descriptor);
	return VmxGetSegmentBaseByDescriptor(VmxGetSegmentDescriptor(ldt_base, segment_selector));
}

// Populates the current VMCS: adjusts each requested VM-entry/exit and
// execution control against its capability MSR, mirrors this processor's
// current state into the guest-state area, and points the host-state area at
// AsmVmmEntryPoint / the dedicated VMM stack for VM-exit handling.
// @param processor_data  per-CPU data supplying the bitmaps and EPT pointer
// @param guest_stack_pointer / guest_instruction_pointer  where the guest
//        resumes executing after VMLAUNCH
// @param vmm_stack_pointer  RSP loaded on every VM-exit
// @return true when every VMWRITE succeeded (accumulated error stayed 0)
bool VmpSetupVmcs(const ProcessorData *processor_data, ULONG_PTR guest_stack_pointer, ULONG_PTR guest_instruction_pointer, ULONG_PTR vmm_stack_pointer) {
	// Capture the descriptor table registers for the guest/host state fields.
	Gdtr gdtr = {};
	Asm_SGDT(&gdtr);

	Idtr idtr = {};
	Asm_SIDT(&idtr);

	// IA32_VMX_BASIC bit 55 (vmx_capability_hint) set means the
	// IA32_VMX_TRUE_* capability MSRs must be consulted instead (SDM A.2).
	ULONG bUseTrueMsrs;
	Ia32VmxBasicMsr dtVmxBasicMsr;
	Asm_rdmsr((ULONG32)Msr::kIa32VmxBasic, &dtVmxBasicMsr.all);
	bUseTrueMsrs = dtVmxBasicMsr.fields.vmx_capability_hint;

	// VM-entry controls: load debug controls; IA-32e guest on x64 builds.
	VmxVmEntryControls vm_entryctl_requested = {};
	vm_entryctl_requested.all = 0;
	vm_entryctl_requested.fields.load_debug_controls = true;
	vm_entryctl_requested.fields.ia32e_mode_guest = IsX64();

	VmxVmEntryControls vm_entryctl;
	vm_entryctl.all = VmpAdjustControlValue(bUseTrueMsrs ? \
		(ULONG32)(Msr::kIa32VmxTrueEntryCtls) : \
		(ULONG32)(Msr::kIa32VmxEntryCtls), \
		vm_entryctl_requested.all);

	// VM-exit controls: return to a 64-bit host address space on x64 builds.
	VmxVmExitControls vm_exitctl_requested = {};
	vm_exitctl_requested.all = 0;
	vm_exitctl_requested.fields.host_address_space_size = IsX64();

	VmxVmExitControls vm_exitctl;
	vm_exitctl.all = VmpAdjustControlValue(bUseTrueMsrs ? \
		(ULONG32)(Msr::kIa32VmxTrueExitCtls) : \
		(ULONG32)(Msr::kIa32VmxExitCtls), \
		vm_exitctl_requested.all);

	// Pin-based controls: nothing requested beyond CPU-mandated bits.
	VmxPinBasedControls vm_pinctl_requested = {};
	vm_pinctl_requested.all = 0;
	VmxPinBasedControls vm_pinctl;
	vm_pinctl.all = VmpAdjustControlValue(bUseTrueMsrs ? \
		(ULONG32)Msr::kIa32VmxTruePinbasedCtls : \
		(ULONG32)Msr::kIa32VmxPinbasedCtls, \
		vm_pinctl_requested.all);

	// Primary processor controls: intercept CR3 loads and MOV DR, use the
	// MSR and I/O bitmaps, and enable the secondary control field.
	VmxProcessorBasedControls vm_procctl_requested;
	vm_procctl_requested.all = 0;
	vm_procctl_requested.fields.cr3_load_exiting = true;
	vm_procctl_requested.fields.mov_dr_exiting = true;
	vm_procctl_requested.fields.use_io_bitmaps = true;
	vm_procctl_requested.fields.use_msr_bitmaps = true;
	vm_procctl_requested.fields.activate_secondary_control = true;
	VmxProcessorBasedControls vmx_procctl;
	vmx_procctl.all = VmpAdjustControlValue(bUseTrueMsrs ? \
		(ULONG32)Msr::kIa32VmxTrueProcBasedCtls : \
		(ULONG32)Msr::kIa32VmxProcBasedCtls, \
		vm_procctl_requested.all);

	// Secondary controls: EPT + VPID, plus pass-through of instructions
	// newer Windows versions execute (RDTSCP, INVPCID, XSAVES/XRSTORS) so
	// they do not fault inside the guest.
	VmxSecondaryProcessorBasedControls vm_procctls2_requested;
	vm_procctls2_requested.all = 0;
	vm_procctls2_requested.fields.enable_ept = true;
	//vm_procctls2_requested.fields.descriptor_table_exiting = true;
	vm_procctls2_requested.fields.enable_rdtscp = true; //WIN10
	vm_procctls2_requested.fields.enable_invpcid = true; //WIN10
	vm_procctls2_requested.fields.enable_xsaves_xstors = true; //WIN10
	vm_procctls2_requested.fields.enable_vpid = true;
	VmxSecondaryProcessorBasedControls vm_procctls2;
	vm_procctls2.all = VmpAdjustControlValue(\
		(ULONG32)Msr::kIa32VmxProcBasedCtls2, \
		vm_procctls2_requested.all);

	// Exception bitmap: intercept #DB only.
	ULONG_PTR exception_bitmap = 0;
	exception_bitmap |= (1u << (ULONG_PTR)InterruptionVector::kDebugException);

	// CR0/CR4 guest/host masks: a set bit makes guest writes to that bit
	// trap; the shadows hold the values the guest reads back. By default no
	// bits are host-owned.
	Cr0 cr0_mask;
	cr0_mask.all = 0;
	Cr0 cr0_shadow;
	cr0_shadow.all = Asm_readCr0();

	Cr4 cr4_mask;
	cr4_mask.all = 0;
	Cr4 cr4_shadow;
	cr4_shadow.all = Asm_readCr4();

	// Presumably an x86 PAE host needs to own the paging-related control
	// bits; TODO(review) confirm the intent of UtilIsX86Pae() here.
	if (UtilIsX86Pae())
	{
		cr0_mask.fields.pg = true;
		cr0_mask.fields.cd = true;
		cr0_mask.fields.nw = true;
		cr4_mask.fields.pae = true;
		cr4_mask.fields.pse = true;
		cr4_mask.fields.smep = true;
	}

	// nError accumulates (ORs) every VMWRITE status; 0 means all succeeded.
	// VPID must be non-zero when enable_vpid is set, hence CPU index + 1.
	ULONG_PTR nError = 0;
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kVirtualProcessorId, KeGetCurrentProcessorNumberEx(NULL) + 1);

	// Guest selectors mirror the current (pre-virtualization) state.
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestEsSelector, Asm_readES());
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestCsSelector, Asm_readCS());
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestSsSelector, Asm_readSS());
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestDsSelector, Asm_readDS());
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestFsSelector, Asm_readFS());
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestGsSelector, Asm_readGS());
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestLdtrSelector, Asm_readldtr());
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestTrSelector, Asm_readTR());

	// Host selectors must have RPL=0 and TI=0. NOTE(review): 0xf8 also
	// clears selector bits 8-15; harmless while kernel selectors are < 0x100
	// but 0xfff8 looks like the intended mask — confirm.
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kHostEsSelector, Asm_readES() & 0xf8);
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kHostCsSelector, Asm_readCS() & 0xf8);
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kHostSsSelector, Asm_readSS() & 0xf8);
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kHostDsSelector, Asm_readDS() & 0xf8);
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kHostFsSelector, Asm_readFS() & 0xf8);
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kHostGsSelector, Asm_readGS() & 0xf8);
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kHostTrSelector, Asm_readTR() & 0xf8);

	// Physical addresses of the I/O bitmaps, MSR bitmap and EPT pointer.
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kIoBitmapA, UtilPaFromVa(processor_data->shared_data->io_bitmap_a));
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kIoBitmapB, UtilPaFromVa(processor_data->shared_data->io_bitmap_b));
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kMsrBitmap, UtilPaFromVa(processor_data->shared_data->msr_bitmap));
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kEptPointer, processor_data->ept_data->ept_pointer.all);

	// No VMCS shadowing: the link pointer must be all-ones.
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kVmcsLinkPointer, MAXULONG64);
	ULONG64 nTempValue;
	Asm_rdmsr((ULONG32)Msr::kIa32Debugctl, &nTempValue);
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestIa32Debugctl, nTempValue);

	// Write the adjusted control fields computed above.
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kPinBasedVmExecControl, vm_pinctl.all);
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kCpuBasedVmExecControl, vmx_procctl.all);
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kExceptionBitmap, exception_bitmap);
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kVmExitControls, vm_exitctl.all);
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kVmEntryControls, vm_entryctl.all);
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kSecondaryVmExecControl, vm_procctls2.all);

	// Guest segment limits (LSL) and access rights (LAR) for each selector.
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestEsLimit, __segmentlimit((ULONG)Asm_readES()));
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestDsLimit, __segmentlimit((ULONG)Asm_readDS()));
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestCsLimit, __segmentlimit((ULONG)Asm_readCS()));
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestSsLimit, __segmentlimit((ULONG)Asm_readSS()));
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestFsLimit, __segmentlimit((ULONG)Asm_readFS()));
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestGsLimit, __segmentlimit((ULONG)Asm_readGS()));
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestLdtrLimit, __segmentlimit((ULONG)Asm_readldtr()));
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestTrLimit, __segmentlimit((ULONG)Asm_readTR()));
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestGdtrLimit, gdtr.limit);
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestIdtrLimit, idtr.limit);
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestEsArBytes, VmpGetSegmentAccessRight((USHORT)Asm_readES()));
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestCsArBytes, VmpGetSegmentAccessRight((USHORT)Asm_readCS()));
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestSsArBytes, VmpGetSegmentAccessRight((USHORT)Asm_readSS()));
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestDsArBytes, VmpGetSegmentAccessRight((USHORT)Asm_readDS()));
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestFsArBytes, VmpGetSegmentAccessRight((USHORT)Asm_readFS()));
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestGsArBytes, VmpGetSegmentAccessRight((USHORT)Asm_readGS()));
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestLdtrArBytes, VmpGetSegmentAccessRight((USHORT)Asm_readldtr()));
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestTrArBytes, VmpGetSegmentAccessRight((USHORT)Asm_readTR()));

	// SYSENTER CS is written to both guest and host state (same nTempValue).
	Asm_rdmsr((ULONG32)Msr::kIa32SysenterCs, &nTempValue);
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestSysenterCs, nTempValue);

	nError |= Asm_vmxWrite((ULONG32)VmcsField::kHostIa32SysenterCs, nTempValue);

	// CR masks and read shadows computed above.
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kCr0GuestHostMask, cr0_mask.all);
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kCr4GuestHostMask, cr4_mask.all);
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kCr0ReadShadow, cr0_shadow.all);
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kCr4ReadShadow, cr4_shadow.all);

	// Guest control registers start as the current host values.
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestCr0, Asm_readCr0());
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestCr3, Asm_readCr3());
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestCr4, Asm_readCr4());

#if defined(_AMD64_)
	// x64: ES/CS/SS/DS bases are architecturally 0; FS/GS come from MSRs.
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestEsBase, 0);
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestCsBase, 0);
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestSsBase, 0);
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestDsBase, 0);
	Asm_rdmsr((ULONG32)Msr::kIa32FsBase, &nTempValue);
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestFsBase, nTempValue);
	Asm_rdmsr((ULONG32)Msr::kIa32GsBase, &nTempValue);
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestGsBase, nTempValue);
#else
	// x86: walk the descriptor tables for every segment base.
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestEsBase, VmxGetSegmentBase(gdtr.base, Asm_readES()));
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestCsBase, VmxGetSegmentBase(gdtr.base, Asm_readCS()));
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestSsBase, VmxGetSegmentBase(gdtr.base, Asm_readSS()));
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestDsBase, VmxGetSegmentBase(gdtr.base, Asm_readDS()));
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestFsBase, VmxGetSegmentBase(gdtr.base, Asm_readFS()));
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestGsBase, VmxGetSegmentBase(gdtr.base, Asm_readGS()));
#endif

	// Remaining guest state: LDTR/TR bases, descriptor tables, DR7, stack,
	// resume point, flags and SYSENTER ESP/EIP.
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestLdtrBase, VmxGetSegmentBase(gdtr.base, Asm_readldtr()));
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestTrBase, VmxGetSegmentBase(gdtr.base, Asm_readTR()));
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestGdtrBase, gdtr.base);
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestIdtrBase, idtr.base);
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestDr7, Asm_readDr7());
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestRsp, guest_stack_pointer);
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestRip, guest_instruction_pointer);
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestRflags, Asm_readEflags());
	Asm_rdmsr((ULONG32)Msr::kIa32SysenterEsp, &nTempValue);
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestSysenterEsp, nTempValue);
	Asm_rdmsr((ULONG32)Msr::kIa32SysenterEip, &nTempValue);
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kGuestSysenterEip, nTempValue);

	// Host state loaded on every VM-exit.
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kHostCr0, Asm_readCr0());
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kHostCr3, Asm_readCr3());
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kHostCr4, Asm_readCr4());

#if defined(_AMD64_)
	Asm_rdmsr((ULONG32)Msr::kIa32FsBase, &nTempValue);
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kHostFsBase, nTempValue);
	Asm_rdmsr((ULONG32)Msr::kIa32GsBase, &nTempValue);
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kHostGsBase, nTempValue);
#else
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kHostFsBase, VmxGetSegmentBase(gdtr.base, Asm_readFS()));
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kHostGsBase, VmxGetSegmentBase(gdtr.base, Asm_readGS()));

#endif
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kHostTrBase, VmxGetSegmentBase(gdtr.base, Asm_readTR()));
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kHostGdtrBase, gdtr.base);
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kHostIdtrBase, idtr.base);
	Asm_rdmsr((ULONG32)Msr::kIa32SysenterEsp, &nTempValue);
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kHostIa32SysenterEsp, nTempValue);
	Asm_rdmsr((ULONG32)Msr::kIa32SysenterEip, &nTempValue);
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kHostIa32SysenterEip, nTempValue);
	// Every VM-exit lands in AsmVmmEntryPoint on the dedicated VMM stack.
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kHostRsp, vmm_stack_pointer);
	nError |= Asm_vmxWrite((ULONG32)VmcsField::kHostRip, (ULONG_PTR)AsmVmmEntryPoint);

	KdPrint(("Error Code: %x \n", nError));
	return nError == 0;
}

// Virtualizes the current processor (invoked via Asm_InitializeVM with the
// guest resume point captured). Allocates all per-processor structures,
// enters VMX root mode, builds the VMCS and executes VMLAUNCH. On success
// VMLAUNCH does not return here; reaching the Exit label means failure.
// @param guest_stack_pointer / guest_instruction_pointer  state the guest
//        resumes with after a successful launch
// @param context  SharedProcessorData* shared by all processors
void VmpInitializeVm(ULONG_PTR guest_stack_pointer, ULONG_PTR guest_instruction_pointer, void *context) {

	SharedProcessorData *shared_data = (SharedProcessorData *)context;
	if (!shared_data)
	{
		KdPrint(("[Whisper]context is empty!\n"));
		return;
	}

	ProcessorData *processor_data = (ProcessorData *)ExAllocatePoolWithTag(NonPagedPool, sizeof(ProcessorData), kHyperPlatformCommonPoolTag);
	if (!processor_data)
	{
		KdPrint(("[Whisper]Allocate processor_data is empty!\n"));
		return;
	}

	RtlZeroMemory(processor_data, sizeof(ProcessorData));
	processor_data->shared_data = shared_data;
	InterlockedIncrement(&(processor_data->shared_data->reference_count));

	processor_data->ept_data = EptInitialization();
	if (!processor_data->ept_data)
	{
		KdPrint(("[Whisper]Initialization ept_data Error!\n"));
		VmpFreeProcessorData(processor_data);
		return;
	}

	processor_data->vmm_stack_limit = UtilAllocateContiguousMemory(KERNEL_STACK_SIZE);
	if (!processor_data->vmm_stack_limit)
	{
		VmpFreeProcessorData(processor_data);
		KdPrint(("[Whisper]Allocate stack buffer error!\n"));
		return;
	}

	RtlZeroMemory(processor_data->vmm_stack_limit, KERNEL_STACK_SIZE);

	processor_data->vmcs_region = (VmControlStructure *)ExAllocatePoolWithTag(NonPagedPool, 4096, kHyperPlatformCommonPoolTag);
	if (!processor_data->vmcs_region)
	{
		VmpFreeProcessorData(processor_data);
		KdPrint(("[Whisper]Allocate vmcs_region error!\n"));
		return;
	}
	RtlZeroMemory(processor_data->vmcs_region, 4096);

	processor_data->vmxon_region = (VmControlStructure *)ExAllocatePoolWithTag(NonPagedPool, 4096, kHyperPlatformCommonPoolTag);
	if (!processor_data->vmxon_region)
	{
		VmpFreeProcessorData(processor_data);
		KdPrint(("[Whisper]Allocate vmxon_region error!\n"));
		return;
	}
	RtlZeroMemory(processor_data->vmxon_region, 4096);

	// Scratch page for the VMM. NOTE(review): MmAllocateContiguousMemory
	// requires IRQL <= APC_LEVEL — confirm the IRQL this callback runs at.
	// Fixed: the result was previously used without a NULL check or zeroing.
	PHYSICAL_ADDRESS dtTempAddress;
	dtTempAddress.QuadPart = -1;
	processor_data->pReadBuffer = MmAllocateContiguousMemory(0x1000, dtTempAddress);
	if (!processor_data->pReadBuffer)
	{
		VmpFreeProcessorData(processor_data);
		KdPrint(("[Whisper]Allocate pReadBuffer error!\n"));
		return;
	}
	RtlZeroMemory(processor_data->pReadBuffer, 0x1000);

	// VMM stack layout, from the top down:
	//   [ProcessorData*] [MachineFrame] [... VM-exit handler stack ...]
	ULONG_PTR vmm_stack_region_base = (ULONG_PTR)(processor_data->vmm_stack_limit) + KERNEL_STACK_SIZE;
	ULONG_PTR vmm_stack_data = (ULONG_PTR)(vmm_stack_region_base)-sizeof(void *);
	*(ProcessorData **)vmm_stack_data = processor_data;

	if (!IsReleaseBuild())
	{
		// Poison a trap-frame-sized area so stale stack data is obvious in a
		// debugger on checked builds.
		void *vmm_stack_frame = (void *)(vmm_stack_data - sizeof(KtrapFrame));
		RtlFillMemory(vmm_stack_frame, sizeof(KtrapFrame), 0xff);
	}

	ULONG_PTR vmm_stack_base = vmm_stack_region_base - sizeof(void *) - sizeof(MachineFrame);

	if (!VmpEnterVmxMode(processor_data))
	{
		VmpFreeProcessorData(processor_data);
		KdPrint(("[Whisper]Enter VmxMode Error!\n"));
		return;
	}

	// From here on vmxon has succeeded, so failures must go through Exit to
	// execute vmxoff before freeing.
	if (!VmpInitializeVmcs(processor_data))
	{
		KdPrint(("[Whisper]Initialize Vmcs error!\n"));
		goto Exit;
	}

	if (!VmpSetupVmcs(processor_data, (ULONG_PTR)guest_stack_pointer, (ULONG_PTR)guest_instruction_pointer, vmm_stack_base))
	{
		KdPrint(("[Whisper]SetupVmcs Error!\n"));
		goto Exit;
	}

	KdPrint(("[Whisper]Initialize Success!\n"));

	// On success this does not return; execution resumes as the guest at
	// guest_instruction_pointer. Falling through means VMLAUNCH failed.
	Asm_launch();

Exit:
	KdPrint(("[Whisper]Error Number: %I64x\n", Asm_VmRead((ULONG_PTR)VmcsField::kVmInstructionError)));
	Asm_VmxOff();
	VmpFreeProcessorData(processor_data);
	return;
}

void VmTermination() {

	/*auto status = */UtilForEachProcessor(VmpStopVm, nullptr);
	if (!VmIsWhisperInstalled()) {
	}
	else {
	}
	NT_ASSERT(!VmIsWhisperInstalled());
}

// Virtualizes the current processor via the assembly trampoline and then
// verifies the hypervisor actually answers its backdoor CPUID.
// @param context  SharedProcessorData* passed through to VmpInitializeVm.
// @return STATUS_SUCCESS when virtualization took effect.
NTSTATUS VmpStartVm(void *context) {
	Asm_InitializeVM(VmpInitializeVm, context);
	return VmIsWhisperInstalled() ? STATUS_SUCCESS : STATUS_UNSUCCESSFUL;
}

// Allocates the data shared by all processors: the MSR bitmap and the pair
// of I/O bitmaps. Returns nullptr on any allocation failure, releasing
// whatever was already allocated.
SharedProcessorData *VmpInitializeSharedData() {
	SharedProcessorData *shared_data = (SharedProcessorData *)ExAllocatePoolWithTag(NonPagedPool, sizeof(SharedProcessorData), kHyperPlatformCommonPoolTag);
	if (!shared_data)
	{
		return 0;
	}
	RtlZeroMemory(shared_data, sizeof(SharedProcessorData));

	shared_data->msr_bitmap = BuildMsrBitMap();
	if (!shared_data->msr_bitmap)
	{
		ExFreePoolWithTag(shared_data, kHyperPlatformCommonPoolTag);
		return 0;
	}

	PVOID io_bitmaps = VmpBuildIoBitMap();
	if (!io_bitmaps)
	{
		ExFreePoolWithTag(shared_data->msr_bitmap, kHyperPlatformCommonPoolTag);
		ExFreePoolWithTag(shared_data, kHyperPlatformCommonPoolTag);
		return 0;
	}

	// Both bitmaps live in one two-page allocation: A is the first page,
	// B the second, so only io_bitmap_a needs freeing later.
	shared_data->io_bitmap_a = io_bitmaps;
	shared_data->io_bitmap_b = (PVOID)((unsigned char *)io_bitmaps + PAGE_SIZE);

	return shared_data;
}

// Checks whether this machine can run the hypervisor: the VMX CPUID bit,
// a write-back VMCS memory type, a (lockable) IA32_FEATURE_CONTROL that
// permits VMXON, and the required EPT/VPID capabilities.
bool VmIsSupportVmx() {
	// CPUID.1:ECX.VMX[bit 5] must be set.
	ULONG32 szArrary[4];
	Asm_cpuid(1, szArrary);
	CpuFeaturesEcx cpuEcx;
	cpuEcx.all = szArrary[2];
	if (!cpuEcx.fields.vmx)
	{
		KdPrint(("Vmx bit is false!\n"));
		return false;
	}

	// IA32_VMX_BASIC must report write-back as the VMCS memory type.
	Ia32VmxBasicMsr dtVmxBasicMsr;
	Asm_rdmsr((ULONG32)Msr::kIa32VmxBasic, &dtVmxBasicMsr.all);
	if ((ULONG32)dtVmxBasicMsr.fields.memory_type != (ULONG32)memory_type::kWriteBack)
	{
		KdPrint(("memory_type is not WriteBack! %x\n", (ULONG32)dtVmxBasicMsr.fields.memory_type));
		return false;
	}

	Ia32FeatureControlMsr dtFeatureControlMsr;
	Asm_rdmsr((ULONG32)Msr::kIa32FeatureControl, &dtFeatureControlMsr.all);
	if (!dtFeatureControlMsr.fields.lock)
	{
		NTSTATUS nReturnStatus = UtilForEachProcessor(SetLockBitForEachProcessor, NULL);
		if (!NT_SUCCESS(nReturnStatus))
		{
			KdPrint(("Each processor is false!\n"));
			return false;
		}
		// Fixed: re-read the MSR so the enable_vmxon check below sees the
		// value written by SetLockBitForEachProcessor, not the stale
		// pre-lock snapshot.
		Asm_rdmsr((ULONG32)Msr::kIa32FeatureControl, &dtFeatureControlMsr.all);
	}
	if (!dtFeatureControlMsr.fields.enable_vmxon)
	{
		KdPrint(("Is not support vmxon!\n"));
		return false;
	}

	if (VmIsSupportEPT())
	{
		return true;
	}
	else {
		KdPrint(("Is not support EPT!\n"));
		return false;
	}
}