HyperDbg Debugger
VmxRegions.c File Reference

Implements allocations for VMX regions (VMXON region, VMCS, MSR bitmap, etc.). More...

#include "pch.h"

Functions

_Use_decl_annotations_ BOOLEAN VmxAllocateVmxonRegion (VIRTUAL_MACHINE_STATE *VCpu)
 Allocates the VMXON region and sets the revision ID based on IA32_VMX_BASIC_MSR.
 
_Use_decl_annotations_ BOOLEAN VmxAllocateVmcsRegion (VIRTUAL_MACHINE_STATE *VCpu)
 Allocates the VMCS region and sets the revision ID based on IA32_VMX_BASIC_MSR.
 
BOOLEAN VmxAllocateVmmStack (_Inout_ VIRTUAL_MACHINE_STATE *VCpu)
 Allocate VMM Stack.
 
BOOLEAN VmxAllocateMsrBitmap (_Inout_ VIRTUAL_MACHINE_STATE *VCpu)
 Allocate a buffer for Msr Bitmap.
 
BOOLEAN VmxAllocateIoBitmaps (_Inout_ VIRTUAL_MACHINE_STATE *VCpu)
 Allocate a buffer for I/O Bitmap.
 
UINT64 * VmxAllocateInvalidMsrBimap ()
 Allocates a buffer and tests which MSRs raise #GP when read.
 
BOOLEAN VmxAllocateHostIdt (_Inout_ VIRTUAL_MACHINE_STATE *VCpu)
 Allocate a buffer for host IDT.
 
BOOLEAN VmxAllocateHostGdt (_Inout_ VIRTUAL_MACHINE_STATE *VCpu)
 Allocate a buffer for host GDT.
 
BOOLEAN VmxAllocateHostTss (_Inout_ VIRTUAL_MACHINE_STATE *VCpu)
 Allocate a buffer for host TSS.
 
BOOLEAN VmxAllocateHostInterruptStack (_Inout_ VIRTUAL_MACHINE_STATE *VCpu)
 Allocate a buffer for host interrupt stack.
 

Detailed Description

Implements allocations for VMX regions (VMXON region, VMCS, MSR bitmap, etc.)

Author
Sina Karvandi (sina@hyperdbg.org)
Version
0.1
Date
2020-04-11

Function Documentation

◆ VmxAllocateHostGdt()

BOOLEAN VmxAllocateHostGdt ( _Inout_ VIRTUAL_MACHINE_STATE * VCpu)

Allocate a buffer for host GDT.

Parameters
VCpu The virtual processor's state
Returns
BOOLEAN Returns true if the allocation was successful, otherwise returns false
{
    UINT32 GdtSize = HOST_GDT_DESCRIPTOR_COUNT * sizeof(SEGMENT_DESCRIPTOR_64);

    //
    // Make sure the memory is aligned
    //
    if (PAGE_SIZE > GdtSize)
    {
        GdtSize = PAGE_SIZE;
    }

    //
    // Allocate aligned memory for host GDT
    //
    VCpu->HostGdt = (UINT64)PlatformMemAllocateZeroedNonPagedPool(GdtSize); // should be aligned

    if (VCpu->HostGdt == NULL64_ZERO)
    {
        LogError("Err, insufficient memory in allocating host GDT");
        return FALSE;
    }

    LogDebugInfo("Host GDT virtual address : 0x%llx", VCpu->HostGdt);

    return TRUE;
}
#define NULL64_ZERO
Definition BasicTypes.h:52
#define TRUE
Definition BasicTypes.h:55
#define FALSE
Definition BasicTypes.h:54
unsigned __int64 UINT64
Definition BasicTypes.h:21
unsigned int UINT32
Definition BasicTypes.h:48
#define LogDebugInfo(format,...)
Log, initialize boot information and debug information.
Definition HyperDbgHyperLogIntrinsics.h:155
#define LogError(format,...)
Log in the case of error.
Definition HyperDbgHyperLogIntrinsics.h:113
PVOID PlatformMemAllocateZeroedNonPagedPool(SIZE_T NumberOfBytes)
Allocate a non-paged buffer (zeroed)
Definition Mem.c:69
#define HOST_GDT_DESCRIPTOR_COUNT
Maximum number of entries in GDT.
Definition Segmentation.h:29
#define PAGE_SIZE
Size of each page (4096 bytes)
Definition common.h:69

◆ VmxAllocateHostIdt()

BOOLEAN VmxAllocateHostIdt ( _Inout_ VIRTUAL_MACHINE_STATE * VCpu)

Allocate a buffer for host IDT.

Parameters
VCpu The virtual processor's state
Returns
BOOLEAN Returns true if the allocation was successful, otherwise returns false
{
    UINT32 IdtSize = HOST_IDT_DESCRIPTOR_COUNT * sizeof(SEGMENT_DESCRIPTOR_INTERRUPT_GATE_64);

    //
    // Make sure the memory is aligned
    //
    if (PAGE_SIZE > IdtSize)
    {
        IdtSize = PAGE_SIZE;
    }

    //
    // Allocate aligned memory for host IDT
    //
    VCpu->HostIdt = (UINT64)PlatformMemAllocateZeroedNonPagedPool(IdtSize); // should be aligned

    if (VCpu->HostIdt == NULL64_ZERO)
    {
        LogError("Err, insufficient memory in allocating host IDT");
        return FALSE;
    }

    LogDebugInfo("Host IDT virtual address : 0x%llx", VCpu->HostIdt);

    return TRUE;
}
#define HOST_IDT_DESCRIPTOR_COUNT
Maximum number of interrupt entries in IDT.
Definition IdtEmulation.h:29
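
The buffers produced by VmxAllocateHostGdt and VmxAllocateHostIdt are only raw, zeroed storage; the VMCS host-state area still has to be pointed at them before they take effect on VM-exit. The fragment below is a minimal sketch of that step, assuming the standard VMCS host-state field encodings from the Intel SDM (0x6C0C for the host GDTR base, 0x6C0E for the host IDTR base) and a VMCS that is already current; it is not HyperDbg's own VMCS setup routine.

#include <intrin.h>
// VIRTUAL_MACHINE_STATE and the basic types come from the driver headers ("pch.h")

// VMCS host-state field encodings (Intel SDM, Appendix B), defined locally for this sketch
#define VMCS_HOST_GDTR_BASE 0x00006C0C
#define VMCS_HOST_IDTR_BASE 0x00006C0E

// Sketch: point the host GDTR/IDTR bases at the buffers allocated above.
// Error handling for __vmx_vmwrite is omitted for brevity.
static VOID
SketchSetHostDescriptorTableBases(VIRTUAL_MACHINE_STATE * VCpu)
{
    __vmx_vmwrite(VMCS_HOST_GDTR_BASE, VCpu->HostGdt); // GDT used while in VMX root mode
    __vmx_vmwrite(VMCS_HOST_IDTR_BASE, VCpu->HostIdt); // IDT used while in VMX root mode
}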

◆ VmxAllocateHostInterruptStack()

BOOLEAN VmxAllocateHostInterruptStack ( _Inout_ VIRTUAL_MACHINE_STATE * VCpu)

Allocate a buffer for host interrupt stack.

Parameters
VCpu The virtual processor's state
Returns
BOOLEAN Returns true if the allocation was successful, otherwise returns false
{
    //
    // Allocate the host interrupt stack
    //
    VCpu->HostInterruptStack = (UINT64)PlatformMemAllocateZeroedNonPagedPool(HOST_INTERRUPT_STACK_SIZE);

    if (VCpu->HostInterruptStack == NULL64_ZERO)
    {
        LogError("Err, insufficient memory in allocating host interrupt stack");
        return FALSE;
    }

    LogDebugInfo("Host interrupt stack virtual address : 0x%llx", VCpu->HostInterruptStack);

    return TRUE;
}
#define HOST_INTERRUPT_STACK_SIZE
Size of host interrupt stack.
Definition Segmentation.h:35

◆ VmxAllocateHostTss()

BOOLEAN VmxAllocateHostTss ( _Inout_ VIRTUAL_MACHINE_STATE * VCpu)

Allocate a buffer for host TSS.

Parameters
VCpu The virtual processor's state
Returns
BOOLEAN Returns true if the allocation was successful, otherwise returns false
{
    UINT32 TssSize = PAGE_SIZE; // Make sure the memory is aligned

    //
    // Allocate aligned memory for host TSS
    //
    VCpu->HostTss = (UINT64)PlatformMemAllocateZeroedNonPagedPool(TssSize); // should be aligned

    if (VCpu->HostTss == NULL64_ZERO)
    {
        LogError("Err, insufficient memory in allocating host TSS");
        return FALSE;
    }

    LogDebugInfo("Host TSS virtual address : 0x%llx", VCpu->HostTss);

    return TRUE;
}
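
The host TSS and the host interrupt stack work together: on x86-64, an exception or interrupt can be delivered on one of the TSS's Interrupt Stack Table (IST) slots, so the stack allocated by VmxAllocateHostInterruptStack is normally wired into the TSS allocated here. The sketch below illustrates that wiring with a minimal, packed 64-bit TSS layout taken from the Intel SDM; the structure and helper names are hypothetical stand-ins for HyperDbg's own segmentation code.

// Minimal 64-bit TSS layout (Intel SDM, Vol. 3A); the name is hypothetical for this sketch
#pragma pack(push, 1)
typedef struct _HOST_TSS64_SKETCH
{
    UINT32 Reserved0;
    UINT64 Rsp0;
    UINT64 Rsp1;
    UINT64 Rsp2;
    UINT64 Reserved1;
    UINT64 Ist[7];          // IST1..IST7
    UINT64 Reserved2;
    UINT16 Reserved3;
    UINT16 IoMapBaseOffset;
} HOST_TSS64_SKETCH;
#pragma pack(pop)

// Sketch: let IST1 point at the *top* of the host interrupt stack (stacks grow
// downwards), so host-mode exceptions run on the dedicated buffer allocated above.
static VOID
SketchWireInterruptStackIntoHostTss(VIRTUAL_MACHINE_STATE * VCpu)
{
    HOST_TSS64_SKETCH * Tss = (HOST_TSS64_SKETCH *)VCpu->HostTss;

    Tss->Ist[0] = VCpu->HostInterruptStack + HOST_INTERRUPT_STACK_SIZE;
}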

◆ VmxAllocateInvalidMsrBimap()

UINT64 * VmxAllocateInvalidMsrBimap ( )

Allocates a buffer and tests which MSRs raise #GP when read.

Returns
UINT64 * The allocated invalid MSR bitmap, or NULL if the allocation fails
{
    UINT64 * InvalidMsrBitmap;

    InvalidMsrBitmap = PlatformMemAllocateZeroedNonPagedPool(0x1000 / 0x8);

    if (InvalidMsrBitmap == NULL)
    {
        return NULL;
    }

    for (UINT32 i = 0; i < 0x1000; ++i)
    {
        __try
        {
            __readmsr(i);
        }
        __except (EXCEPTION_EXECUTE_HANDLER)
        {
            SetBit(i, (unsigned long *)InvalidMsrBitmap);
        }
    }

    return InvalidMsrBitmap;
}
void SetBit(int BitNumber, unsigned long *addr)
set the bit
Definition Bitwise.c:46
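Each bit of the returned buffer corresponds to one MSR index in the range 0x0-0xFFF: bit i is set when reading MSR i raised #GP during the probe loop above, and the buffer holds 0x1000 / 0x8 = 512 bytes, i.e. 4096 bits. Looking an MSR up is therefore plain bit arithmetic, as in the hedged sketch below; the helper name is hypothetical, and it assumes the conventional little-endian, bit-i-of-the-buffer layout used by SetBit.

// Sketch: test whether MSR index 'Msr' was flagged as #GP-raising by
// VmxAllocateInvalidMsrBimap(). Only MSRs 0x0-0xFFF are probed by the allocator.
static BOOLEAN
SketchIsMsrInvalid(UINT64 * InvalidMsrBitmap, UINT32 Msr)
{
    if (InvalidMsrBitmap == NULL || Msr >= 0x1000)
    {
        return FALSE;
    }

    return ((InvalidMsrBitmap[Msr / 64] >> (Msr % 64)) & 1) ? TRUE : FALSE;
}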

◆ VmxAllocateIoBitmaps()

BOOLEAN VmxAllocateIoBitmaps ( _Inout_ VIRTUAL_MACHINE_STATE * VCpu)

Allocate a buffer for I/O Bitmap.

Parameters
VCpu The virtual processor's state
Returns
BOOLEAN Returns true if the allocation was successful, otherwise returns false
{
    //
    // Allocate memory for I/O Bitmap (A)
    //
    VCpu->IoBitmapVirtualAddressA = (UINT64)PlatformMemAllocateZeroedNonPagedPool(PAGE_SIZE); // should be aligned

    if (VCpu->IoBitmapVirtualAddressA == NULL64_ZERO)
    {
        LogError("Err, insufficient memory in allocating I/O Bitmaps A");
        return FALSE;
    }

    VCpu->IoBitmapPhysicalAddressA = VirtualAddressToPhysicalAddress((PVOID)VCpu->IoBitmapVirtualAddressA);

    LogDebugInfo("I/O Bitmap A Virtual Address : 0x%llx", VCpu->IoBitmapVirtualAddressA);
    LogDebugInfo("I/O Bitmap A Physical Address : 0x%llx", VCpu->IoBitmapPhysicalAddressA);

    //
    // Allocate memory for I/O Bitmap (B)
    //
    VCpu->IoBitmapVirtualAddressB = (UINT64)PlatformMemAllocateZeroedNonPagedPool(PAGE_SIZE); // should be aligned

    if (VCpu->IoBitmapVirtualAddressB == NULL64_ZERO)
    {
        LogError("Err, insufficient memory in allocating I/O Bitmaps B");
        return FALSE;
    }

    VCpu->IoBitmapPhysicalAddressB = VirtualAddressToPhysicalAddress((PVOID)VCpu->IoBitmapVirtualAddressB);

    LogDebugInfo("I/O Bitmap B virtual address : 0x%llx", VCpu->IoBitmapVirtualAddressB);
    LogDebugInfo("I/O Bitmap B physical address : 0x%llx", VCpu->IoBitmapPhysicalAddressB);

    return TRUE;
}
_Use_decl_annotations_ UINT64 VirtualAddressToPhysicalAddress(_In_ PVOID VirtualAddress)
Converts Virtual Address to Physical Address.
Definition Conversion.c:154
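
Both addresses are recorded on purpose: the driver touches and frees the bitmaps through the virtual addresses, while the VMCS I/O-bitmap control fields must be programmed with the physical addresses. A minimal sketch of that last step is shown below, assuming the standard VMCS control field encodings from the Intel SDM (0x2000 and 0x2002), a VMCS that is already current, and the "use I/O bitmaps" execution control being enabled; it is not HyperDbg's own VMCS configuration code.

#include <intrin.h>

// VMCS control field encodings for the I/O bitmaps (Intel SDM, Appendix B)
#define VMCS_CTRL_IO_BITMAP_A_ADDRESS 0x00002000
#define VMCS_CTRL_IO_BITMAP_B_ADDRESS 0x00002002

// Sketch: hand the physical addresses of bitmap A (ports 0x0000-0x7FFF) and
// bitmap B (ports 0x8000-0xFFFF) to the current VMCS.
static VOID
SketchSetIoBitmapAddresses(VIRTUAL_MACHINE_STATE * VCpu)
{
    __vmx_vmwrite(VMCS_CTRL_IO_BITMAP_A_ADDRESS, VCpu->IoBitmapPhysicalAddressA);
    __vmx_vmwrite(VMCS_CTRL_IO_BITMAP_B_ADDRESS, VCpu->IoBitmapPhysicalAddressB);
}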

◆ VmxAllocateMsrBitmap()

BOOLEAN VmxAllocateMsrBitmap ( _Inout_ VIRTUAL_MACHINE_STATE * VCpu)

Allocate a buffer for Msr Bitmap.

Parameters
VCpu The virtual processor's state
Returns
BOOLEAN Returns true if the allocation was successful, otherwise returns false
{
    //
    // Allocate memory for MSR Bitmap
    // Should be aligned
    //
    VCpu->MsrBitmapVirtualAddress = (UINT64)PlatformMemAllocateZeroedNonPagedPool(PAGE_SIZE);

    if (VCpu->MsrBitmapVirtualAddress == NULL64_ZERO)
    {
        LogError("Err, insufficient memory in allocating MSR Bitmaps");
        return FALSE;
    }

    VCpu->MsrBitmapPhysicalAddress = VirtualAddressToPhysicalAddress((PVOID)VCpu->MsrBitmapVirtualAddress);

    LogDebugInfo("MSR Bitmap virtual address : 0x%llx", VCpu->MsrBitmapVirtualAddress);
    LogDebugInfo("MSR Bitmap physical address : 0x%llx", VCpu->MsrBitmapPhysicalAddress);

    return TRUE;
}
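
The 4 KB MSR bitmap page has a fixed layout defined by the Intel SDM: bytes 0-1023 form the read bitmap for low MSRs (0x00000000-0x00001FFF), bytes 1024-2047 the read bitmap for high MSRs (0xC0000000-0xC0001FFF), and the two write bitmaps follow in the same order; a set bit makes the corresponding RDMSR or WRMSR cause a VM-exit. The hedged sketch below marks a single low MSR for read interception in the buffer allocated here; the helper is hypothetical, and HyperDbg's real MSR-intercept handling is more elaborate.

// Sketch: intercept RDMSR for one low MSR (index < 0x2000) in the bitmap
// allocated by VmxAllocateMsrBitmap. Layout per Intel SDM:
//   bytes    0-1023 : read bitmap,  low MSRs  (0x00000000-0x00001FFF)
//   bytes 1024-2047 : read bitmap,  high MSRs (0xC0000000-0xC0001FFF)
//   bytes 2048-3071 : write bitmap, low MSRs
//   bytes 3072-4095 : write bitmap, high MSRs
static VOID
SketchInterceptLowMsrRead(VIRTUAL_MACHINE_STATE * VCpu, UINT32 Msr)
{
    UINT8 * ReadBitmapLow = (UINT8 *)VCpu->MsrBitmapVirtualAddress; // offset 0 of the page

    if (Msr < 0x2000)
    {
        ReadBitmapLow[Msr / 8] |= (UINT8)(1 << (Msr % 8));
    }
}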

◆ VmxAllocateVmcsRegion()

_Use_decl_annotations_ BOOLEAN VmxAllocateVmcsRegion ( VIRTUAL_MACHINE_STATE * VCpu)

Allocates the VMCS region and sets the revision ID based on IA32_VMX_BASIC_MSR.

Parameters
VCpu The virtual processor's state
Returns
BOOLEAN Returns true if the allocation was successful, otherwise returns false
{
    IA32_VMX_BASIC_REGISTER VmxBasicMsr = {0};
    SIZE_T VmcsSize;
    UINT8 * VmcsRegion;
    UINT64 VmcsPhysicalAddr;
    UINT64 AlignedVmcsRegion;
    UINT64 AlignedVmcsRegionPhysicalAddr;

#ifdef ENV_WINDOWS
    //
    // at IRQL > DISPATCH_LEVEL memory allocation routines don't work
    //
    if (KeGetCurrentIrql() > DISPATCH_LEVEL)
        KeRaiseIrqlToDpcLevel();
#endif // ENV_WINDOWS

    //
    // Allocating a 4-KByte Contiguous Memory region
    //
    VmcsSize   = 2 * VMCS_SIZE;
    VmcsRegion = (UINT8 *)PlatformMemAllocateContiguousZeroedMemory(VmcsSize + ALIGNMENT_PAGE_SIZE);

    if (VmcsRegion == NULL)
    {
        LogError("Err, couldn't allocate Buffer for VMCS region");
        return FALSE;
    }

    VmcsPhysicalAddr = VirtualAddressToPhysicalAddress(VmcsRegion);

    AlignedVmcsRegion = (UINT64)((ULONG_PTR)(VmcsRegion + ALIGNMENT_PAGE_SIZE - 1) & ~(ALIGNMENT_PAGE_SIZE - 1));
    LogDebugInfo("VMCS region address : %llx", AlignedVmcsRegion);

    AlignedVmcsRegionPhysicalAddr = (UINT64)((ULONG_PTR)(VmcsPhysicalAddr + ALIGNMENT_PAGE_SIZE - 1) & ~(ALIGNMENT_PAGE_SIZE - 1));
    LogDebugInfo("VMCS region physical address : %llx", AlignedVmcsRegionPhysicalAddr);

    //
    // get IA32_VMX_BASIC_MSR RevisionId
    //
    VmxBasicMsr.AsUInt = __readmsr(IA32_VMX_BASIC);
    LogDebugInfo("Revision Identifier (IA32_VMX_BASIC - MSR 0x480) : 0x%x", VmxBasicMsr.VmcsRevisionId);

    //
    // Changing Revision Identifier
    //
    *(UINT64 *)AlignedVmcsRegion = VmxBasicMsr.VmcsRevisionId;

    VCpu->VmcsRegionPhysicalAddress = AlignedVmcsRegionPhysicalAddr;

    //
    // We save the allocated buffer (not the aligned buffer)
    // because we want to free it in vmx termination
    //
    VCpu->VmcsRegionVirtualAddress = (UINT64)VmcsRegion;

    return TRUE;
}
unsigned char UINT8
Definition BasicTypes.h:46
PVOID PlatformMemAllocateContiguousZeroedMemory(SIZE_T NumberOfBytes)
Allocate a contiguous zeroed memory.
Definition Mem.c:22
#define VMCS_SIZE
VMCS Region Size.
Definition Vmx.h:22
#define ALIGNMENT_PAGE_SIZE
Alignment Size.
Definition Common.h:176
#define DISPATCH_LEVEL
Definition Common.h:44
UINT64 VmcsRegionPhysicalAddress
Definition State.h:312
UINT64 VmcsRegionVirtualAddress
Definition State.h:313
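
Allocating the region and stamping the revision identifier is only half of bringing a VMCS up: before it can be configured with vmwrite it must be cleared and made current on the logical processor. The sketch below shows that follow-up step with the compiler's VMX intrinsics, using the aligned physical address saved in VmcsRegionPhysicalAddress; this is the typical sequence for any VMX driver, not a verbatim excerpt from HyperDbg.

#include <intrin.h>

// Sketch: clear the VMCS and make it the current VMCS. Both intrinsics take a
// pointer to the 4-KB-aligned physical address and return 0 on success.
static BOOLEAN
SketchLoadVmcs(VIRTUAL_MACHINE_STATE * VCpu)
{
    if (__vmx_vmclear(&VCpu->VmcsRegionPhysicalAddress) != 0)
    {
        return FALSE;
    }

    if (__vmx_vmptrld(&VCpu->VmcsRegionPhysicalAddress) != 0)
    {
        return FALSE;
    }

    return TRUE;
}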

◆ VmxAllocateVmmStack()

BOOLEAN VmxAllocateVmmStack ( _Inout_ VIRTUAL_MACHINE_STATE * VCpu)

Allocate VMM Stack.

Parameters
VCpu The virtual processor's state
Returns
BOOLEAN Returns true if the allocation was successful, otherwise returns false
{
    //
    // Allocate stack for the VM Exit Handler
    //
    VCpu->VmmStack = (UINT64)PlatformMemAllocateZeroedNonPagedPool(VMM_STACK_SIZE);

    if (VCpu->VmmStack == NULL64_ZERO)
    {
        LogError("Err, insufficient memory in allocating vmm stack");
        return FALSE;
    }

    LogDebugInfo("VMM Stack for logical processor : 0x%llx", VCpu->VmmStack);

    return TRUE;
}
#define VMM_STACK_SIZE
Stack Size.
Definition Vmx.h:140
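
Since this stack is what the VM-exit handler executes on, its top is what typically ends up in the VMCS HOST_RSP field (encoding 0x6C14 in the Intel SDM). The one-line sketch below shows that relationship; HyperDbg's actual setup may reserve additional space near the top of the stack.

#include <intrin.h>

#define VMCS_HOST_RSP 0x00006C14 // host-state field encoding (Intel SDM, Appendix B)

// Sketch: stacks grow downwards, so the RSP loaded on VM-exit points just past
// the end of the buffer allocated by VmxAllocateVmmStack.
static VOID
SketchSetHostRsp(VIRTUAL_MACHINE_STATE * VCpu)
{
    __vmx_vmwrite(VMCS_HOST_RSP, VCpu->VmmStack + VMM_STACK_SIZE);
}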

◆ VmxAllocateVmxonRegion()

_Use_decl_annotations_ BOOLEAN VmxAllocateVmxonRegion ( VIRTUAL_MACHINE_STATE * VCpu)

Allocates the VMXON region and sets the revision ID based on IA32_VMX_BASIC_MSR.

Parameters
VCpu The virtual processor's state
Returns
BOOLEAN Returns true if the allocation was successful and vmxon executed without error, otherwise returns false
{
    IA32_VMX_BASIC_REGISTER VmxBasicMsr = {0};
    SIZE_T VmxonSize;
    UINT8 VmxonStatus;
    UINT8 * VmxonRegion;
    UINT64 VmxonRegionPhysicalAddr;
    UINT64 AlignedVmxonRegion;
    UINT64 AlignedVmxonRegionPhysicalAddr;

#ifdef ENV_WINDOWS
    //
    // at IRQL > DISPATCH_LEVEL memory allocation routines don't work
    //
    if (KeGetCurrentIrql() > DISPATCH_LEVEL)
        KeRaiseIrqlToDpcLevel();
#endif // ENV_WINDOWS

    //
    // Allocating a 4-KByte Contiguous Memory region
    //
    VmxonSize   = 2 * VMXON_SIZE;
    VmxonRegion = (UINT8 *)PlatformMemAllocateContiguousZeroedMemory(VmxonSize + ALIGNMENT_PAGE_SIZE);

    if (VmxonRegion == NULL)
    {
        LogError("Err, couldn't allocate buffer for VMXON region");
        return FALSE;
    }

    VmxonRegionPhysicalAddr = VirtualAddressToPhysicalAddress(VmxonRegion);

    AlignedVmxonRegion = (UINT64)((ULONG_PTR)(VmxonRegion + ALIGNMENT_PAGE_SIZE - 1) & ~(ALIGNMENT_PAGE_SIZE - 1));
    LogDebugInfo("VMXON Region Address : %llx", AlignedVmxonRegion);

    //
    // Buffers of 4 KB or more are already page-aligned; this align-up is just a double check
    //
    AlignedVmxonRegionPhysicalAddr = (UINT64)((ULONG_PTR)(VmxonRegionPhysicalAddr + ALIGNMENT_PAGE_SIZE - 1) & ~(ALIGNMENT_PAGE_SIZE - 1));
    LogDebugInfo("VMXON Region Physical Address : %llx", AlignedVmxonRegionPhysicalAddr);

    //
    // get IA32_VMX_BASIC_MSR RevisionId
    //
    VmxBasicMsr.AsUInt = __readmsr(IA32_VMX_BASIC);
    LogDebugInfo("Revision Identifier (IA32_VMX_BASIC - MSR 0x480) : 0x%x", VmxBasicMsr.VmcsRevisionId);

    //
    // Changing Revision Identifier
    //
    *(UINT64 *)AlignedVmxonRegion = VmxBasicMsr.VmcsRevisionId;

    //
    // Execute Vmxon instruction
    //
    VmxonStatus = __vmx_on(&AlignedVmxonRegionPhysicalAddr);
    if (VmxonStatus)
    {
        LogError("Err, executing vmxon instruction failed with status : %d", VmxonStatus);
        return FALSE;
    }

    VCpu->VmxonRegionPhysicalAddress = AlignedVmxonRegionPhysicalAddr;

    //
    // We save the allocated buffer (not the aligned buffer) because we want to free it in vmx termination
    //
    VCpu->VmxonRegionVirtualAddress = (UINT64)VmxonRegion;

    return TRUE;
}
#define VMXON_SIZE
VMXON Region Size.
Definition Vmx.h:28
UINT64 VmxonRegionVirtualAddress
Definition State.h:311
UINT64 VmxonRegionPhysicalAddress
Definition State.h:310