/*
 * Per-pool VA allocation bitmaps: one bit per page, set = page claimed.
 * Claiming is done with an atomic test-and-set (fetch-or) elsewhere in
 * this file, so concurrent allocators never hand out the same page.
 * Pointers are assigned during pool VA-space initialization; the bitmaps
 * are zeroed there before first use.
 */
static uint64_t *g_NonpagedPoolVaBitmap;

/*
 * Rotating allocation hint for the nonpaged pool: the bit index where the
 * next search should begin. Purely a performance hint — searches wrap
 * around the whole bitmap, so a stale value only costs scan time.
 */
static volatile uint64_t g_NonpagedPoolHintIndex = 0;

/* Same bitmap/hint pair for the paged pool VA range. */
static uint64_t *g_PagedPoolVaBitmap;

static volatile uint64_t g_PagedPoolHintIndex = 0;
67 if (!pte)
return false;
91 if (!pte)
return false;
116 kmemset(g_NonpagedPoolVaBitmap, 0, nonpaged_bitmap_bytes);
117 kmemset(g_PagedPoolVaBitmap, 0, paged_bitmap_bytes);
120 g_NonpagedPoolHintIndex = 0;
121 g_PagedPoolHintIndex = 0;
145 return (value >> b) & 1ULL;
160 uint64_t mask = (1ULL << b);
163 uint64_t old_qword = __sync_fetch_and_or(&bitmap[q], mask);
166 return (old_qword & mask) == 0;
215 IN size_t NumberOfBytes
241 size_t total_pages, total_qwords;
245 volatile uint64_t* hintIndexPtr;
249 if (NumberOfPages == 0)
return 0;
255 bitmap = g_NonpagedPoolVaBitmap;
257 hintIndexPtr = &g_NonpagedPoolHintIndex;
262 bitmap = g_PagedPoolVaBitmap;
264 hintIndexPtr = &g_PagedPoolHintIndex;
271 total_qwords = total_pages / 64;
274 if (NumberOfPages == 1) {
275 size_t start_q = (hint / 64) % total_qwords;
278 for (
size_t i = 0; i < total_qwords; i++) {
279 size_t q_idx = (start_q + i) % total_qwords;
284 uint64_t qword = bitmap[q_idx];
285 if (qword == 0xFFFFFFFFFFFFFFFFULL) {
289 uint64_t inverted_qword = ~qword;
290 unsigned long bit_index_in_qword = __builtin_ctzll(inverted_qword);
291 size_t global_bit_idx = (q_idx * 64) + bit_index_in_qword;
306 size_t start_idx = hint % total_pages;
307 size_t contiguous_found = 0;
308 size_t start_of_run_idx = 0;
311 for (
size_t i = 0; i < total_pages; i++) {
312 size_t current_idx = (start_idx + i) % total_pages;
317 contiguous_found = 0;
322 if (contiguous_found == 0) {
324 start_of_run_idx = current_idx;
328 if (current_idx < start_of_run_idx) {
329 contiguous_found = 0;
334 if (contiguous_found == NumberOfPages) {
338 for (; j < NumberOfPages; j++) {
339 size_t idx_to_claim = start_of_run_idx + j;
344 for (
size_t k = 0; k < j; k++) {
349 contiguous_found = 0;
355 if (j == NumberOfPages) {
369 IN size_t NumberOfBytes,
400 bitmap = g_NonpagedPoolVaBitmap;
405 bitmap = g_PagedPoolVaBitmap;
409 if (va < poolBase || va >= poolEnd)
return;
414 for (
size_t i = 0; i < NumberOfPages; i++) {
FORCEINLINE uint64_t InterlockedAndU64(volatile uint64_t *target, uint64_t value)
FORCEINLINE uint64_t InterlockedFetchU64(volatile uint64_t *target)
FORCEINLINE uint64_t InterlockedExchangeU64(volatile uint64_t *target, uint64_t value)
PMMPTE MiGetPtePointer(IN uintptr_t va)
#define PAGED_POOL_VA_TOTAL_PAGES
#define NONPAGED_POOL_VA_BITMAP_QWORDS
#define PAGED_POOL_VA_BITMAP_QWORDS
#define MI_PAGED_BITMAP_BASE
enum _POOL_TYPE POOL_TYPE
#define NONPAGED_POOL_VA_TOTAL_PAGES
#define MI_PAGED_POOL_BASE
#define PPFN_TO_PHYSICAL_ADDRESS(PPFN)
FORCEINLINE void * kmemset(void *dest, int64_t val, uint64_t len)
#define MI_NONPAGED_POOL_BASE
#define MI_PAGED_BITMAP_PAGES_NEEDED
#define BYTES_TO_PAGES(Bytes)
#define MI_NONPAGED_POOL_END
#define MI_PAGED_POOL_END
#define MI_NONPAGED_BITMAP_PAGES_NEEDED
#define INDEX_TO_PPFN(Index)
#define MI_WRITE_PTE(_PtePointer, _Va, _Pa, _Flags)
struct _PFN_ENTRY * PPFN_ENTRY
#define MI_NONPAGED_BITMAP_BASE
PAGE_INDEX MiRequestPhysicalPage(IN PFN_STATE ListType)
union _PFN_ENTRY::@217024126340164016372152071216274230164113211246 Descriptor
struct _PFN_ENTRY::@217024126340164016372152071216274230164113211246::@301110335271023021153236134322146064331241142124 Mapping
FORCEINLINE size_t MiVaToIndex(uintptr_t poolBase, uintptr_t va)
FORCEINLINE void MiBitmapClearBitLocked(uint64_t *bitmap, size_t bit)
FORCEINLINE bool MiBitmapTestAndSetBitLocked(uint64_t *bitmap, size_t bit)
FORCEINLINE uintptr_t MiIndexToVa(uintptr_t poolBase, size_t index)
uintptr_t MiAllocatePoolVa(IN POOL_TYPE PoolType, IN size_t NumberOfBytes)
FORCEINLINE bool MiBitmapTestBit(uint64_t *bitmap, size_t bit)
void MiFreePoolVaContiguous(IN uintptr_t va, IN size_t NumberOfBytes, IN POOL_TYPE PoolType)
bool MiInitializePoolVaSpace(void)