#define HEAP_ALIGN_SIZE  16
#define HEAP_ALIGN_MASK  (HEAP_ALIGN_SIZE - 1)
#define SMALL_HEAP_MAGIC 0xFEEFABB1
#define LARGE_HEAP_MAGIC 0xFEEFABB2

static inline struct heapEntry *__smallFindUnusedHeapEntry(uint32_t length)
{
    /* ... */
    if (length <= 32 && !ll_empty(&smallUnusedHeap32))
        list = &smallUnusedHeap32;
    else if (length <= 64 && !ll_empty(&smallUnusedHeap64))
        list = &smallUnusedHeap64;
    else if (length <= 128 && !ll_empty(&smallUnusedHeap128))
        list = &smallUnusedHeap128;
    else if (length <= 256 && !ll_empty(&smallUnusedHeap256))
        list = &smallUnusedHeap256;
    else if (length <= 512 && !ll_empty(&smallUnusedHeap512))
        list = &smallUnusedHeap512;
    else if (length <= 1024 && !ll_empty(&smallUnusedHeap1024))
        list = &smallUnusedHeap1024;
    /* ... */
}
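The small heap keeps unused entries in per-size free lists (32 to 1024 bytes), and the chain above picks the smallest class that fits the request and still has entries, falling through to a larger class when a list is empty. As a minimal sketch, ignoring the ll_empty() fallthrough, the size-class mapping on its own looks like this (the function name and the 0 fallback are illustrative, not part of the original source):

    #include <stdint.h>

    /* Illustrative only: map a requested length to the smallest size class
     * (32..1024 bytes) that can hold it. Returns 0 when the request is too
     * large for the small heap. */
    static uint32_t small_heap_size_class(uint32_t length)
    {
        uint32_t class_size;
        for (class_size = 32; class_size <= 1024; class_size <<= 1)
        {
            if (length <= class_size)
                return class_size;
        }
        return 0; /* caller would fall back to the large (page-based) heap */
    }

For example, small_heap_size_class(100) yields 128, which corresponds to the smallUnusedHeap128 list chosen above.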
static inline struct heapEntry *__smallQueueUnusedHeap(struct heapEntry *heap, uint32_t length)
{
    /* ... */
    /* queue the entry on the largest size class it can still satisfy */
    if (length >= 1024)
        list = &smallUnusedHeap1024;
    else if (length >= 512)
        list = &smallUnusedHeap512;
    else if (length >= 256)
        list = &smallUnusedHeap256;
    else if (length >= 128)
        list = &smallUnusedHeap128;
    else if (length >= 64)
        list = &smallUnusedHeap64;
    else if (length >= 32)
        list = &smallUnusedHeap32;
    /* ... */
    ll_add_after(list, &heap->entry);
    /* ... */
}
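The ll_* calls and the entry member indicate an intrusive linked list: the link node lives inside struct heapEntry itself, so queuing and removing a block never allocates anything. A minimal doubly linked variant to illustrate the idea (the struct layout and helper names below are stand-ins, not the project's ll_* implementation):

    #include <assert.h>
    #include <stddef.h>

    struct list_node { struct list_node *prev, *next; };

    /* Hypothetical block header; the real struct heapEntry also carries
     * heapMagic and reserved flags, as the asserts further below show. */
    struct block
    {
        struct list_node entry;   /* links the block into a free list */
        unsigned int     length;  /* total size of the block, header included */
    };

    static void list_init(struct list_node *list)
    {
        list->prev = list->next = list;
    }

    static void list_add_after(struct list_node *list, struct list_node *node)
    {
        node->prev = list;
        node->next = list->next;
        list->next->prev = node;
        list->next = node;
    }

    static void list_remove(struct list_node *node)
    {
        node->prev->next = node->next;
        node->next->prev = node->prev;
    }

    int main(void)
    {
        struct list_node free_list;
        struct block b = { .length = 64 };

        list_init(&free_list);
        list_add_after(&free_list, &b.entry);   /* queue the block */
        assert(free_list.next == &b.entry);
        list_remove(&b.entry);                  /* claim it again */
        assert(free_list.next == &free_list);   /* list is empty */
        return 0;
    }

With prev and next pointers embedded in each entry, unlinking needs only the node itself, which is presumably why ll_remove() takes just &heap->entry and no list argument.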
/* body fragment of __smallGetPreviousHeap(): starting at the first entry of
 * the page, advance entry by entry until the block in front of 'heap' is found */
    while (prev_heap < heap)
    {
        /* ... */
        if ((uint32_t)prev_heap + prev_heap->length >= (uint32_t)heap)
            /* ... */
        prev_heap = (struct heapEntry *)((uint32_t)prev_heap + prev_heap->length);
        /* ... */
    }
    assert(prev_heap == heap);
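Because each entry stores its own length and entries are packed back to back within a page, the address of the next entry is always the current address plus that length. A self-contained sketch of such a walk over a flat buffer (buffer layout, struct and names are illustrative assumptions, and uintptr_t is used instead of the kernel's 32-bit casts for portability):

    #include <assert.h>
    #include <stdint.h>

    struct hdr { uint32_t length; };   /* simplified stand-in for struct heapEntry */

    int main(void)
    {
        /* one "page" of 256 bytes holding three consecutive blocks */
        _Alignas(16) unsigned char page[256];
        uint32_t sizes[] = { 64, 96, 96 };

        uintptr_t pos = (uintptr_t)page;
        for (int i = 0; i < 3; i++)
        {
            ((struct hdr *)pos)->length = sizes[i];
            pos += sizes[i];
        }

        /* find the block directly in front of the third one, as the loop above does */
        struct hdr *target = (struct hdr *)((uintptr_t)page + 64 + 96);
        struct hdr *prev   = (struct hdr *)page;
        while ((uintptr_t)prev + prev->length < (uintptr_t)target)
            prev = (struct hdr *)((uintptr_t)prev + prev->length);

        assert((uintptr_t)prev == (uintptr_t)page + 64); /* the middle block */
        return 0;
    }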
static inline struct heapEntry *__smallGetNextHeap(struct heapEntry *heap, uint32_t length)
{
    /* ... */
    if ((uint32_t)next_heap >= ((uint32_t)heap & ~PAGE_MASK) + PAGE_SIZE)
        return NULL;
    /* ... */
}
static struct heapEntry *__smallInternalFree(struct heapEntry *deleted_heap, uint32_t length)
{
    /* ... */

    /* coalesce with the neighbouring entry in front of the freed block, if possible */
    heap = __smallGetPreviousHeap(deleted_heap);
    /* ... */
    ll_remove(&heap->entry);
    length = (uint32_t)deleted_heap + length - (uint32_t)heap;
    /* ... */

    /* coalesce with the neighbouring entry behind the freed block, if possible */
    heap = __smallGetNextHeap(deleted_heap, length);
    /* ... */
    ll_remove(&heap->entry);
    length = (uint32_t)heap + heap->length - (uint32_t)deleted_heap;
    /* ... */

    return __smallQueueUnusedHeap(deleted_heap, length);
}
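The two length updates are plain address arithmetic: after a merge the block spans from the lower of the two headers to the upper end of the higher one. A small worked example with made-up addresses (not taken from the source):

    #include <assert.h>
    #include <stdint.h>

    int main(void)
    {
        /* Suppose a block of 64 bytes at 0x1040 is being freed and the block
         * of 64 bytes at 0x1000 directly in front of it is already unused. */
        uint32_t prev    = 0x1000;  /* start of the unused neighbour  */
        uint32_t deleted = 0x1040;  /* start of the block being freed */
        uint32_t length  = 64;      /* length of the block being freed */

        /* same formula as in __smallInternalFree:
         * new length = end of the freed block - start of the neighbour */
        uint32_t merged = deleted + length - prev;
        assert(merged == 128);      /* one 128-byte unused block at 0x1000 */
        return 0;
    }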
static struct heapEntry *__smallAlloc(uint32_t length)
{
    /* ... */
    uint32_t origHeapLength;
    /* ... */

    /* grab an unused entry that is large enough */
    heap = __smallFindUnusedHeapEntry(length);
    /* ... */
    origHeapLength = heap->length;
    /* ... */

    /* if splitting would leave a remainder too small to hold a header
     * plus the minimum payload, hand out the whole entry instead */
    if (origHeapLength < length + sizeof(struct heapEntry) + 16)
        length = origHeapLength;
    /* ... */

    ll_add_after(&smallHeap, &heap->entry);
    /* ... */

    /* return the unneeded tail of the entry to the unused lists */
    if (length < origHeapLength)
        __smallInternalFree((struct heapEntry *)((uint32_t)heap + length), origHeapLength - length);
    /* ... */
}
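The splitting rule can be read in isolation: split only when the leftover tail can itself form a valid entry, i.e. at least sizeof(struct heapEntry) + 16 bytes. A minimal sketch of that rule with a placeholder header (struct layout and function name are assumptions for illustration):

    #include <assert.h>
    #include <stdint.h>

    struct entry_hdr { uint32_t a, b, c, d; };  /* placeholder header, 16 bytes */

    /* Illustrative: decide whether a free entry of 'have' bytes should be split
     * when 'want' bytes are requested, using the same threshold as __smallAlloc. */
    static int should_split(uint32_t have, uint32_t want)
    {
        return have >= want + sizeof(struct entry_hdr) + 16;
    }

    int main(void)
    {
        /* a 128-byte entry serving a 64-byte request leaves 64 bytes:
         * enough for a header plus 16 bytes of payload, so it is split */
        assert(should_split(128, 64));
        /* serving a 112-byte request would leave only 16 bytes, so the
         * whole entry is handed out and the length is rounded up to 128 */
        assert(!should_split(128, 112));
        return 0;
    }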
static void __smallFree(struct heapEntry *heap)
{
    /* ... */
    ll_remove(&heap->entry);
    __smallInternalFree(heap, heap->length);
    /* ... */
}
/* body fragment, from __smallReAlloc(heap, length) */
    uint32_t origHeapLength;
    /* ... */

    /* the request no longer fits into the small heap */
    if (length > 1024 - sizeof(struct heapEntry))
        /* ... */

    origHeapLength = heap->length;
    if (length > origHeapLength)
    {
        /* growing: try to absorb the unused entry directly behind this one */
        /* ... */
        next_heap = __smallGetNextHeap(heap, heap->length);
        if (!next_heap || next_heap->reserved)
            return NULL;
        if (length > (uint32_t)next_heap + next_heap->length - (uint32_t)heap)
            return NULL;
        /* ... */
        ll_remove(&next_heap->entry);
        origHeapLength = (uint32_t)next_heap + next_heap->length - (uint32_t)heap;
        /* ... */
    }
    assert(origHeapLength >= length);
    /* ... */

    /* as in __smallAlloc: only split off a remainder that is big enough */
    if (origHeapLength < length + sizeof(struct heapEntry) + 16)
        length = origHeapLength;
    /* ... */

    if (length < origHeapLength)
        __smallInternalFree((struct heapEntry *)((uint32_t)heap + length), origHeapLength - length);
    /* ... */
static struct heapEntry *__largeAlloc(uint32_t length)
{
    /* ... */
    /* round the request up to a whole number of pages */
    length = (length + PAGE_MASK) & ~PAGE_MASK;
    /* ... */
    ll_add_after(&largeHeap, &heap->entry);
    /* ... */
}
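(length + PAGE_MASK) & ~PAGE_MASK is the usual round-up-to-page-boundary trick. A quick self-contained check, assuming the common 4 KiB page size (the EX_* constants are stand-ins, not the kernel's PAGE_SIZE/PAGE_MASK definitions):

    #include <assert.h>
    #include <stdint.h>

    #define EX_PAGE_SIZE 0x1000u                 /* assumed 4 KiB pages */
    #define EX_PAGE_MASK (EX_PAGE_SIZE - 1)

    int main(void)
    {
        uint32_t a = (5000u + EX_PAGE_MASK) & ~EX_PAGE_MASK;
        uint32_t b = (4096u + EX_PAGE_MASK) & ~EX_PAGE_MASK;
        assert(a == 8192u);   /* 5000 bytes need two pages  */
        assert(b == 4096u);   /* exact multiples stay as-is */
        return 0;
    }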
static void __largeFree(struct heapEntry *heap)
{
    /* ... */
    assert(((uint32_t)heap & PAGE_MASK) == 0);
    /* ... */
    ll_remove(&heap->entry);
    /* ... */
}
/* body fragment, from __largeReAlloc(heap, length) */
    assert(((uint32_t)heap & PAGE_MASK) == 0);
    /* ... */

    /* the new size would fit into the small heap */
    if (length <= 1024 - sizeof(struct heapEntry))
        /* ... */

    length = (length + PAGE_MASK) & ~PAGE_MASK;
    /* ... */

    /* rounded size unchanged */
    if (length == heap->length)
        /* ... */

    ll_remove(&heap->entry);
    /* ... */
    ll_add_after(&largeHeap, &heap->entry);
    /* ... */
/* from heapAlloc(): dispatch to the small or the large heap */
    if (!length)
        return NULL;
    /* ... */
    if (length <= 1024 - sizeof(struct heapEntry))
        heap = __smallAlloc(length);
    else
        heap = __largeAlloc(length);
    /* ... */
    /* the caller gets a pointer just behind the heapEntry header */
    return heap ? (void *)((uint32_t)heap + sizeof(struct heapEntry)) : NULL;
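The returned pointer is the header address plus sizeof(struct heapEntry), so the bookkeeping data sits directly in front of every allocation, and heapFree()/heapSize() can recover the header by subtracting the same offset. A self-contained sketch of that convention with a stand-in header (names and layout are illustrative, not the original struct):

    #include <assert.h>
    #include <stdint.h>
    #include <stdlib.h>

    struct fake_hdr { uint32_t length; uint32_t magic; };  /* stand-in header */

    /* hand out memory located directly behind the header */
    static void *fake_alloc(uint32_t payload)
    {
        struct fake_hdr *hdr = malloc(sizeof(*hdr) + payload);
        if (!hdr) return NULL;
        hdr->length = sizeof(*hdr) + payload;
        hdr->magic  = 0xFEEFABB1;                 /* cf. SMALL_HEAP_MAGIC */
        return (void *)((uintptr_t)hdr + sizeof(*hdr));
    }

    /* map the user pointer back to its header, as heapFree()/heapSize() must */
    static struct fake_hdr *fake_header(void *addr)
    {
        return (struct fake_hdr *)((uintptr_t)addr - sizeof(struct fake_hdr));
    }

    int main(void)
    {
        void *p = fake_alloc(100);
        assert(p && fake_header(p)->magic == 0xFEEFABB1);
        assert(fake_header(p)->length == sizeof(struct fake_hdr) + 100);
        free(fake_header(p));
        return 0;
    }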
/* from heapReAlloc(): first try to resize in place, otherwise fall back
 * to allocating a new block and copying the data over */
    struct heapEntry *heap, *new_heap = NULL;
    /* ... */

    /* ... (dispatch on whether the block lives in the small or the large heap) */
        new_heap = __smallReAlloc(heap, length);
    /* ... */
        new_heap = __largeReAlloc(heap, length);
    /* ... */
    return (void *)((uint32_t)new_heap + sizeof(struct heapEntry));
    /* ... */

    if (!new_addr)
        return NULL;
    /* ... */
    memcpy(new_addr, addr, length);
    /* ... */
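The visible memcpy() marks the classic fallback when in-place resizing is not possible: obtain a fresh block, copy the contents, release the old one. A generic, self-contained illustration of that pattern using the C standard library (not the kernel's own allocator, and the copy length handling here is my own choice, not taken from the source):

    #include <stdlib.h>
    #include <string.h>

    /* Illustrative fallback: allocate a new block, copy the old contents,
     * release the old block. On failure the old block is left untouched,
     * similar to realloc(). */
    static void *grow_by_copy(void *old, size_t old_len, size_t new_len)
    {
        void *fresh = malloc(new_len);
        if (!fresh)
            return NULL;
        memcpy(fresh, old, old_len < new_len ? old_len : new_len);
        free(old);
        return fresh;
    }

    int main(void)
    {
        char *buf = malloc(4);
        if (!buf) return 1;
        memcpy(buf, "abc", 4);
        buf = grow_by_copy(buf, 4, 16);   /* pretend in-place resizing failed */
        if (!buf) return 1;
        free(buf);
        return 0;
    }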
    /* ... */
    assert(((uint32_t)heap & PAGE_MASK) == 0);
    /* ... */

/* validation helper, #undef'd again after use */
#define VALIDATE_UNUSED_LIST(unused_list) \
    /* ... */ \
    LL_FOR_EACH(heap, unused_list, struct heapEntry, entry) \
    { \
        assert(((uint32_t)heap & HEAP_ALIGN_MASK) == 0); \
        assert(heap->heapMagic == SMALL_HEAP_MAGIC); \
        assert(heap->length >= sizeof(struct heapEntry) + 16); \
        assert(!heap->reserved); \
    }

/* ... */

#undef VALIDATE_UNUSED_LIST
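Every unused small-heap entry must be 16-byte aligned, carry the magic value, be at least header-plus-16 bytes long, and not be marked reserved. A standalone restatement of those four invariants with stand-in types (the real checks run over the free lists via LL_FOR_EACH and use the actual struct heapEntry):

    #include <assert.h>
    #include <stdint.h>

    #define EX_ALIGN_MASK 15u               /* HEAP_ALIGN_SIZE - 1 */
    #define EX_MAGIC      0xFEEFABB1u       /* SMALL_HEAP_MAGIC    */

    struct ex_entry                          /* simplified stand-in header */
    {
        uint32_t heapMagic;
        uint32_t length;
        uint32_t reserved;
    };

    /* the same four conditions the VALIDATE_UNUSED_LIST macro asserts */
    static void ex_validate(uint32_t addr, const struct ex_entry *e)
    {
        assert((addr & EX_ALIGN_MASK) == 0);
        assert(e->heapMagic == EX_MAGIC);
        assert(e->length >= sizeof(struct ex_entry) + 16);
        assert(!e->reserved);
    }

    int main(void)
    {
        struct ex_entry e = { EX_MAGIC, 64, 0 };
        ex_validate(0x1000, &e);            /* a well-formed unused entry */
        return 0;
    }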
Functions and macros referenced in this listing:

    uint32_t heapSize(void *addr): Determines the size of a specific kernel memory block.
    void *heapAlloc(uint32_t length): Allocates a block of kernel memory.
    void *heapReAlloc(void *addr, uint32_t length): Resizes a block of kernel memory.
    void heapFree(void *addr): Deallocates a block of kernel memory.
    void heapVerify(): Runs some internal checks to ensure that the heap is still valid.
    void *memcpy(void *destination, const void *source, size_t num): Copies a block of memory from source to destination.
    void *pagingAllocatePhysMem(struct process *p, uint32_t length, bool rw, bool user): Allocates several pages of physical memory in a process.
    void *pagingReAllocatePhysMem(struct process *p, void *addr, uint32_t old_length, uint32_t new_length, bool rw, bool user): Reallocates a specific range of virtual memory in a process.
    void pagingReleasePhysMem(struct process *p, void *addr, uint32_t length): Releases several pages of physical memory in a process.
    #define LL_INIT(list): Initializes a linkedList.
    #define LL_ENTRY(element, type, field)
    #define LL_FOR_EACH(element, list, type, field): Iterates over a linkedList, similar to a for-loop.