45#include "umm_malloc_cfg.h"
46#include "umm_malloc.h"
55#include "dbglog/dbglog.h"
58extern void *UMM_MALLOC_CFG_HEAP_ADDR;
59extern uint32_t UMM_MALLOC_CFG_HEAP_SIZE;
UMM_H_ATTPACKPRE typedef struct umm_ptr_t {
    uint16_t next;
    uint16_t prev;
} UMM_H_ATTPACKSUF umm_ptr;

UMM_H_ATTPACKPRE typedef struct umm_block_t {
    union {
        umm_ptr used;
    } header;
    union {
        umm_ptr free;
        uint8_t data[UMM_BLOCK_BODY_SIZE - sizeof(struct umm_ptr_t)];
    } body;
} UMM_H_ATTPACKSUF umm_block;
#define UMM_FREELIST_MASK ((uint16_t)(0x8000))
#define UMM_BLOCKNO_MASK  ((uint16_t)(0x7FFF))
/* Current heap state referenced by the UMM_* macros below (declaration reconstructed from how the members are used). */
static struct {
    umm_block *pheap;
    size_t heap_size;
    uint16_t numblocks;
} umm_heap_current;

#define UMM_HEAP      (umm_heap_current.pheap)
#define UMM_HEAPSIZE  (umm_heap_current.heap_size)
#define UMM_NUMBLOCKS (umm_heap_current.numblocks)

#define UMM_BLOCKSIZE  (sizeof(umm_block))
#define UMM_BLOCK_LAST (UMM_NUMBLOCKS - 1)
#define UMM_BLOCK(b) (UMM_HEAP[b])
#define UMM_DATA(b)  (UMM_BLOCK(b).body.data)

#define UMM_NBLOCK(b) (UMM_BLOCK(b).header.used.next)
#define UMM_PBLOCK(b) (UMM_BLOCK(b).header.used.prev)
#define UMM_NFREE(b)  (UMM_BLOCK(b).body.free.next)
#define UMM_PFREE(b)  (UMM_BLOCK(b).body.free.prev)
123#include "umm_integrity.c"
124#include "umm_poison.c"
static uint16_t umm_blocks(size_t size) {

    /* A request that fits in a single block's body needs exactly one block. */
    if (size <= (sizeof(((umm_block *)0)->body))) {
        return 1;
    }

    /* Larger requests use the first block's body plus whole additional blocks. */
    size -= (sizeof(((umm_block *)0)->body));

    size_t blocks = (2 + ((size - 1) / (UMM_BLOCKSIZE)));

    if (blocks > (INT16_MAX)) {
        blocks = INT16_MAX;
    }

    return (uint16_t)blocks;
}
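
/*
 * umm_split_block() splits the block starting at `c` so that `c` keeps
 * `blocks` blocks and a new block begins at `c + blocks`. The new block's
 * next/prev pointers are stitched into the physical block chain and its free
 * bit is set from `new_freemask`. The free-list pointers are NOT touched
 * here; the callers take care of that.
 */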
static void umm_split_block(uint16_t c,
    uint16_t blocks,
    uint16_t new_freemask) {
    UMM_NBLOCK(c + blocks) = (UMM_NBLOCK(c) & UMM_BLOCKNO_MASK) | new_freemask;
    UMM_PBLOCK(c + blocks) = c;

    UMM_PBLOCK(UMM_NBLOCK(c) & UMM_BLOCKNO_MASK) = (c + blocks);
    UMM_NBLOCK(c) = (c + blocks);
}
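
/*
 * umm_disconnect_from_free_list() unlinks block `c` from the doubly linked
 * free list and clears its UMM_FREELIST_MASK bit so it is marked as in use.
 */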
static void umm_disconnect_from_free_list(uint16_t c) {
    /* Disconnect this block from the free list */
    UMM_NFREE(UMM_PFREE(c)) = UMM_NFREE(c);
    UMM_PFREE(UMM_NFREE(c)) = UMM_PFREE(c);

    /* And clear the free block indicator */
    UMM_NBLOCK(c) &= (~UMM_FREELIST_MASK);
}
static void umm_assimilate_up(uint16_t c) {

    if (UMM_NBLOCK(UMM_NBLOCK(c)) & UMM_FREELIST_MASK) {

        UMM_FRAGMENTATION_METRIC_REMOVE(UMM_NBLOCK(c));

        DBGLOG_DEBUG("Assimilate up to next block, which is FREE\n");

        /* Disconnect the next block from the free list */
        umm_disconnect_from_free_list(UMM_NBLOCK(c));

        /* Assimilate the next block with this one */
        UMM_PBLOCK(UMM_NBLOCK(UMM_NBLOCK(c)) & UMM_BLOCKNO_MASK) = c;
        UMM_NBLOCK(c) = UMM_NBLOCK(UMM_NBLOCK(c)) & UMM_BLOCKNO_MASK;
    }
}
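
/*
 * umm_assimilate_down() merges block `c` into the block that precedes it,
 * which the caller has already determined to be free. The previous block's
 * free bit is set from `freemask`, and the previous block's index is returned
 * as the new start of the combined block.
 */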
static uint16_t umm_assimilate_down(uint16_t c, uint16_t freemask) {

    UMM_FRAGMENTATION_METRIC_REMOVE(UMM_PBLOCK(c));

    UMM_NBLOCK(UMM_PBLOCK(c)) = UMM_NBLOCK(c) | freemask;
    UMM_PBLOCK(UMM_NBLOCK(c)) = UMM_PBLOCK(c);

    UMM_FRAGMENTATION_METRIC_ADD(UMM_PBLOCK(c));

    return UMM_PBLOCK(c);
}
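
/*
 * umm_init_heap() initializes an arbitrary memory region as the heap: it
 * records the heap pointer and size, zeroes the area, and sets up block 0
 * (the free-list head), block 1 (a single free block spanning the heap), and
 * the terminating last block.
 */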
void umm_init_heap(void *ptr, size_t size) {
    /* Record the heap location and size, then zero the whole heap. */
    UMM_HEAP = (umm_block *)ptr;
    UMM_HEAPSIZE = size;
    UMM_NUMBLOCKS = (UMM_HEAPSIZE / UMM_BLOCKSIZE);
    memset(UMM_HEAP, 0x00, UMM_HEAPSIZE);

    UMM_FRAGMENTATION_METRIC_INIT();

    /* Block 0 is the head of the free list and points at block 1. */
    UMM_NBLOCK(0) = 1;
    UMM_NFREE(0) = 1;
    UMM_PFREE(0) = 1;

    /* Block 1 is one huge free block; its remaining pointers stay 0 from the memset() above. */
    UMM_NBLOCK(1) = UMM_BLOCK_LAST | UMM_FREELIST_MASK;

    /* The last block marks the end of the heap and points back at block 1. */
    UMM_PBLOCK(UMM_BLOCK_LAST) = 1;
}
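
/*
 * umm_init() initializes the default heap using the address and size supplied
 * by the configuration (typically linker-provided symbols).
 */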
void umm_init(void) {
    umm_init_heap(UMM_MALLOC_CFG_HEAP_ADDR, UMM_MALLOC_CFG_HEAP_SIZE);
}
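
/*
 * umm_free_core() does the real work of free(): it converts the pointer back
 * into a block index, assimilates the block with its free neighbours where
 * possible, and otherwise pushes it onto the head of the free list. The
 * caller is responsible for the critical section and the NULL check.
 */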
static void umm_free_core(void *ptr) {

    uint16_t c;

    /* Figure out which block we're in. */
    c = (((uint8_t *)ptr) - (uint8_t *)(&(UMM_HEAP[0]))) / UMM_BLOCKSIZE;

    DBGLOG_DEBUG("Freeing block %6i\n", c);

    /* Now let's assimilate this block with the next one if possible. */
    umm_assimilate_up(c);

    /* Then assimilate with the previous block if possible. */
    if (UMM_NBLOCK(UMM_PBLOCK(c)) & UMM_FREELIST_MASK) {
        DBGLOG_DEBUG("Assimilate down to previous block, which is FREE\n");

        c = umm_assimilate_down(c, UMM_FREELIST_MASK);
    } else {
        /* The previous block is in use, so add this block to the head of the free list. */
        UMM_FRAGMENTATION_METRIC_ADD(c);

        DBGLOG_DEBUG("Just add to head of free list\n");

        UMM_PFREE(UMM_NFREE(0)) = c;
        UMM_NFREE(c) = UMM_NFREE(0);
        UMM_PFREE(c) = 0;
        UMM_NFREE(0) = c;

        UMM_NBLOCK(c) |= UMM_FREELIST_MASK;
    }
}
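
/*
 * umm_free() is the public entry point: it ignores NULL pointers and wraps
 * umm_free_core() in a critical section.
 */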
void umm_free(void *ptr) {
    UMM_CRITICAL_DECL(id_free);

    UMM_CHECK_INITIALIZED();

    /* Being asked to free a NULL pointer is a no-op. */
    if ((void *)0 == ptr) {
        DBGLOG_DEBUG("free a null pointer -> do nothing\n");
        return;
    }

    /* Free the memory within a protected critical section. */
    UMM_CRITICAL_ENTRY(id_free);

    umm_free_core(ptr);

    UMM_CRITICAL_EXIT(id_free);
}
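
/*
 * umm_malloc_core() does the real work of malloc(): it walks the free list
 * looking for a span of blocks large enough for the request (best-fit or
 * first-fit depending on configuration), splits any excess back onto the
 * free list, and returns a pointer to the block's data area, or NULL when no
 * block fits. The caller handles the critical section and the zero-size case.
 */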
static void *umm_malloc_core(size_t size) {
    uint16_t blocks;
    uint16_t blockSize = 0;
    uint16_t bestSize;
    uint16_t bestBlock;
    uint16_t cf;

    blocks = umm_blocks(size);

    /* Scan the free list for a span of blocks that is big enough. */
    cf = UMM_NFREE(0);

    bestBlock = UMM_NFREE(0);
    bestSize = 0x7FFF;

    while (cf) {
        blockSize = (UMM_NBLOCK(cf) & UMM_BLOCKNO_MASK) - cf;

        DBGLOG_TRACE("Looking at block %6i size %6i\n", cf, blockSize);

        #if defined UMM_BEST_FIT
        if ((blockSize >= blocks) && (blockSize < bestSize)) {
            bestBlock = cf;
            bestSize = blockSize;
        }
        #elif defined UMM_FIRST_FIT
        /* This is the first block that fits! */
        if ((blockSize >= blocks)) {
            break;
        }
        #else
        #error "No UMM_*_FIT is defined - check umm_malloc_cfg.h"
        #endif

        cf = UMM_NFREE(cf);
    }

    if (0x7FFF != bestSize) {
        cf = bestBlock;
        blockSize = bestSize;
    }

    if (UMM_NBLOCK(cf) & UMM_BLOCKNO_MASK && blockSize >= blocks) {
        UMM_FRAGMENTATION_METRIC_REMOVE(cf);

        if (blockSize == blocks) {
            /* Exact fit - no need to split off a block. */
            DBGLOG_DEBUG("Allocating %6i blocks starting at %6i - exact\n", blocks, cf);

            umm_disconnect_from_free_list(cf);
        } else {
            /* Not an exact fit - split off a free remainder block. */
            DBGLOG_DEBUG("Allocating %6i blocks starting at %6i - existing\n", blocks, cf);

            umm_split_block(cf, blocks, UMM_FREELIST_MASK /* new block is free */);

            UMM_FRAGMENTATION_METRIC_ADD(UMM_NBLOCK(cf));

            /* umm_split_block() does not touch the free-list pointers, so splice the remainder in here. */
            UMM_NFREE(UMM_PFREE(cf)) = cf + blocks;
            UMM_PFREE(cf + blocks) = UMM_PFREE(cf);

            UMM_PFREE(UMM_NFREE(cf)) = cf + blocks;
            UMM_NFREE(cf + blocks) = UMM_NFREE(cf);
        }
    } else {
        /* Out of memory */
        DBGLOG_DEBUG("Can't allocate %5i blocks\n", blocks);

        return NULL;
    }

    return (void *)&UMM_DATA(cf);
}
void *umm_malloc(size_t size) {
    UMM_CRITICAL_DECL(id_malloc);

    void *ptr = NULL;

    UMM_CHECK_INITIALIZED();

    /* Asking for 0 bytes simply returns a NULL pointer. */
    if (0 == size) {
        DBGLOG_DEBUG("malloc a block of 0 bytes -> do nothing\n");
        return ptr;
    }

    /* Allocate the memory within a protected critical section. */
    UMM_CRITICAL_ENTRY(id_malloc);

    ptr = umm_malloc_core(size);

    UMM_CRITICAL_EXIT(id_malloc);

    return ptr;
}
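
/*
 * umm_realloc() handles the NULL-pointer and zero-size cases first, then
 * tries to satisfy the new size as cheaply as possible: reuse the current
 * block, grow into a free next and/or previous block (moving the data down
 * with memmove() when the previous block is used), and only as a last resort
 * allocate a new block, copy, and free the old one. Any excess blocks left
 * over are split off and returned to the free list.
 */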
void *umm_realloc(void *ptr, size_t size) {
    UMM_CRITICAL_DECL(id_realloc);

    uint16_t blocks;
    uint16_t blockSize;
    uint16_t prevBlockSize = 0;
    uint16_t nextBlockSize = 0;

    uint16_t c;

    size_t curSize;

    UMM_CHECK_INITIALIZED();
    /* realloc() of a NULL pointer behaves exactly like malloc(). */
    if (((void *)NULL == ptr)) {
        DBGLOG_DEBUG("realloc the NULL pointer - call malloc()\n");

        return umm_malloc(size);
    }

    /* A size of 0 means free the block and return NULL. */
    if (0 == size) {
        DBGLOG_DEBUG("realloc to 0 size, just free the block\n");

        umm_free(ptr);

        return (void *)NULL;
    }
    blocks = umm_blocks(size);

    c = (((uint8_t *)ptr) - (uint8_t *)(&(UMM_HEAP[0]))) / UMM_BLOCKSIZE;

    blockSize = (UMM_NBLOCK(c) - c);

    curSize = (blockSize * UMM_BLOCKSIZE) - (sizeof(((umm_block *)0)->header));

    UMM_CRITICAL_ENTRY(id_realloc);
    /* prevBlockSize and nextBlockSize stay 0 unless those blocks are free. */
    if ((UMM_NBLOCK(UMM_NBLOCK(c)) & UMM_FREELIST_MASK)) {
        nextBlockSize = (UMM_NBLOCK(UMM_NBLOCK(c)) & UMM_BLOCKNO_MASK) - UMM_NBLOCK(c);
    }

    if ((UMM_NBLOCK(UMM_PBLOCK(c)) & UMM_FREELIST_MASK)) {
        prevBlockSize = (c - UMM_PBLOCK(c));
    }

    DBGLOG_DEBUG("realloc blocks %i blockSize %i nextBlockSize %i prevBlockSize %i\n", blocks, blockSize, nextBlockSize, prevBlockSize);
    if (blockSize >= blocks) {
        DBGLOG_DEBUG("realloc the same or smaller size block - %i, do nothing\n", blocks);
    } else if ((blockSize + nextBlockSize) == blocks) {
        DBGLOG_DEBUG("exact realloc using next block - %i\n", blocks);
        umm_assimilate_up(c);
        blockSize += nextBlockSize;
    } else if ((0 == prevBlockSize) && (blockSize + nextBlockSize) >= blocks) {
        DBGLOG_DEBUG("realloc using next block - %i\n", blocks);
        umm_assimilate_up(c);
        blockSize += nextBlockSize;
    } else if ((prevBlockSize + blockSize) >= blocks) {
        DBGLOG_DEBUG("realloc using prev block - %i\n", blocks);
        umm_disconnect_from_free_list(UMM_PBLOCK(c));
        c = umm_assimilate_down(c, 0);
        memmove((void *)&UMM_DATA(c), ptr, curSize);
        ptr = (void *)&UMM_DATA(c);
        blockSize += prevBlockSize;
    } else if ((prevBlockSize + blockSize + nextBlockSize) >= blocks) {
        DBGLOG_DEBUG("realloc using prev and next block - %i\n", blocks);
        umm_assimilate_up(c);
        umm_disconnect_from_free_list(UMM_PBLOCK(c));
        c = umm_assimilate_down(c, 0);
        memmove((void *)&UMM_DATA(c), ptr, curSize);
        ptr = (void *)&UMM_DATA(c);
        blockSize += (prevBlockSize + nextBlockSize);
    } else {
        DBGLOG_DEBUG("realloc a completely new block %i\n", blocks);

        void *oldptr = ptr;

        if ((ptr = umm_malloc_core(size))) {
            DBGLOG_DEBUG("realloc %i to a bigger block %i, copy, and free the old\n", blockSize, blocks);
            memcpy(ptr, oldptr, curSize);
            umm_free_core(oldptr);
        } else {
            DBGLOG_DEBUG("realloc %i to a bigger block %i failed - return NULL and leave the old block!\n", blockSize, blocks);
        }

        blockSize = blocks;
    }

    /* Split off and free any extra blocks we no longer need. */
    if (blockSize > blocks) {
        DBGLOG_DEBUG("split and free %i blocks from %i\n", blocks, blockSize);
        umm_split_block(c, blocks, 0);
        umm_free_core((void *)&UMM_DATA(c + blocks));
    }

    UMM_CRITICAL_EXIT(id_realloc);

    return ptr;
}
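
/*
 * umm_calloc() allocates space for `num` items of `item_size` bytes each and
 * zeroes it. Note that the multiplication is not checked for overflow here,
 * so the caller must ensure num * item_size does not wrap.
 */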
void *umm_calloc(size_t num, size_t item_size) {
    void *ret;

    ret = umm_malloc((size_t)(item_size * num));

    if (ret) {
        memset(ret, 0x00, (size_t)(item_size * num));
    }

    return ret;
}