Diffstat (limited to 'contrib/syslinux-4.02/core/mem')
-rw-r--r--  contrib/syslinux-4.02/core/mem/free.c     154
-rw-r--r--  contrib/syslinux-4.02/core/mem/init.c      38
-rw-r--r--  contrib/syslinux-4.02/core/mem/malloc.c    99
-rw-r--r--  contrib/syslinux-4.02/core/mem/malloc.h    82
4 files changed, 373 insertions, 0 deletions
diff --git a/contrib/syslinux-4.02/core/mem/free.c b/contrib/syslinux-4.02/core/mem/free.c
new file mode 100644
index 0000000..0becb9e
--- /dev/null
+++ b/contrib/syslinux-4.02/core/mem/free.c
@@ -0,0 +1,154 @@
+/*
+ * free.c
+ *
+ * Very simple linked-list based malloc()/free().
+ */
+
+#include <stdlib.h>
+#include <dprintf.h>
+#include "malloc.h"
+
+static struct free_arena_header *
+__free_block(struct free_arena_header *ah)
+{
+ struct free_arena_header *pah, *nah;
+ struct free_arena_header *head =
+ &__malloc_head[ARENA_HEAP_GET(ah->a.attrs)];
+
+ pah = ah->a.prev;
+ nah = ah->a.next;
+ if ( ARENA_TYPE_GET(pah->a.attrs) == ARENA_TYPE_FREE &&
+ (char *)pah+ARENA_SIZE_GET(pah->a.attrs) == (char *)ah ) {
+ /* Coalesce into the previous block */
+ ARENA_SIZE_SET(pah->a.attrs, ARENA_SIZE_GET(pah->a.attrs) +
+ ARENA_SIZE_GET(ah->a.attrs));
+ pah->a.next = nah;
+ nah->a.prev = pah;
+
+#ifdef DEBUG_MALLOC
+ ARENA_TYPE_SET(ah->a.attrs, ARENA_TYPE_DEAD);
+#endif
+
+ ah = pah;
+ pah = ah->a.prev;
+ } else {
+ /* Need to add this block to the free chain */
+ ARENA_TYPE_SET(ah->a.attrs, ARENA_TYPE_FREE);
+ ah->a.tag = MALLOC_FREE;
+
+ ah->next_free = head->next_free;
+ ah->prev_free = head;
+ head->next_free = ah;
+ ah->next_free->prev_free = ah;
+ }
+
+ /* In either of the previous cases, we might be able to merge
+ with the subsequent block... */
+ if ( ARENA_TYPE_GET(nah->a.attrs) == ARENA_TYPE_FREE &&
+ (char *)ah+ARENA_SIZE_GET(ah->a.attrs) == (char *)nah ) {
+ ARENA_SIZE_SET(ah->a.attrs, ARENA_SIZE_GET(ah->a.attrs) +
+ ARENA_SIZE_GET(nah->a.attrs));
+
+ /* Remove the old block from the chains */
+ nah->next_free->prev_free = nah->prev_free;
+ nah->prev_free->next_free = nah->next_free;
+ ah->a.next = nah->a.next;
+ nah->a.next->a.prev = ah;
+
+#ifdef DEBUG_MALLOC
+ ARENA_TYPE_SET(nah->a.attrs, ARENA_TYPE_DEAD);
+#endif
+ }
+
+ /* Return the (possibly coalesced) block containing the freed block */
+ return ah;
+}
+
+void free(void *ptr)
+{
+ struct free_arena_header *ah;
+
+ dprintf("free(%p) @ %p\n", ptr, __builtin_return_address(0));
+
+ if ( !ptr )
+ return;
+
+ ah = (struct free_arena_header *)
+ ((struct arena_header *)ptr - 1);
+
+#ifdef DEBUG_MALLOC
+ assert( ARENA_TYPE_GET(ah->a.attrs) == ARENA_TYPE_USED );
+#endif
+
+ __free_block(ah);
+
+ /* Here we could insert code to return memory to the system. */
+}
+
+/*
+ * This is used to insert a block which is not previously on the
+ * free list. Only the size and heap fields of a.attrs are assumed
+ * to be valid.
+ */
+void __inject_free_block(struct free_arena_header *ah)
+{
+ struct free_arena_header *head =
+ &__malloc_head[ARENA_HEAP_GET(ah->a.attrs)];
+ struct free_arena_header *nah;
+ size_t a_end = (size_t) ah + ARENA_SIZE_GET(ah->a.attrs);
+ size_t n_end;
+
+ dprintf("inject: %#zx bytes @ %p, heap %u (%p)\n",
+ ARENA_SIZE_GET(ah->a.attrs), ah,
+ ARENA_HEAP_GET(ah->a.attrs), head);
+
+ for (nah = head->a.next ; nah != head ; nah = nah->a.next) {
+ n_end = (size_t) nah + ARENA_SIZE_GET(nah->a.attrs);
+
+ /* Is nah entirely beyond this block? */
+ if ((size_t) nah >= a_end)
+ break;
+
+ /* Is this block entirely beyond nah? */
+ if ((size_t) ah >= n_end)
+ continue;
+
+ /* Otherwise we have some sort of overlap - reject this block */
+ return;
+ }
+
+ /* Now, nah should point to the successor block */
+ ah->a.next = nah;
+ ah->a.prev = nah->a.prev;
+ nah->a.prev = ah;
+ ah->a.prev->a.next = ah;
+
+ __free_block(ah);
+}
+
+/*
+ * Free all memory which is tagged with a specific tag.
+ */
+static void __free_tagged(malloc_tag_t tag) {
+ struct free_arena_header *fp, *head;
+ int i;
+
+ for (i = 0; i < NHEAP; i++) {
+ dprintf("__free_tagged(%u) heap %d\n", tag, i);
+ head = &__malloc_head[i];
+ for (fp = head->a.next ; fp != head ; fp = fp->a.next) {
+ if (ARENA_TYPE_GET(fp->a.attrs) == ARENA_TYPE_USED &&
+ fp->a.tag == tag)
+ fp = __free_block(fp);
+ }
+ }
+
+ dprintf("__free_tagged(%u) done\n", tag);
+}
+
+void comboot_cleanup_lowmem(com32sys_t *regs)
+{
+ (void)regs;
+
+ __free_tagged(MALLOC_MODULE);
+}
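Note: free() above recovers the arena header purely by pointer arithmetic; the pointer handed to the caller sits immediately after the embedded struct arena_header. Below is a minimal standalone sketch of that round trip (an annotation, not part of the commit; hosted C, struct layout copied from malloc.h further down, payload size hypothetical):

/* sketch: how free() maps a user pointer back to its arena header */
#include <stdio.h>
#include <stddef.h>

typedef size_t malloc_tag_t;
struct free_arena_header;

struct arena_header {
    malloc_tag_t tag;
    size_t attrs;
    struct free_arena_header *next, *prev;
};

struct free_arena_header {
    struct arena_header a;
    struct free_arena_header *next_free, *prev_free;
    size_t _pad[2];        /* pad to 2 * sizeof(struct arena_header) */
};

int main(void)
{
    /* One block: a header followed by a (hypothetical) 64-byte payload. */
    static union {
        struct free_arena_header h;
        char raw[sizeof(struct free_arena_header) + 64];
    } block;
    struct free_arena_header *ah = &block.h;

    /* malloc() returns the address just past the arena_header ... */
    void *ptr = (void *)(&ah->a + 1);

    /* ... and free() steps back over one arena_header to find it again,
       exactly as in the cast near the top of free() above. */
    struct free_arena_header *found =
        (struct free_arena_header *)((struct arena_header *)ptr - 1);

    printf("header %p, user %p, recovered %p\n",
           (void *)ah, ptr, (void *)found);
    return 0;
}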
diff --git a/contrib/syslinux-4.02/core/mem/init.c b/contrib/syslinux-4.02/core/mem/init.c
new file mode 100644
index 0000000..487bbb3
--- /dev/null
+++ b/contrib/syslinux-4.02/core/mem/init.c
@@ -0,0 +1,38 @@
+#include <stdlib.h>
+#include <errno.h>
+#include <string.h>
+#include "malloc.h"
+
+struct free_arena_header __malloc_head[NHEAP];
+
+static __hugebss char main_heap[128 << 10];
+extern char __lowmem_heap[];
+
+void mem_init(void)
+{
+ struct free_arena_header *fp;
+ int i;
+ uint16_t *bios_free_mem = (uint16_t *)0x413;
+
+ /* Initialize the head nodes */
+
+ fp = &__malloc_head[0];
+ for (i = 0 ; i < NHEAP ; i++) {
+ fp->a.next = fp->a.prev = fp->next_free = fp->prev_free = fp;
+ fp->a.attrs = ARENA_TYPE_HEAD | (i << ARENA_HEAP_POS);
+ fp->a.tag = MALLOC_HEAD;
+ fp++;
+ }
+
+ /* Initialize the main heap */
+ fp = (struct free_arena_header *)main_heap;
+ fp->a.attrs = ARENA_TYPE_USED | (HEAP_MAIN << ARENA_HEAP_POS);
+ ARENA_SIZE_SET(fp->a.attrs, sizeof main_heap);
+ __inject_free_block(fp);
+
+ /* Initialize the lowmem heap */
+ fp = (struct free_arena_header *)__lowmem_heap;
+ fp->a.attrs = ARENA_TYPE_USED | (HEAP_LOWMEM << ARENA_HEAP_POS);
+ ARENA_SIZE_SET(fp->a.attrs, (*bios_free_mem << 10) - (uintptr_t)fp);
+ __inject_free_block(fp);
+}
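Note: mem_init() sizes the lowmem heap from the BIOS data area word at linear address 0x413 (0x40:0x13), which holds the amount of free conventional memory in KiB. A standalone sketch of that size computation follows, with the BIOS word and heap address simulated (both values hypothetical; the real code reads physical memory and cannot run hosted):

/* sketch: the lowmem heap size computation from mem_init() */
#include <stdio.h>
#include <stdint.h>

int main(void)
{
    uint16_t  bios_free_kib = 639;      /* simulated word at 0x413: 639 KiB free */
    uintptr_t lowmem_heap   = 0x60000;  /* hypothetical __lowmem_heap address */

    /* Conventional memory is free up to bios_free_kib << 10; the heap
       spans from __lowmem_heap to that boundary. */
    size_t heap_size = ((size_t)bios_free_kib << 10) - (size_t)lowmem_heap;

    printf("lowmem heap: %zu bytes (%#zx..%#zx)\n", heap_size,
           (size_t)lowmem_heap, (size_t)lowmem_heap + heap_size);
    return 0;
}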
diff --git a/contrib/syslinux-4.02/core/mem/malloc.c b/contrib/syslinux-4.02/core/mem/malloc.c
new file mode 100644
index 0000000..78f7b41
--- /dev/null
+++ b/contrib/syslinux-4.02/core/mem/malloc.c
@@ -0,0 +1,99 @@
+/*
+ * malloc.c
+ *
+ * Very simple linked-list based malloc()/free().
+ */
+
+#include <stdlib.h>
+#include <errno.h>
+#include <string.h>
+#include <dprintf.h>
+#include "malloc.h"
+
+static void *__malloc_from_block(struct free_arena_header *fp,
+ size_t size, malloc_tag_t tag)
+{
+ size_t fsize;
+ struct free_arena_header *nfp, *na;
+ unsigned int heap = ARENA_HEAP_GET(fp->a.attrs);
+
+ fsize = ARENA_SIZE_GET(fp->a.attrs);
+
+ /* We need the 2* to account for the larger requirements of a free block */
+ if ( fsize >= size+2*sizeof(struct arena_header) ) {
+ /* Bigger block than required -- split block */
+ nfp = (struct free_arena_header *)((char *)fp + size);
+ na = fp->a.next;
+
+ ARENA_TYPE_SET(nfp->a.attrs, ARENA_TYPE_FREE);
+ ARENA_HEAP_SET(nfp->a.attrs, heap);
+ ARENA_SIZE_SET(nfp->a.attrs, fsize-size);
+ nfp->a.tag = MALLOC_FREE;
+ ARENA_TYPE_SET(fp->a.attrs, ARENA_TYPE_USED);
+ ARENA_SIZE_SET(fp->a.attrs, size);
+ fp->a.tag = tag;
+
+ /* Insert into all-block chain */
+ nfp->a.prev = fp;
+ nfp->a.next = na;
+ na->a.prev = nfp;
+ fp->a.next = nfp;
+
+ /* Replace current block on free chain */
+ nfp->next_free = fp->next_free;
+ nfp->prev_free = fp->prev_free;
+ fp->next_free->prev_free = nfp;
+ fp->prev_free->next_free = nfp;
+ } else {
+ /* Allocate the whole block */
+ ARENA_TYPE_SET(fp->a.attrs, ARENA_TYPE_USED);
+ fp->a.tag = tag;
+
+ /* Remove from free chain */
+ fp->next_free->prev_free = fp->prev_free;
+ fp->prev_free->next_free = fp->next_free;
+ }
+
+ return (void *)(&fp->a + 1);
+}
+
+static void *_malloc(size_t size, enum heap heap, malloc_tag_t tag)
+{
+ struct free_arena_header *fp;
+ struct free_arena_header *head = &__malloc_head[heap];
+ void *p = NULL;
+
+ dprintf("_malloc(%zu, %u, %u) @ %p = ",
+ size, heap, tag, __builtin_return_address(0));
+
+ if (size) {
+ /* Add the obligatory arena header, and round up */
+ size = (size + 2 * sizeof(struct arena_header) - 1) & ARENA_SIZE_MASK;
+
+ for ( fp = head->next_free ; fp != head ; fp = fp->next_free ) {
+ if ( ARENA_SIZE_GET(fp->a.attrs) >= size ) {
+ /* Found fit -- allocate out of this block */
+ p = __malloc_from_block(fp, size, tag);
+ break;
+ }
+ }
+ }
+
+ dprintf("%p\n", p);
+ return p;
+}
+
+void *malloc(size_t size)
+{
+ return _malloc(size, HEAP_MAIN, MALLOC_CORE);
+}
+
+void *lmalloc(size_t size)
+{
+ return _malloc(size, HEAP_LOWMEM, MALLOC_CORE);
+}
+
+void *pmapi_lmalloc(size_t size)
+{
+ return _malloc(size, HEAP_LOWMEM, MALLOC_MODULE);
+}
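Note: on the 32-bit build struct arena_header is 16 bytes, so the padding expression in _malloc() is exactly "round size plus one header up to the 16-byte alignment unit", because 2*16 - 1 == 16 + 15. A standalone sketch of that arithmetic (the header size is an assumption here, not taken from the commit):

/* sketch: request padding and rounding as done in _malloc() */
#include <stdio.h>
#include <stdint.h>

#define AH 16u  /* assumed sizeof(struct arena_header) on 32-bit builds */
#define ARENA_SIZE_MASK (~(uintptr_t)(AH - 1))

int main(void)
{
    size_t requests[] = { 1, 15, 16, 17, 100 };
    size_t i;

    for (i = 0; i < sizeof requests / sizeof requests[0]; i++) {
        size_t size = requests[i];
        /* (size + 2*AH - 1) & mask == round_up(size + AH, AH): one
           arena_header precedes the payload, then the total is rounded
           up to the alignment unit. */
        size_t block = (size + 2 * AH - 1) & ARENA_SIZE_MASK;
        printf("request %3zu -> block %3zu\n", size, block);
    }
    return 0;
}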
diff --git a/contrib/syslinux-4.02/core/mem/malloc.h b/contrib/syslinux-4.02/core/mem/malloc.h
new file mode 100644
index 0000000..b8ec44d
--- /dev/null
+++ b/contrib/syslinux-4.02/core/mem/malloc.h
@@ -0,0 +1,82 @@
+/*
+ * malloc.h
+ *
+ * Internals for the memory allocator
+ */
+
+#include <stdint.h>
+#include <stddef.h>
+#include "core.h"
+
+/*
+ * This is a temporary hack. In Syslinux 5 this will be a pointer to
+ * the owner module.
+ */
+typedef size_t malloc_tag_t;
+enum malloc_owner {
+ MALLOC_FREE,
+ MALLOC_HEAD,
+ MALLOC_CORE,
+ MALLOC_MODULE,
+};
+
+struct free_arena_header;
+
+/*
+ * This structure should be a power of two. This becomes the
+ * alignment unit.
+ */
+struct arena_header {
+ malloc_tag_t tag;
+ size_t attrs; /* Bits 0..1: Type
+ 2..3: Heap,
+ 4..31: MSB of the size */
+ struct free_arena_header *next, *prev;
+};
+
+enum arena_type {
+ ARENA_TYPE_USED = 0,
+ ARENA_TYPE_FREE = 1,
+ ARENA_TYPE_HEAD = 2,
+ ARENA_TYPE_DEAD = 3,
+};
+enum heap {
+ HEAP_MAIN,
+ HEAP_LOWMEM,
+ NHEAP
+};
+
+#define ARENA_SIZE_MASK (~(uintptr_t)(sizeof(struct arena_header)-1))
+#define ARENA_HEAP_MASK ((size_t)0xc)
+#define ARENA_HEAP_POS 2
+#define ARENA_TYPE_MASK ((size_t)0x3)
+
+#define ARENA_ALIGN_UP(p) ((char *)(((uintptr_t)(p) + ~ARENA_SIZE_MASK) \
+ & ARENA_SIZE_MASK))
+#define ARENA_ALIGN_DOWN(p) ((char *)((uintptr_t)(p) & ARENA_SIZE_MASK))
+
+#define ARENA_SIZE_GET(attrs) ((attrs) & ARENA_SIZE_MASK)
+#define ARENA_HEAP_GET(attrs) (((attrs) & ARENA_HEAP_MASK) >> ARENA_HEAP_POS)
+#define ARENA_TYPE_GET(attrs) ((attrs) & ARENA_TYPE_MASK)
+
+#define ARENA_SIZE_SET(attrs, size) \
+ ((attrs) = ((size) & ARENA_SIZE_MASK) | ((attrs) & ~ARENA_SIZE_MASK))
+#define ARENA_HEAP_SET(attrs, heap) \
+ ((attrs) = (((heap) << ARENA_HEAP_POS) & ARENA_HEAP_MASK) | \
+ ((attrs) & ~ARENA_HEAP_MASK))
+#define ARENA_TYPE_SET(attrs, type) \
+ ((attrs) = ((attrs) & ~ARENA_TYPE_MASK) | \
+ ((type) & ARENA_TYPE_MASK))
+
+/*
+ * This structure should be no more than twice the size of the
+ * previous structure.
+ */
+struct free_arena_header {
+ struct arena_header a;
+ struct free_arena_header *next_free, *prev_free;
+ size_t _pad[2]; /* Pad to 2*sizeof(struct arena_header) */
+};
+
+extern struct free_arena_header __malloc_head[NHEAP];
+void __inject_free_block(struct free_arena_header *ah);
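Note: the attrs packing works because every block size is a multiple of sizeof(struct arena_header), so the low bits of the size are always zero and can carry the type and heap fields instead. A standalone sketch exercising the GET/SET macros above (16-byte header assumed, macros copied from this file):

/* sketch: packing type, heap and size into one attrs word */
#include <stdio.h>
#include <stdint.h>
#include <stddef.h>

#define ARENA_SIZE_MASK (~(uintptr_t)15)   /* assumes 16-byte arena_header */
#define ARENA_HEAP_MASK ((size_t)0xc)
#define ARENA_HEAP_POS  2
#define ARENA_TYPE_MASK ((size_t)0x3)

#define ARENA_SIZE_GET(attrs) ((attrs) & ARENA_SIZE_MASK)
#define ARENA_HEAP_GET(attrs) (((attrs) & ARENA_HEAP_MASK) >> ARENA_HEAP_POS)
#define ARENA_TYPE_GET(attrs) ((attrs) & ARENA_TYPE_MASK)
#define ARENA_SIZE_SET(attrs, size) \
    ((attrs) = ((size) & ARENA_SIZE_MASK) | ((attrs) & ~ARENA_SIZE_MASK))
#define ARENA_HEAP_SET(attrs, heap) \
    ((attrs) = (((heap) << ARENA_HEAP_POS) & ARENA_HEAP_MASK) | \
               ((attrs) & ~ARENA_HEAP_MASK))
#define ARENA_TYPE_SET(attrs, type) \
    ((attrs) = ((attrs) & ~ARENA_TYPE_MASK) | ((type) & ARENA_TYPE_MASK))

int main(void)
{
    size_t attrs = 0;

    /* A 0x120-byte used block on heap 1 (HEAP_LOWMEM). */
    ARENA_TYPE_SET(attrs, (size_t)0);   /* ARENA_TYPE_USED */
    ARENA_HEAP_SET(attrs, (size_t)1);   /* HEAP_LOWMEM */
    ARENA_SIZE_SET(attrs, (size_t)0x120);

    printf("attrs %#zx: size %#zx, heap %zu, type %zu\n",
           attrs, (size_t)ARENA_SIZE_GET(attrs),
           (size_t)ARENA_HEAP_GET(attrs), (size_t)ARENA_TYPE_GET(attrs));
    return 0;
}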