/*
 * malloc.c
 *
 * Very simple linked-list based malloc()/free().
 */

#include <stdlib.h>
#include <string.h>
#include "malloc.h"

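/*
 * Sentinel node for both chains.  The arena list (a.next/a.prev) and the
 * free list (next_free/prev_free) are circular and initially point back at
 * the head itself; the head is of type ARENA_TYPE_HEAD and is never handed
 * out by malloc().
 */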
struct free_arena_header __malloc_head = {
    {
        ARENA_TYPE_HEAD,
        0,
        &__malloc_head,
        &__malloc_head,
    },
    &__malloc_head,
    &__malloc_head
};

extern void *__mem_end;     /* In argv.c */

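/*
 * Create the initial arena: one free block covering everything from
 * __mem_end up to __heap_end, linked onto both chains right after the head.
 */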
void __init_memory_arena(void)
{
    extern char __heap_end[];
    struct free_arena_header *fp;

    fp = (struct free_arena_header *)__mem_end;
    fp->a.type = ARENA_TYPE_FREE;
    fp->a.size = __heap_end - (char *)__mem_end;

    /* Insert into chains */
    fp->a.next = fp->a.prev = &__malloc_head;
    fp->next_free = fp->prev_free = &__malloc_head;
    __malloc_head.a.next = __malloc_head.a.prev = fp;
    __malloc_head.next_free = __malloc_head.prev_free = fp;
}

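/*
 * Carve an allocation of "size" bytes (size already includes the arena
 * header and rounding) out of the free block "fp".  If enough room is left
 * over to hold another free block, split it off; otherwise hand out the
 * whole block.
 */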
static void *__malloc_from_block(struct free_arena_header *fp, size_t size)
{
    size_t fsize;
    struct free_arena_header *nfp, *na;

    fsize = fp->a.size;

    /* We need the 2* to account for the larger requirements of a free block */
    if (fsize >= size + 2 * sizeof(struct arena_header)) {
        /* Bigger block than required -- split block */
        nfp = (struct free_arena_header *)((char *)fp + size);
        na = fp->a.next;

        nfp->a.type = ARENA_TYPE_FREE;
        nfp->a.size = fsize - size;
        fp->a.type = ARENA_TYPE_USED;
        fp->a.size = size;

        /* Insert into all-block chain */
        nfp->a.prev = fp;
        nfp->a.next = na;
        na->a.prev = nfp;
        fp->a.next = nfp;

        /* Replace current block on free chain */
        nfp->next_free = fp->next_free;
        nfp->prev_free = fp->prev_free;
        fp->next_free->prev_free = nfp;
        fp->prev_free->next_free = nfp;
    } else {
        /* Allocate the whole block */
        fp->a.type = ARENA_TYPE_USED;

        /* Remove from free chain */
        fp->next_free->prev_free = fp->prev_free;
        fp->prev_free->next_free = fp->next_free;
    }

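    /* The caller's memory starts immediately after the arena header */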
    return (void *)(&fp->a + 1);
}

void *malloc(size_t size)
{
    struct free_arena_header *fp;

    if (size == 0)
        return NULL;

    /* Add the obligatory arena header, and round up */
    size = (size + 2 * sizeof(struct arena_header) - 1) & ~ARENA_SIZE_MASK;

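    /* First fit: walk the free chain until a big-enough block turns up */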
    for (fp = __malloc_head.next_free; fp->a.type != ARENA_TYPE_HEAD;
         fp = fp->next_free) {
        if (fp->a.size >= size) {
            /* Found fit -- allocate out of this block */
            return __malloc_from_block(fp, size);
        }
    }

    /* Nothing found... need to request a block from the kernel */
    return NULL;        /* No kernel to get stuff from */
}

void *calloc(size_t nmemb, size_t size)
{
    void *p;

    /* Fail cleanly if nmemb * size would overflow size_t */
    if (nmemb && size > (size_t)-1 / nmemb)
        return NULL;

    size *= nmemb;
    p = malloc(size);
    if (p)
        memset(p, 0, size);
    return p;
}