#include "threads/malloc.h"
#include <debug.h>
#include <list.h>
#include <stdio.h>
#include <string.h>
#include "threads/mmu.h"
#include "threads/palloc.h"
#include "threads/synch.h"
11 /* A simple implementation of malloc().
13 The size of each request, in bytes, is rounded up to a power
14 of 2 and assigned to the "descriptor" that manages blocks of
15 that size. The descriptor keeps a list of free blocks. If
   the free list is nonempty, one of its blocks is used to
   satisfy the request.
19 Otherwise, a new page of memory, called an "arena", is
20 obtained from the page allocator (if none is available,
21 malloc() returns a null pointer). The new arena is divided
22 into blocks, all of which are added to the descriptor's free
23 list. Then we return one of the new blocks.
25 When we free a block, we add it to its descriptor's free list.
26 But if the arena that the block was in now has no in-use
27 blocks, we remove all of the arena's blocks from the free list
28 and give the arena back to the page allocator.
30 Major limitation: the largest block that can be allocated is
31 PGSIZE / 2, or 2 kB. Use palloc_get() to allocate pages (4 kB
   blocks).  You're on your own if you need more memory than
   that. */
38 size_t block_size; /* Size of each element in bytes. */
39 size_t blocks_per_arena; /* Number of blocks in an arena. */
40 struct list free_list; /* List of free blocks. */
41 struct lock lock; /* Lock. */
44 /* Magic number for detecting arena corruption. */
45 #define ARENA_MAGIC 0x9a548eed
50 unsigned magic; /* Always set to ARENA_MAGIC. */
51 struct desc *desc; /* Owning descriptor. */
52 size_t free_cnt; /* Number of free blocks. */
58 list_elem free_elem; /* Free list element. */
61 /* Our set of descriptors. */
62 static struct desc descs[10]; /* Descriptors. */
63 static size_t desc_cnt; /* Number of descriptors. */
65 static struct arena *block_to_arena (struct block *);
66 static struct block *arena_to_block (struct arena *, size_t idx);
68 /* Initializes the malloc() descriptors. */
74 for (block_size = 16; block_size < PGSIZE; block_size *= 2)
76 struct desc *d = &descs[desc_cnt++];
77 ASSERT (desc_cnt <= sizeof descs / sizeof *descs);
78 d->block_size = block_size;
79 d->blocks_per_arena = (PGSIZE - sizeof (struct arena)) / block_size;
80 list_init (&d->free_list);
81 lock_init (&d->lock, "malloc");
85 /* Obtains and returns a new block of at least SIZE bytes.
86 Returns a null pointer if memory is not available. */
94 /* A null pointer satisfies a request for 0 bytes. */
98 /* Find the smallest descriptor that satisfies a SIZE-byte
100 for (d = descs; d < descs + desc_cnt; d++)
101 if (d->block_size >= size)
103 if (d == descs + desc_cnt)
105 printf ("malloc: %zu byte allocation too big\n", size);
109 lock_acquire (&d->lock);
111 /* If the free list is empty, create a new arena. */
112 if (list_empty (&d->free_list))
116 /* Allocate a page. */
120 lock_release (&d->lock);
124 /* Initialize arena and add its blocks to the free list. */
125 a->magic = ARENA_MAGIC;
127 a->free_cnt = d->blocks_per_arena;
128 for (i = 0; i < d->blocks_per_arena; i++)
130 struct block *b = arena_to_block (a, i);
131 list_push_back (&d->free_list, &b->free_elem);
135 /* Get a block from free list and return it. */
136 b = list_entry (list_pop_front (&d->free_list), struct block, free_elem);
137 a = block_to_arena (b);
139 lock_release (&d->lock);
143 /* Allocates and return A times B bytes initialized to zeroes.
144 Returns a null pointer if memory is not available. */
146 calloc (size_t a, size_t b)
151 /* Calculate block size. */
153 if (size < a || size < b)
156 /* Allocate and zero memory. */
164 /* Frees block P, which must have been previously allocated with
165 malloc() or calloc(). */
177 a = block_to_arena (b);
180 lock_acquire (&d->lock);
182 /* Add block to free list. */
183 list_push_front (&d->free_list, &b->free_elem);
185 /* If the arena is now entirely unused, free it. */
186 if (++a->free_cnt >= d->blocks_per_arena)
190 ASSERT (a->free_cnt == d->blocks_per_arena);
191 for (i = 0; i < d->blocks_per_arena; i++)
193 struct block *b = arena_to_block (a, i);
194 list_remove (&b->free_elem);
199 lock_release (&d->lock);
202 /* Returns the arena that block B is inside. */
203 static struct arena *
204 block_to_arena (struct block *b)
206 struct arena *a = pg_round_down (b);
208 ASSERT (a->magic == ARENA_MAGIC);
212 /* Returns the (IDX - 1)'th block within arena A. */
213 static struct block *
214 arena_to_block (struct arena *a, size_t idx)
217 ASSERT (a->magic == ARENA_MAGIC);
218 ASSERT (idx < a->desc->blocks_per_arena);
219 return (struct block *) ((uint8_t *) a
221 + idx * a->desc->block_size);