@@ -85,6 +85,21 @@ size_t freemem;
 /** The heap itself */
 static char heap[HEAP_SIZE] __attribute__ (( aligned ( __alignof__(void *) )));
 
+/**
+ * Discard some cached data
+ *
+ * @ret discarded	Number of cached items discarded
+ */
+static unsigned int discard_cache ( void ) {
+	struct cache_discarder *discarder;
+	unsigned int discarded = 0;
+
+	for_each_table_entry ( discarder, CACHE_DISCARDERS ) {
+		discarded += discarder->discard();
+	}
+	return discarded;
+}
+
 /**
  * Allocate a memory block
  *
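The new discard_cache() helper above simply walks the CACHE_DISCARDERS linker table and gives every registered discarder a chance to release something, reporting the total number of items freed. For orientation, a minimal sketch of how a subsystem could plug into that table follows; the __cache_discarder registration macro, the header paths and the example cache itself are assumptions made for illustration and are not part of this patch.

/* Illustrative cache discarder (sketch only).  The single discard()
 * method of struct cache_discarder is taken from the hunk above; the
 * __cache_discarder registration macro, the header names and the
 * example cache list are assumed for the purpose of this sketch.
 */
#include <stdlib.h>
#include <gpxe/list.h>
#include <gpxe/malloc.h>

/** A hypothetical cached object */
struct example_cache_entry {
	/** List of cached objects */
	struct list_head list;
	/* ... cached payload would live here ... */
};

/** List of cached objects (hypothetical) */
static LIST_HEAD ( example_cache );

/**
 * Discard one cached object, if any exist
 *
 * @ret discarded	Number of cached items discarded
 */
static unsigned int example_cache_discard ( void ) {
	struct example_cache_entry *entry;

	/* Free the first cached object, if the cache is non-empty */
	list_for_each_entry ( entry, &example_cache, list ) {
		list_del ( &entry->list );
		free ( entry );
		return 1;
	}
	return 0;
}

/** Register with the cache discarder table (macro name assumed) */
struct cache_discarder example_cache_discarder __cache_discarder = {
	.discard = example_cache_discard,
};

Because discarders are reached through the linker table, the allocator never needs to know which subsystems cache data: any object file that defines such an entry is consulted automatically when memory runs low.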
@@ -112,55 +127,62 @@ void * alloc_memblock ( size_t size, size_t align ) {
 	align_mask = ( align - 1 ) | ( MIN_MEMBLOCK_SIZE - 1 );
 
 	DBG ( "Allocating %#zx (aligned %#zx)\n", size, align );
-
-	/* Search through blocks for the first one with enough space */
-	list_for_each_entry ( block, &free_blocks, list ) {
-		pre_size = ( - virt_to_phys ( block ) ) & align_mask;
-		post_size = block->size - pre_size - size;
-		if ( post_size >= 0 ) {
-			/* Split block into pre-block, block, and
-			 * post-block.  After this split, the "pre"
-			 * block is the one currently linked into the
-			 * free list.
-			 */
-			pre = block;
-			block = ( ( ( void * ) pre ) + pre_size );
-			post = ( ( ( void * ) block ) + size );
-			DBG ( "[%p,%p) -> [%p,%p) + [%p,%p)\n", pre,
-			      ( ( ( void * ) pre ) + pre->size ), pre, block,
-			      post, ( ( ( void * ) pre ) + pre->size ) );
-			/* If there is a "post" block, add it in to
-			 * the free list.  Leak it if it is too small
-			 * (which can happen only at the very end of
-			 * the heap).
-			 */
-			if ( ( size_t ) post_size >= MIN_MEMBLOCK_SIZE ) {
-				post->size = post_size;
-				list_add ( &post->list, &pre->list );
+
+	while ( 1 ) {
+		/* Search through blocks for the first one with enough space */
+		list_for_each_entry ( block, &free_blocks, list ) {
+			pre_size = ( - virt_to_phys ( block ) ) & align_mask;
+			post_size = block->size - pre_size - size;
+			if ( post_size >= 0 ) {
+				/* Split block into pre-block, block, and
+				 * post-block.  After this split, the "pre"
+				 * block is the one currently linked into the
+				 * free list.
+				 */
+				pre = block;
+				block = ( ( ( void * ) pre ) + pre_size );
+				post = ( ( ( void * ) block ) + size );
+				DBG ( "[%p,%p) -> [%p,%p) + [%p,%p)\n", pre,
+				      ( ( ( void * ) pre ) + pre->size ),
+				      pre, block, post,
+				      ( ( ( void * ) pre ) + pre->size ) );
+				/* If there is a "post" block, add it in to
+				 * the free list.  Leak it if it is too small
+				 * (which can happen only at the very end of
+				 * the heap).
+				 */
+				if ( (size_t) post_size >= MIN_MEMBLOCK_SIZE ) {
+					post->size = post_size;
+					list_add ( &post->list, &pre->list );
+				}
+				/* Shrink "pre" block, leaving the main block
+				 * isolated and no longer part of the free
+				 * list.
+				 */
+				pre->size = pre_size;
+				/* If there is no "pre" block, remove it from
+				 * the list.  Also remove it (i.e. leak it) if
+				 * it is too small, which can happen only at
+				 * the very start of the heap.
+				 */
+				if ( pre_size < MIN_MEMBLOCK_SIZE )
+					list_del ( &pre->list );
+				/* Update total free memory */
+				freemem -= size;
+				/* Return allocated block */
+				DBG ( "Allocated [%p,%p)\n", block,
+				      ( ( ( void * ) block ) + size ) );
+				return block;
 			}
-			/* Shrink "pre" block, leaving the main block
-			 * isolated and no longer part of the free
-			 * list.
-			 */
-			pre->size = pre_size;
-			/* If there is no "pre" block, remove it from
-			 * the list.  Also remove it (i.e. leak it) if
-			 * it is too small, which can happen only at
-			 * the very start of the heap.
-			 */
-			if ( pre_size < MIN_MEMBLOCK_SIZE )
-				list_del ( &pre->list );
-			/* Update total free memory */
-			freemem -= size;
-			/* Return allocated block */
-			DBG ( "Allocated [%p,%p)\n", block,
-			      ( ( ( void * ) block ) + size ) );
-			return block;
 		}
-	}
 
-	DBG ( "Failed to allocate %#zx (aligned %#zx)\n", size, align );
-	return NULL;
+		/* Try discarding some cached data to free up memory */
+		if ( ! discard_cache() ) {
+			/* Nothing available to discard */
+			DBG ( "Failed to allocate %#zx (aligned %#zx)\n",
+			      size, align );
+			return NULL;
+		}
+	}
 }
 
 /**
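With this hunk, alloc_memblock() no longer gives up after a single pass over the free list: if the search fails, it calls discard_cache() and retries, so NULL is returned only once every registered discarder reports that it has nothing left to free. The block-splitting arithmetic inside the loop is unchanged, only reindented, but since the ( - virt_to_phys ( block ) ) & align_mask padding calculation is the least obvious part of the moved code, a small standalone demonstration with invented numbers follows; the addresses, sizes and the MIN_MEMBLOCK_SIZE value are assumptions, and virtual addresses are treated as identical to physical addresses.

/* Standalone demonstration of the split arithmetic; all values are
 * invented for illustration, and virt_to_phys() is taken to be the
 * identity mapping.
 */
#include <stdio.h>
#include <stddef.h>

int main ( void ) {
	size_t block_addr = 0x12340;	/* start of candidate free block */
	size_t block_size = 0x1000;	/* size stored in the free block */
	size_t size = 0x200;		/* requested allocation size */
	size_t align = 0x100;		/* requested alignment */
	size_t min_block = 0x10;	/* assumed MIN_MEMBLOCK_SIZE */

	/* Padding needed so that ( block_addr + pre_size ) is aligned */
	size_t align_mask = ( align - 1 ) | ( min_block - 1 );
	size_t pre_size = ( - block_addr ) & align_mask;

	/* Space left over after the aligned allocation */
	long post_size = ( ( long ) block_size - ( long ) pre_size -
			   ( long ) size );
	if ( post_size < 0 ) {
		printf ( "block too small; the search would continue\n" );
		return 0;
	}

	printf ( "pre   [%#zx,%#zx) stays on the free list\n",
		 block_addr, ( block_addr + pre_size ) );
	printf ( "alloc [%#zx,%#zx) returned to the caller\n",
		 ( block_addr + pre_size ),
		 ( block_addr + pre_size + size ) );
	printf ( "post  [%#zx,%#zx) re-added to the free list\n",
		 ( block_addr + pre_size + size ),
		 ( block_addr + block_size ) );
	return 0;
}

For these values the demonstration prints a split at 0xc0 bytes of padding: pre [0x12340,0x12400), the 0x200-byte allocation [0x12400,0x12600) aligned to 0x100, and post [0x12600,0x13340) returned to the free list.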