/* [mLib] / mem / sub.c */
1 /* -*-c-*-
2 *
3 * Allocation of known-size blocks
4 *
5 * (c) 1998 Straylight/Edgeware
6 */
7
8 /*----- Licensing notice --------------------------------------------------*
9 *
10 * This file is part of the mLib utilities library.
11 *
12 * mLib is free software; you can redistribute it and/or modify
13 * it under the terms of the GNU Library General Public License as
14 * published by the Free Software Foundation; either version 2 of the
15 * License, or (at your option) any later version.
16 *
17 * mLib is distributed in the hope that it will be useful,
18 * but WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 * GNU Library General Public License for more details.
21 *
22 * You should have received a copy of the GNU Library General Public
23 * License along with mLib; if not, write to the Free
24 * Software Foundation, Inc., 59 Temple Place - Suite 330, Boston,
25 * MA 02111-1307, USA.
26 */
27
28 /*----- The big idea ------------------------------------------------------*
29 *
30 * This file provides an extra layer over @malloc@. It provides fast
31 * turnover for small blocks, and tries to minimize the per-block overhead.
32 *
33 * To do its job, @alloc@ must place an extra restriction on you: you must
34 * know the size of a block when you free it. Usually you'll have this
35 * information encoded in some way either in the block or in the thing that
36 * referenced it, so this isn't a hardship.
37 *
38 * It works fairly simply. If a request for a big block (as defined by the
39 * constants below) comes in, it gets sent on to @malloc@ unmolested. For
40 * small blocks, it goes straight to a `bin' -- a list containing free blocks
41 * of exactly that size, or the nearest bigger size we can manage. If the
42 * bin is empty, a `chunk' is allocated from @malloc@: this has enough room
 * for lots of blocks of the requested size, so it gets split up and each
44 * individual small block is added to the bin list. The first block in the
45 * bin list is then removed and given to the caller. In this way, @malloc@
46 * only stores its information once for lots of little blocks, so we save
47 * memory. Because I know where the correct bin is just from the block size,
48 * and I don't need to do any searching at all in the usual case (because the
49 * list isn't empty) I can get a speed advantage too.
50 *
51 * This code is almost certainly not ANSI conformant, although I'm not
52 * actually sure. If some kind soul would let me know how seriously I've
53 * violated the standard, and whether this is easily fixable, I'd be
54 * grateful.
55 */
56
57 /*----- Header files ------------------------------------------------------*/
58
59 #include "config.h"
60
61 /* --- ANSI headers --- */
62
63 #include <assert.h>
64 #include <stdio.h>
65 #include <stdlib.h>
66 #include <string.h>
67
68 /* --- External headers --- */
69
70 #ifdef HAVE_VALGRIND_VALGRIND_H
71 # include <valgrind/valgrind.h>
72 # include <valgrind/memcheck.h>
73 # define VG(x) x
74 #else
75 # define VG(x)
76 #endif
77
78 /* --- Local headers --- */
79
80 #include "arena.h"
81 #include "exc.h"
82 #include "sub.h"
83
84 /*----- Configuration tweaks ----------------------------------------------*/
85
86 /* #define SUBARENA_TRIVIAL */
87
88 #define REDZONE_SIZE (2*SUB_GRANULE)
89
90 /*----- Static variables --------------------------------------------------*/
91
92 static size_t sizes[SUB_BINS];
93 VG( static unsigned flags; )
94 #define SF_VALGRIND 1u
95
96 /*----- Global variables --------------------------------------------------*/
97
98 subarena sub_global;
99
100 #ifdef SUBARENA_TRIVIAL
101
102 typedef struct sub_link {
103 struct sub_link *next;
104 void *p;
105 size_t sz;
106 } sub_link;
107
108 #else
109
110 union sub_header {
111 void *next;
112 union align _a;
113 };
114
115 #endif
116
117 /*----- Main code ---------------------------------------------------------*/
118
119 /* --- @subarena_create@ --- *
120 *
121 * Arguments: @subarena *s@ = pointer to arena to initialize
122 * @arena *a@ = pointer to underlying arena block
123 *
124 * Returns: ---
125 *
126 * Use: Initialize a suballocation arena based on an underlying large
127 * blocks arena.
128 */
129
130 void subarena_create(subarena *s, arena *a)
131 {
132 #ifdef SUBARENA_TRIVIAL
133 s->bin[0] = 0;
134 #else
135 size_t i;
136
137 if (!sizes[1]) sub_init();
138 for (i = 0; i < SUB_BINS; i++) s->bin[i] = 0;
139 VG( VALGRIND_CREATE_MEMPOOL(s, REDZONE_SIZE, 0); )
140 #endif
141 s->a = a;
142 }
143
144 /* --- @subarena_destroy@ --- *
145 *
146 * Arguments: @subarena *s@ = pointer to arena to destroy
147 *
148 * Returns: ---
149 *
150 * Use: Destroys a suballocation arena, freeing all of the memory it
151 * contains back to the underlying large blocks arena.
152 */
153
154 void subarena_destroy(subarena *s)
155 {
156 #ifdef SUBARENA_TRIVIAL
157
158 sub_link *l, *ll;
159
160 for (l = s->bin[0]; l; l = ll) {
161 ll = l;
162 a_free(s->a, l->p);
163 a_free(s->a, l);
164 }
165 s->bin[0] = 0;
166
167 #else
168
169 union sub_header *p, *q;
170
171 for (p = s->bin[0]; p; p = q) { q = p->next; A_FREE(s->a, p); }
172 VG( VALGRIND_DESTROY_MEMPOOL(s); )
173
174 #endif
175 }
176
177 /* --- @subarena_alloc@ --- *
178 *
179 * Arguments: @subarena *s@ = pointer to arena
180 * @size_t s@ = size of chunk wanted
181 *
182 * Returns: Pointer to a block at least as large as the one wanted.
183 *
184 * Use: Allocates a small block of memory from the given pool. The
185 * exception @EXC_NOMEM@ is raised if the underlying arena is
186 * full.
187 */
188
189 void *subarena_alloc(subarena *s, size_t sz)
190 {
191 #ifdef SUBARENA_TRIVIAL
192
193 sub_link *l;
194 void *p;
195
196 if (!s->a) subarena_create(s, arena_global);
197
198 if ((l = a_alloc(s->a, sizeof(*l))) == 0) return (0);
199 if ((p = a_alloc(s->a, sz)) == 0) { a_free(s->a, l); return (0); }
200 l->p = p; l->sz = sz; l->next = s->bin[0]; s->bin[0] = l;
201 return (p);
202
203 #else
204
205 unsigned char *p, *q;
206 union sub_header *h;
207 size_t bin, chsz, redsz;
208
209 /* --- Ensure that everything is initialized --- */
210
211 if (!s->a) subarena_create(s, arena_global);
212
213 /* --- Handle oversize blocks --- */
214
215 bin = SUB_BIN(sz);
216 if (bin >= SUB_BINS) {
217 p = A_ALLOC(s->a, sz); if (!p) THROW(EXC_NOMEM);
218 return (p);
219 }
220
221 /* --- If the bin is empty, find some memory --- */
222
223 if (!s->bin[bin]) {
224 redsz = 0; VG( if (flags&SF_VALGRIND) redsz = REDZONE_SIZE; )
225 chsz = SUB_BINSZ(bin) + redsz;
226 h = A_ALLOC(s->a, sizes[bin]); if (!h) THROW(EXC_NOMEM);
227 h->next = s->bin[0]; s->bin[0] = h;
228 p = (unsigned char *)(h + 1);
229 q = (unsigned char *)h + sizes[bin] - redsz - chsz; *(void **)q = 0;
230 while (q > p) { q -= chsz; *(void **)q = q + chsz; }
231 s->bin[bin] = q;
232 VG( VALGRIND_MAKE_MEM_NOACCESS(p, sizes[bin]); )
233 }
234
235 /* --- Extract the first block in the list --- */
236
237 p = s->bin[bin];
238 VG( if (flags&SF_VALGRIND) {
239 VALGRIND_MAKE_MEM_DEFINED(p, sizeof(void *));
240 s->bin[bin] = *(void **)p;
241 VALGRIND_MEMPOOL_ALLOC(s, p, sz);
242 } else )
243 s->bin[bin] = *(void **)p;
244 return (p);
245
246 #endif
247 }
248
249 /* --- @subarena_free@ --- *
250 *
251 * Arguments: @subarena *s@ = pointer to arena
252 * @void *p@ = address of block to free
253 * @size_t s@ = size of block
254 *
255 * Returns: ---
256 *
257 * Use: Frees a block allocated by @subarena_alloc@.
258 */
259
260 void subarena_free(subarena *s, void *p, size_t sz)
261 {
262 #ifdef SUBARENA_TRIVIAL
263
264 sub_link *lh = s->bin[0], **l, *ll;
265
266 for (l = &lh; *l && (*l)->p != p; l = &(*l)->next) ;
267 ll = *l; assert(ll); assert(ll->sz == sz);
268 *l = ll->next;
269 a_free(s->a, ll); a_free(s->a, p); s->bin[0] = lh;
270
271 #else
272
273 size_t bin = SUB_BIN(sz);
274
275 if (bin >= SUB_BINS)
276 A_FREE(s->a, p);
277 else {
278 *(void **)p = s->bin[bin]; s->bin[bin] = p;
279 VG( if (flags&SF_VALGRIND) VALGRIND_MEMPOOL_FREE(s, p); )
280 }
281
282 #endif
283 }
284
285 /*----- Compatibility stuff -----------------------------------------------*/
286
287 /* --- @sub_alloc@ --- *
288 *
289 * Arguments: @size_t s@ = size of chunk wanted
290 *
291 * Returns: Pointer to a block at least as large as the one wanted.
292 *
293 * Use: Allocates a small block of memory from the @sub_global@ pool.
294 */
295
296 void *(sub_alloc)(size_t sz) { return sub_alloc(sz); }
297
298 /* --- @sub_free@ --- *
299 *
300 * Arguments: @void *p@ = address of block to free
301 * @size_t s@ = size of block
302 *
303 * Returns: ---
304 *
305 * Use: Frees a block allocated by @sub_alloc@.
306 */
307
308 void (sub_free)(void *p, size_t sz) { sub_free(p, sz); }
309
310 /* --- @sub_init@ --- *
311 *
312 * Arguments: ---
313 *
314 * Returns: ---
315 *
316 * Use: Initializes the magic allocator.
317 */
318
319 void sub_init(void)
320 {
321 #ifndef SUBARENA_TRIVIAL
322 size_t n, sz;
323 int i;
324
325 /* Find out if we're running under Valgrind --- */
326
327 VG( if (RUNNING_ON_VALGRIND) flags |= SF_VALGRIND; )
328
329 /* --- Initialize the sizes bins --- */
330
331 for (i = 1; i < SUB_BINS; i++) {
332 sz = SUB_BINSZ(i);
333 n = (SUB_CHUNK + sz - 1)/sz;
334 sz = sizeof(union sub_header) + n*sz;
335 VG( if (flags&SF_VALGRIND) sz += (n + 1)*REDZONE_SIZE; )
336 sizes[i] = sz;
337 }
338 #endif
339 }
340
341 /*----- Debugging code ----------------------------------------------------*/
342
343 #ifdef TEST_RIG
344
345 #define BLOCKS 1024
346 #define SIZE_MAX 2048
347 #define ITERATIONS 500000
348
349 int main(void)
350 {
351 static void *block[BLOCKS];
352 static size_t size[BLOCKS];
353 size_t allocced = 0;
354 int i;
355 long count;
356
357 sub_init();
358
359 for (count = 0; count < ITERATIONS; count++) {
360 i = rand() % BLOCKS;
361 if (block[i]) {
362 sub_free(block[i], size[i]);
363 block[i] = 0;
364 allocced -= size[i];
365 } else {
366 block[i] = sub_alloc(size[i] =
367 rand() % (SUB_MAXBIN - 128) + 128);
368 allocced += size[i];
369 memset(block[i], 0, size[i]); /* trample allocated storage */
370 }
371 }
372
373 return (0);
374 }
375
376 #endif
377
378 /*----- That's all, folks -------------------------------------------------*/