/*
 * Copyright (c) 1991-1994 by Xerox Corporation.  All rights reserved.
 * Copyright (c) 1999-2000 by Hewlett-Packard Company.  All rights reserved.
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED.  ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 *
 */


/*
 * Some simple primitives for allocation with explicit type information.
 * Simple objects are allocated such that they contain a GC_descr at the
 * end (in the last allocated word).  This descriptor may be a procedure
 * which then examines an extended descriptor passed as its environment.
 *
 * Arrays are treated as simple objects if they have sufficiently simple
 * structure.  Otherwise they are allocated from an array kind that supplies
 * a special mark procedure.  These arrays contain a pointer to a
 * complex_descriptor as their last word.
 * This is done because the environment field is too small, and the collector
 * must trace the complex_descriptor.
 *
 * Note that descriptors inside objects may appear cleared, if we encounter a
 * false reference to an object on a free list.  In the GC_descr case, this
 * is OK, since a 0 descriptor corresponds to examining no fields.
 * In the complex_descriptor case, we explicitly check for that case.
 *
 * MAJOR PARTS OF THIS CODE HAVE NOT BEEN TESTED AT ALL and are not testable,
 * since they are not accessible through the current interface.
 */
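
/*
 * Illustrative client-side usage (an editorial sketch, not part of this
 * file; it relies only on the public interface declared in gc_typed.h,
 * which is included below, and the struct/variable names are hypothetical):
 *
 *   typedef struct {
 *       char *str;      -- pointer word: must be traced
 *       int len;        -- non-pointer word: may be ignored by the marker
 *   } example_t;
 *
 *   GC_word bm[GC_BITMAP_SIZE(example_t)] = {0};
 *   GC_descr d;
 *   example_t *p;
 *
 *   GC_set_bit(bm, GC_WORD_OFFSET(example_t, str));
 *   d = GC_make_descriptor(bm, GC_WORD_LEN(example_t));
 *   p = (example_t *)GC_malloc_explicitly_typed(sizeof(example_t), d);
 *
 * The descriptor d then ends up stored in the last word of each such
 * object, as described in the comment above.
 */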

#include "private/gc_pmark.h"
#include "gc_typed.h"

# define TYPD_EXTRA_BYTES (sizeof(word) - EXTRA_BYTES)

GC_bool GC_explicit_typing_initialized = FALSE;

int GC_explicit_kind;   /* Object kind for objects with indirect        */
                        /* (possibly extended) descriptors.             */

int GC_array_kind;      /* Object kind for objects with complex         */
                        /* descriptors and GC_array_mark_proc.          */

/* Extended descriptors.  GC_typed_mark_proc understands these.         */
/* These are used for simple objects that are larger than what          */
/* can be described by a BITMAP_BITS sized bitmap.                      */
typedef struct {
    word ed_bitmap;             /* lsb corresponds to first word.       */
    GC_bool ed_continued;       /* next entry is continuation.          */
} ext_descr;

/* Array descriptors.  GC_array_mark_proc understands these.            */
/* We may eventually need to add provisions for headers and             */
/* trailers.  Hence we provide for tree structured descriptors,         */
/* though we don't really use them currently.                           */
typedef union ComplexDescriptor {
    struct LeafDescriptor {     /* Describes simple array               */
        word ld_tag;
#       define LEAF_TAG 1
        size_t ld_size;         /* bytes per element                    */
                                /* multiple of ALIGNMENT                */
        size_t ld_nelements;    /* Number of elements.                  */
        GC_descr ld_descriptor; /* A simple length, bitmap,             */
                                /* or procedure descriptor.             */
    } ld;
    struct ComplexArrayDescriptor {
        word ad_tag;
#       define ARRAY_TAG 2
        size_t ad_nelements;
        union ComplexDescriptor * ad_element_descr;
    } ad;
    struct SequenceDescriptor {
        word sd_tag;
#       define SEQUENCE_TAG 3
        union ComplexDescriptor * sd_first;
        union ComplexDescriptor * sd_second;
    } sd;
} complex_descriptor;
#define TAG ld.ld_tag

ext_descr * GC_ext_descriptors; /* Points to array of extended          */
                                /* descriptors.                         */

size_t GC_ed_size = 0;          /* Current size of above array.         */
# define ED_INITIAL_SIZE 100

size_t GC_avail_descr = 0;      /* Next available slot.                 */

int GC_typed_mark_proc_index;   /* Indices of my mark                   */
int GC_array_mark_proc_index;   /* procedures.                          */

/* Add a multiword bitmap to the GC_ext_descriptors array.  Return the  */
/* starting index.                                                      */
/* Returns -1 on failure.                                               */
/* Caller does not hold allocation lock.                                */
signed_word GC_add_ext_descriptor(GC_bitmap bm, word nbits)
{
    size_t nwords = divWORDSZ(nbits + WORDSZ-1);
    signed_word result;
    size_t i;
    word last_part;
    size_t extra_bits;
    DCL_LOCK_STATE;

    LOCK();
    while (GC_avail_descr + nwords >= GC_ed_size) {
        ext_descr * new;
        size_t new_size;
        word ed_size = GC_ed_size;

        UNLOCK();
        if (ed_size == 0) {
            new_size = ED_INITIAL_SIZE;
        } else {
            new_size = 2 * ed_size;
            if (new_size > MAX_ENV) return(-1);
        }
        new = (ext_descr *) GC_malloc_atomic(new_size * sizeof(ext_descr));
        if (new == 0) return(-1);
        LOCK();
        if (ed_size == GC_ed_size) {
            if (GC_avail_descr != 0) {
                BCOPY(GC_ext_descriptors, new,
                      GC_avail_descr * sizeof(ext_descr));
            }
            GC_ed_size = new_size;
            GC_ext_descriptors = new;
        } /* else another thread already resized it in the meantime */
    }
    result = GC_avail_descr;
    for (i = 0; i < nwords-1; i++) {
        GC_ext_descriptors[result + i].ed_bitmap = bm[i];
        GC_ext_descriptors[result + i].ed_continued = TRUE;
    }
    last_part = bm[i];
    /* Clear irrelevant bits. */
    extra_bits = nwords * WORDSZ - nbits;
    last_part <<= extra_bits;
    last_part >>= extra_bits;
    GC_ext_descriptors[result + i].ed_bitmap = last_part;
    GC_ext_descriptors[result + i].ed_continued = FALSE;
    GC_avail_descr += nwords;
    UNLOCK();
    return(result);
}

/* Table of bitmap descriptors for n-word-long all-pointer objects.     */
GC_descr GC_bm_table[WORDSZ/2];

/* Return a descriptor for the concatenation of 2 nwords-long objects,  */
/* each of which is described by descriptor.                            */
/* The result is known to be short enough to fit into a bitmap          */
/* descriptor.                                                          */
/* Descriptor is a GC_DS_LENGTH or GC_DS_BITMAP descriptor.             */
GC_descr GC_double_descr(GC_descr descriptor, word nwords)
{
    if ((descriptor & GC_DS_TAGS) == GC_DS_LENGTH) {
        descriptor = GC_bm_table[BYTES_TO_WORDS((word)descriptor)];
    }
    descriptor |= (descriptor & ~GC_DS_TAGS) >> nwords;
    return(descriptor);
}
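
/*
 * Worked illustration (an editorial sketch, assuming a 64-bit word and the
 * convention that the most significant bitmap bit describes the first word
 * of the object):  for a 2-word all-pointer object the length descriptor is
 * WORDS_TO_BYTES(2) | GC_DS_LENGTH.  GC_double_descr first replaces it with
 * GC_bm_table[2], i.e. the top two bits set plus GC_DS_BITMAP, and then ORs
 * in that same bitmap (tag bits masked off) shifted right by nwords = 2.
 * The result has the top four bits set: a bitmap descriptor for the 4-word,
 * all-pointer concatenation of the two objects.
 */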

complex_descriptor * GC_make_sequence_descriptor(complex_descriptor *first,
                                                 complex_descriptor *second);

/* Build a descriptor for an array with nelements elements,             */
/* each of which can be described by a simple descriptor.               */
/* We try to optimize some common cases.                                */
/* If the result is COMPLEX, then a complex_descriptor* is returned     */
/* in *complex_d.                                                       */
/* If the result is LEAF, then a LeafDescriptor is built in             */
/* the structure pointed to by leaf.                                    */
/* The tag in the leaf structure is not set.                            */
/* If the result is SIMPLE, then a GC_descr                             */
/* is returned in *simple_d.                                            */
/* If the result is NO_MEM, then                                        */
/* we failed to allocate the descriptor.                                */
/* The implementation knows that GC_DS_LENGTH is 0.                     */
/* *leaf, *complex_d, and *simple_d may be used as temporaries          */
/* during the construction.                                             */
# define COMPLEX 2
# define LEAF 1
# define SIMPLE 0
# define NO_MEM (-1)
int GC_make_array_descriptor(size_t nelements, size_t size, GC_descr descriptor,
                             GC_descr *simple_d,
                             complex_descriptor **complex_d,
                             struct LeafDescriptor * leaf)
{
#   define OPT_THRESHOLD 50
        /* For larger arrays, we try to combine descriptors of adjacent */
        /* elements to speed up marking, and to reduce the amount       */
        /* of space needed on the mark stack.                           */
    if ((descriptor & GC_DS_TAGS) == GC_DS_LENGTH) {
        if (descriptor == (GC_descr)size) {
            *simple_d = nelements * descriptor;
            return(SIMPLE);
        } else if ((word)descriptor == 0) {
            *simple_d = (GC_descr)0;
            return(SIMPLE);
        }
    }
    if (nelements <= OPT_THRESHOLD) {
        if (nelements <= 1) {
            if (nelements == 1) {
                *simple_d = descriptor;
                return(SIMPLE);
            } else {
                *simple_d = (GC_descr)0;
                return(SIMPLE);
            }
        }
    } else if (size <= BITMAP_BITS/2
               && (descriptor & GC_DS_TAGS) != GC_DS_PROC
               && (size & (sizeof(word)-1)) == 0) {
        int result =
            GC_make_array_descriptor(nelements/2, 2*size,
                                     GC_double_descr(descriptor,
                                                     BYTES_TO_WORDS(size)),
                                     simple_d, complex_d, leaf);
        if ((nelements & 1) == 0) {
            return(result);
        } else {
            struct LeafDescriptor * one_element =
                (struct LeafDescriptor *)
                GC_malloc_atomic(sizeof(struct LeafDescriptor));

            if (result == NO_MEM || one_element == 0) return(NO_MEM);
            one_element -> ld_tag = LEAF_TAG;
            one_element -> ld_size = size;
            one_element -> ld_nelements = 1;
            one_element -> ld_descriptor = descriptor;
            switch(result) {
                case SIMPLE:
                {
                    struct LeafDescriptor * beginning =
                        (struct LeafDescriptor *)
                        GC_malloc_atomic(sizeof(struct LeafDescriptor));
                    if (beginning == 0) return(NO_MEM);
                    beginning -> ld_tag = LEAF_TAG;
                    beginning -> ld_size = size;
                    beginning -> ld_nelements = 1;
                    beginning -> ld_descriptor = *simple_d;
                    *complex_d = GC_make_sequence_descriptor(
                                     (complex_descriptor *)beginning,
                                     (complex_descriptor *)one_element);
                    break;
                }
                case LEAF:
                {
                    struct LeafDescriptor * beginning =
                        (struct LeafDescriptor *)
                        GC_malloc_atomic(sizeof(struct LeafDescriptor));
                    if (beginning == 0) return(NO_MEM);
                    beginning -> ld_tag = LEAF_TAG;
                    beginning -> ld_size = leaf -> ld_size;
                    beginning -> ld_nelements = leaf -> ld_nelements;
                    beginning -> ld_descriptor = leaf -> ld_descriptor;
                    *complex_d = GC_make_sequence_descriptor(
                                     (complex_descriptor *)beginning,
                                     (complex_descriptor *)one_element);
                    break;
                }
                case COMPLEX:
                    *complex_d = GC_make_sequence_descriptor(
                                     *complex_d,
                                     (complex_descriptor *)one_element);
                    break;
            }
            return(COMPLEX);
        }
    }
    {
        leaf -> ld_size = size;
        leaf -> ld_nelements = nelements;
        leaf -> ld_descriptor = descriptor;
        return(LEAF);
    }
}
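
/*
 * Illustrative outcomes (an editorial sketch, not exhaustive):  for 1000
 * elements whose descriptor marks every word of the element (a GC_DS_LENGTH
 * descriptor equal to the element size), the result is SIMPLE, with a single
 * length descriptor covering the whole array.  For 10 elements of a 4-word
 * struct in which only the first word is a pointer, the result is LEAF, with
 * ld_nelements == 10 and ld_size equal to the element size in bytes.
 * COMPLEX arises when the halving recursion above must splice a leftover odd
 * element onto the combined descriptor via GC_make_sequence_descriptor.
 */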

complex_descriptor * GC_make_sequence_descriptor(complex_descriptor *first,
                                                 complex_descriptor *second)
{
    struct SequenceDescriptor * result =
        (struct SequenceDescriptor *)
        GC_malloc(sizeof(struct SequenceDescriptor));
    /* Can't result in overly conservative marking, since tags are      */
    /* very small integers.  Probably faster than maintaining type      */
    /* info.                                                            */
    if (result != 0) {
        result -> sd_tag = SEQUENCE_TAG;
        result -> sd_first = first;
        result -> sd_second = second;
    }
    return((complex_descriptor *)result);
}

#ifdef UNDEFINED
complex_descriptor * GC_make_complex_array_descriptor(word nelements,
                                                      complex_descriptor *descr)
{
    struct ComplexArrayDescriptor * result =
        (struct ComplexArrayDescriptor *)
        GC_malloc(sizeof(struct ComplexArrayDescriptor));

    if (result != 0) {
        result -> ad_tag = ARRAY_TAG;
        result -> ad_nelements = nelements;
        result -> ad_element_descr = descr;
    }
    return((complex_descriptor *)result);
}
#endif

ptr_t * GC_eobjfreelist;

ptr_t * GC_arobjfreelist;

mse * GC_typed_mark_proc(word * addr, mse * mark_stack_ptr,
                         mse * mark_stack_limit, word env);

mse * GC_array_mark_proc(word * addr, mse * mark_stack_ptr,
                         mse * mark_stack_limit, word env);

/* Caller does not hold allocation lock. */
void GC_init_explicit_typing(void)
{
    register int i;
    DCL_LOCK_STATE;

    /* Ignore gcc "no effect" warning. */
    GC_STATIC_ASSERT(sizeof(struct LeafDescriptor) % sizeof(word) == 0);
    LOCK();
    if (GC_explicit_typing_initialized) {
        UNLOCK();
        return;
    }
    GC_explicit_typing_initialized = TRUE;
    /* Set up object kind with simple indirect descriptor. */
    GC_eobjfreelist = (ptr_t *)GC_new_free_list_inner();
    GC_explicit_kind = GC_new_kind_inner(
                           (void **)GC_eobjfreelist,
                           (((word)WORDS_TO_BYTES(-1)) | GC_DS_PER_OBJECT),
                           TRUE, TRUE);
        /* Descriptors are in the last word of the object. */
    GC_typed_mark_proc_index = GC_new_proc_inner(GC_typed_mark_proc);
    /* Set up object kind with array descriptor. */
    GC_arobjfreelist = (ptr_t *)GC_new_free_list_inner();
    GC_array_mark_proc_index = GC_new_proc_inner(GC_array_mark_proc);
    GC_array_kind = GC_new_kind_inner(
                        (void **)GC_arobjfreelist,
                        GC_MAKE_PROC(GC_array_mark_proc_index, 0),
                        FALSE, TRUE);
    for (i = 0; i < WORDSZ/2; i++) {
        GC_descr d = (((word)(-1)) >> (WORDSZ - i)) << (WORDSZ - i);
        d |= GC_DS_BITMAP;
        GC_bm_table[i] = d;
    }
    UNLOCK();
}

mse * GC_typed_mark_proc(word * addr, mse * mark_stack_ptr,
                         mse * mark_stack_limit, word env)
{
    word bm = GC_ext_descriptors[env].ed_bitmap;
    word * current_p = addr;
    word current;
    ptr_t greatest_ha = GC_greatest_plausible_heap_addr;
    ptr_t least_ha = GC_least_plausible_heap_addr;
    DECLARE_HDR_CACHE;

    INIT_HDR_CACHE;
    for (; bm != 0; bm >>= 1, current_p++) {
        if (bm & 1) {
            current = *current_p;
            FIXUP_POINTER(current);
            if ((ptr_t)current >= least_ha && (ptr_t)current <= greatest_ha) {
                PUSH_CONTENTS((ptr_t)current, mark_stack_ptr,
                              mark_stack_limit, current_p, exit1);
            }
        }
    }
    if (GC_ext_descriptors[env].ed_continued) {
        /* Push an entry with the rest of the descriptor back onto the  */
        /* stack.  Thus we never do too much work at once.  Note that   */
        /* we also can't overflow the mark stack unless we actually     */
        /* mark something.                                              */
        mark_stack_ptr++;
        if (mark_stack_ptr >= mark_stack_limit) {
            mark_stack_ptr = GC_signal_mark_stack_overflow(mark_stack_ptr);
        }
        mark_stack_ptr -> mse_start = (ptr_t)(addr + WORDSZ);
        mark_stack_ptr -> mse_descr =
            GC_MAKE_PROC(GC_typed_mark_proc_index, env+1);
    }
    return(mark_stack_ptr);
}

/* Return the size of the object described by d.  It would be faster to */
/* store this directly, or to compute it as part of                     */
/* GC_push_complex_descriptor, but hopefully it doesn't matter.         */
word GC_descr_obj_size(complex_descriptor *d)
{
    switch(d -> TAG) {
        case LEAF_TAG:
            return(d -> ld.ld_nelements * d -> ld.ld_size);
        case ARRAY_TAG:
            return(d -> ad.ad_nelements
                   * GC_descr_obj_size(d -> ad.ad_element_descr));
        case SEQUENCE_TAG:
            return(GC_descr_obj_size(d -> sd.sd_first)
                   + GC_descr_obj_size(d -> sd.sd_second));
        default:
            ABORT("Bad complex descriptor");
            /*NOTREACHED*/ return 0;
    }
}

/* Push descriptors for the object at addr with complex descriptor d    */
/* onto the mark stack.  Return 0 if the mark stack overflowed.         */
mse * GC_push_complex_descriptor(word *addr, complex_descriptor *d,
                                 mse *msp, mse *msl)
{
    register ptr_t current = (ptr_t) addr;
    register word nelements;
    register word sz;
    register word i;

    switch(d -> TAG) {
        case LEAF_TAG:
        {
            register GC_descr descr = d -> ld.ld_descriptor;

            nelements = d -> ld.ld_nelements;
            if (msl - msp <= (ptrdiff_t)nelements) return(0);
            sz = d -> ld.ld_size;
            for (i = 0; i < nelements; i++) {
                msp++;
                msp -> mse_start = current;
                msp -> mse_descr = descr;
                current += sz;
            }
            return(msp);
        }
        case ARRAY_TAG:
        {
            register complex_descriptor *descr = d -> ad.ad_element_descr;

            nelements = d -> ad.ad_nelements;
            sz = GC_descr_obj_size(descr);
            for (i = 0; i < nelements; i++) {
                msp = GC_push_complex_descriptor((word *)current, descr,
                                                 msp, msl);
                if (msp == 0) return(0);
                current += sz;
            }
            return(msp);
        }
        case SEQUENCE_TAG:
        {
            sz = GC_descr_obj_size(d -> sd.sd_first);
            msp = GC_push_complex_descriptor((word *)current, d -> sd.sd_first,
                                             msp, msl);
            if (msp == 0) return(0);
            current += sz;
            msp = GC_push_complex_descriptor((word *)current, d -> sd.sd_second,
                                             msp, msl);
            return(msp);
        }
        default:
            ABORT("Bad complex descriptor");
            /*NOTREACHED*/ return 0;
    }
}

/*ARGSUSED*/
mse * GC_array_mark_proc(word * addr, mse * mark_stack_ptr,
                         mse * mark_stack_limit, word env)
{
    hdr * hhdr = HDR(addr);
    size_t sz = hhdr -> hb_sz;
    size_t nwords = BYTES_TO_WORDS(sz);
    complex_descriptor * descr = (complex_descriptor *)(addr[nwords-1]);
    mse * orig_mark_stack_ptr = mark_stack_ptr;
    mse * new_mark_stack_ptr;

    if (descr == 0) {
        /* Found a reference to a free list entry.  Ignore it. */
        return(orig_mark_stack_ptr);
    }
    /* In use counts were already updated when array descriptor was     */
    /* pushed.  Here we only replace it by subobject descriptors, so    */
    /* no update is necessary.                                          */
    new_mark_stack_ptr = GC_push_complex_descriptor(addr, descr,
                                                    mark_stack_ptr,
                                                    mark_stack_limit-1);
    if (new_mark_stack_ptr == 0) {
        /* Doesn't fit.  Conservatively push the whole array as a unit  */
        /* and request a mark stack expansion.                          */
        /* This cannot cause a mark stack overflow, since it replaces   */
        /* the original array entry.                                    */
        GC_mark_stack_too_small = TRUE;
        new_mark_stack_ptr = orig_mark_stack_ptr + 1;
        new_mark_stack_ptr -> mse_start = (ptr_t)addr;
        new_mark_stack_ptr -> mse_descr = sz | GC_DS_LENGTH;
    } else {
        /* Push descriptor itself */
        new_mark_stack_ptr++;
        new_mark_stack_ptr -> mse_start = (ptr_t)(addr + nwords - 1);
        new_mark_stack_ptr -> mse_descr = sizeof(word) | GC_DS_LENGTH;
    }
    return new_mark_stack_ptr;
}

GC_descr GC_make_descriptor(GC_bitmap bm, size_t len)
{
    signed_word last_set_bit = len - 1;
    GC_descr result;
    signed_word i;
#   define HIGH_BIT (((word)1) << (WORDSZ - 1))

    if (!GC_explicit_typing_initialized) GC_init_explicit_typing();
    while (last_set_bit >= 0 && !GC_get_bit(bm, last_set_bit)) last_set_bit--;
    if (last_set_bit < 0) return(0 /* no pointers */);
#   if ALIGNMENT == CPP_WORDSZ/8
    {
        register GC_bool all_bits_set = TRUE;
        for (i = 0; i < last_set_bit; i++) {
            if (!GC_get_bit(bm, i)) {
                all_bits_set = FALSE;
                break;
            }
        }
        if (all_bits_set) {
            /* An initial section contains all pointers.  Use length descriptor. */
            return (WORDS_TO_BYTES(last_set_bit+1) | GC_DS_LENGTH);
        }
    }
#   endif
    if (last_set_bit < BITMAP_BITS) {
        /* Hopefully the common case.                    */
        /* Build bitmap descriptor (with bits reversed). */
        result = HIGH_BIT;
        for (i = last_set_bit - 1; i >= 0; i--) {
            result >>= 1;
            if (GC_get_bit(bm, i)) result |= HIGH_BIT;
        }
        result |= GC_DS_BITMAP;
        return(result);
    } else {
        signed_word index;

        index = GC_add_ext_descriptor(bm, (word)last_set_bit+1);
        if (index == -1) return(WORDS_TO_BYTES(last_set_bit+1) | GC_DS_LENGTH);
                        /* Out of memory: use conservative      */
                        /* approximation.                       */
        result = GC_MAKE_PROC(GC_typed_mark_proc_index, (word)index);
        return result;
    }
}

/* ptr_t GC_clear_stack(); */

#define GENERAL_MALLOC(lb,k) \
    (void *)GC_clear_stack(GC_generic_malloc((word)lb, k))

#define GENERAL_MALLOC_IOP(lb,k) \
    (void *)GC_clear_stack(GC_generic_malloc_ignore_off_page(lb, k))

void * GC_malloc_explicitly_typed(size_t lb, GC_descr d)
{
    ptr_t op;
    ptr_t * opp;
    size_t lg;
    DCL_LOCK_STATE;

    lb += TYPD_EXTRA_BYTES;
    if(SMALL_OBJ(lb)) {
        lg = GC_size_map[lb];
        opp = &(GC_eobjfreelist[lg]);
        LOCK();
        if( (op = *opp) == 0 ) {
            UNLOCK();
            op = (ptr_t)GENERAL_MALLOC((word)lb, GC_explicit_kind);
            if (0 == op) return 0;
            lg = GC_size_map[lb];       /* May have been uninitialized. */
        } else {
            *opp = obj_link(op);
            obj_link(op) = 0;
            GC_bytes_allocd += GRANULES_TO_BYTES(lg);
            UNLOCK();
        }
    } else {
        op = (ptr_t)GENERAL_MALLOC((word)lb, GC_explicit_kind);
        if (op != NULL)
            lg = BYTES_TO_GRANULES(GC_size(op));
    }
    if (op != NULL)
        ((word *)op)[GRANULES_TO_WORDS(lg) - 1] = d;
    return((void *) op);
}

void * GC_malloc_explicitly_typed_ignore_off_page(size_t lb, GC_descr d)
{
    ptr_t op;
    ptr_t * opp;
    size_t lg;
    DCL_LOCK_STATE;

    lb += TYPD_EXTRA_BYTES;
    if( SMALL_OBJ(lb) ) {
        lg = GC_size_map[lb];
        opp = &(GC_eobjfreelist[lg]);
        LOCK();
        if( (op = *opp) == 0 ) {
            UNLOCK();
            op = (ptr_t)GENERAL_MALLOC_IOP(lb, GC_explicit_kind);
            lg = GC_size_map[lb];       /* May have been uninitialized. */
        } else {
            *opp = obj_link(op);
            obj_link(op) = 0;
            GC_bytes_allocd += GRANULES_TO_BYTES(lg);
            UNLOCK();
        }
    } else {
        op = (ptr_t)GENERAL_MALLOC_IOP(lb, GC_explicit_kind);
        if (op != NULL)
            lg = BYTES_TO_GRANULES(GC_size(op));
    }
    if (op != NULL)
        ((word *)op)[GRANULES_TO_WORDS(lg) - 1] = d;
    return((void *) op);
}

void * GC_calloc_explicitly_typed(size_t n, size_t lb, GC_descr d)
{
    ptr_t op;
    ptr_t * opp;
    size_t lg;
    GC_descr simple_descr;
    complex_descriptor *complex_descr;
    register int descr_type;
    struct LeafDescriptor leaf;
    DCL_LOCK_STATE;

    descr_type = GC_make_array_descriptor((word)n, (word)lb, d,
                                          &simple_descr, &complex_descr, &leaf);
    switch(descr_type) {
        case NO_MEM: return(0);
        case SIMPLE: return(GC_malloc_explicitly_typed(n*lb, simple_descr));
        case LEAF:
            lb *= n;
            lb += sizeof(struct LeafDescriptor) + TYPD_EXTRA_BYTES;
            break;
        case COMPLEX:
            lb *= n;
            lb += TYPD_EXTRA_BYTES;
            break;
    }
    if( SMALL_OBJ(lb) ) {
        lg = GC_size_map[lb];
        opp = &(GC_arobjfreelist[lg]);
        LOCK();
        if( (op = *opp) == 0 ) {
            UNLOCK();
            op = (ptr_t)GENERAL_MALLOC((word)lb, GC_array_kind);
            if (0 == op) return(0);
            lg = GC_size_map[lb];       /* May have been uninitialized. */
        } else {
            *opp = obj_link(op);
            obj_link(op) = 0;
            GC_bytes_allocd += GRANULES_TO_BYTES(lg);
            UNLOCK();
        }
    } else {
        op = (ptr_t)GENERAL_MALLOC((word)lb, GC_array_kind);
        if (0 == op) return(0);
        lg = BYTES_TO_GRANULES(GC_size(op));
    }
    if (descr_type == LEAF) {
        /* Set up the descriptor inside the object itself. */
        volatile struct LeafDescriptor * lp =
            (struct LeafDescriptor *)
            ((word *)op
             + GRANULES_TO_WORDS(lg)
             - (BYTES_TO_WORDS(sizeof(struct LeafDescriptor)) + 1));

        lp -> ld_tag = LEAF_TAG;
        lp -> ld_size = leaf.ld_size;
        lp -> ld_nelements = leaf.ld_nelements;
        lp -> ld_descriptor = leaf.ld_descriptor;
        ((volatile word *)op)[GRANULES_TO_WORDS(lg) - 1] = (word)lp;
    } else {
        extern unsigned GC_finalization_failures;
        unsigned ff = GC_finalization_failures;
        size_t lw = GRANULES_TO_WORDS(lg);

        ((word *)op)[lw - 1] = (word)complex_descr;
        /* Make sure the descriptor is cleared once there is any danger */
        /* it may have been collected.                                  */
        (void)
          GC_general_register_disappearing_link((void * *)
                                                ((word *)op+lw-1),
                                                (void *) op);
        if (ff != GC_finalization_failures) {
            /* Couldn't register it due to lack of memory.  Punt.       */
            /* This will probably fail too, but gives the recovery code */
            /* a chance.                                                */
            return(GC_malloc(n*lb));
        }
    }
    return((void *) op);
}
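
/*
 * Illustrative client-side usage of the array variant (an editorial sketch,
 * not part of this file; only the public gc_typed.h interface is assumed,
 * and the struct/variable names are hypothetical):
 *
 *   typedef struct node {
 *       struct node *left;   -- pointer words: traced
 *       struct node *right;
 *       int key;             -- non-pointer word: ignored by the marker
 *   } node;
 *
 *   GC_word bm[GC_BITMAP_SIZE(node)] = {0};
 *   GC_descr d;
 *   node *a;
 *
 *   GC_set_bit(bm, GC_WORD_OFFSET(node, left));
 *   GC_set_bit(bm, GC_WORD_OFFSET(node, right));
 *   d = GC_make_descriptor(bm, GC_WORD_LEN(node));
 *   a = (node *)GC_calloc_explicitly_typed(100, sizeof(node), d);
 *
 * Depending on the element layout and count, GC_make_array_descriptor
 * classifies such an array as SIMPLE, LEAF, or COMPLEX, and the last word
 * of the object then holds either the simple GC_descr or a pointer to the
 * descriptor, as set up in GC_calloc_explicitly_typed above.
 */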