gcc/boehm-gc/typd_mlc.c
Jeff Sturm 30c3de1ffb Import GC 6.3alpha1.
	* BCC_MAKEFILE: Merge with GC 6.3alpha1 release.
	* ChangeLog: Likewise.
	* Makefile.am: Likewise.
	* Makefile.direct: Likewise.
	* Makefile.dj: Likewise.
	* allchblk.c: Likewise.
	* alloc.c: Likewise.
	* backgraph.c: Likewise.
	* configure.host: Likewise.
	* configure.in: Likewise.
	* dbg_mlc.c: Likewise.
	* dyn_load.c: Likewise.
	* finalize.c: Likewise.
	* gc_cpp.cc: Likewise.
	* gc_dlopen.c: Likewise.
	* gcj_mlc.c: Likewise.
	* if_mach.c: Likewise.
	* mach_dep.c: Likewise.
	* malloc.c: Likewise.
	* mallocx.c: Likewise.
	* mark.c: Likewise.
	* mark_rts.c: Likewise.
	* misc.c: Likewise.
	* os_dep.c: Likewise.
	* ptr_chck.c: Likewise.
	* reclaim.c: Likewise.
	* solaris_pthreads.c: Likewise.
	* solaris_threads.c: Likewise.
	* sparc_mach_dep.S: Likewise.
	* threadlibs.c: Likewise.
	* typd_mlc.c: Likewise.
	* version.h: Likewise.
	* win32_threads.c: Likewise.
	* Mac_files/MacOS_Test_config.h: Likewise.
	* Mac_files/MacOS_config.h: Likewise.
	* cord/cordbscs.c: Likewise.
	* cord/cordprnt.c: Likewise.
	* cord/de_win.c: Likewise.
	* doc/README: Likewise.
	* doc/README.MacOSX: Likewise.
	* doc/README.changes: Likewise.
	* doc/README.environment: Likewise.
	* doc/README.ews4800: Likewise.
	* doc/README.linux: Likewise.
	* doc/README.macros: Likewise.
	* doc/README.win32: Likewise.
	* doc/debugging.html: Likewise.
	* doc/gcdescr.html: Likewise.
	* doc/tree.html: Likewise.
	* include/Makefile.in: Likewise.
	* include/gc.h: Likewise.
	* include/gc_cpp.h: Likewise.
	* include/gc_local_alloc.h: Likewise.
	* include/gc_mark.h: Likewise.
	* include/gc_pthread_redirects.h: Likewise.
	* include/gc_typed.h: Likewise.
	* include/new_gc_alloc.h: Likewise.
	* include/private/dbg_mlc.h: Likewise.
	* include/private/gc_hdrs.h: Likewise.
	* include/private/gc_locks.h: Likewise.
	* include/private/gc_pmark.h: Likewise.
	* include/private/gc_priv.h: Likewise.
	* include/private/gcconfig.h: Likewise.
	* include/private/solaris_threads.h: Likewise.
	* include/private/specific.h: Likewise.
	* tests/test.c: Likewise.
	* tests/test_cpp.cc: Likewise.

	* configure: Rebuild.
	* Makefile.in: Rebuild.

	* mips_sgi_mach_dep.s: Add.

	* alpha_mach_dep.s: Remove.
	* irix_threads.c: Remove.
	* linux_threads.c: Remove.
	* mips_sgi_mach_dep.S: Remove.
	* missing: Remove.
	* powerpc_macosx_mach_dep.s: Remove.
	* doc/Makefile.am: Remove.
	* doc/Makefile.in: Remove.

From-SVN: r69880
2003-07-28 04:18:23 +00:00


/*
* Copyright (c) 1991-1994 by Xerox Corporation. All rights reserved.
* Copyright (c) 1999-2000 by Hewlett-Packard Company. All rights reserved.
*
* THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
* OR IMPLIED. ANY USE IS AT YOUR OWN RISK.
*
* Permission is hereby granted to use or copy this program
* for any purpose, provided the above notices are retained on all copies.
* Permission to modify the code and to distribute modified code is granted,
* provided the above notices are retained, and a notice that the code was
* modified is included with the above copyright notice.
*
*/
/*
* Some simple primitives for allocation with explicit type information.
* Simple objects are allocated such that they contain a GC_descr at the
* end (in the last allocated word). This descriptor may be a procedure
* which then examines an extended descriptor passed as its environment.
*
* Arrays are treated as simple objects if they have sufficiently simple
* structure. Otherwise they are allocated from an array kind that supplies
* a special mark procedure. These arrays contain a pointer to a
* complex_descriptor as their last word.
* This is done because the environment field is too small, and the collector
* must trace the complex_descriptor.
*
* Note that descriptors inside objects may appear cleared, if we encounter a
* false reference to an object on a free list. In the GC_descr case, this
* is OK, since a 0 descriptor corresponds to examining no fields.
* In the complex_descriptor case, we explicitly check for that case.
*
* MAJOR PARTS OF THIS CODE HAVE NOT BEEN TESTED AT ALL and are not testable,
* since they are not accessible through the current interface.
*/
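/* Illustrative sketch (kept out of the build with "#if 0"): how a client  */
/* might use the interface declared in gc_typed.h to allocate one of the   */
/* "simple" objects described above.  The struct two_ptrs and all local    */
/* names are hypothetical and exist only for this example.                 */
#if 0
#include "gc_typed.h"

struct two_ptrs {
    void *  left;       /* traced by the collector      */
    GC_word count;      /* ignored by the collector     */
    void *  right;      /* traced by the collector      */
};

static struct two_ptrs * example_alloc(void)
{
    GC_word bitmap[GC_BITMAP_SIZE(struct two_ptrs)] = {0};
    GC_descr d;

    /* Set the bits for the two pointer-valued words of the layout.    */
    GC_set_bit(bitmap, GC_WORD_OFFSET(struct two_ptrs, left));
    GC_set_bit(bitmap, GC_WORD_OFFSET(struct two_ptrs, right));
    d = GC_make_descriptor(bitmap, GC_WORD_LEN(struct two_ptrs));

    /* The descriptor d ends up stored in the last word of the object; */
    /* only the words marked in d are traced.                          */
    return (struct two_ptrs *)
        GC_malloc_explicitly_typed(sizeof(struct two_ptrs), d);
}
#endif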
#include "private/gc_pmark.h"
#include "gc_typed.h"
# define TYPD_EXTRA_BYTES (sizeof(word) - EXTRA_BYTES)
GC_bool GC_explicit_typing_initialized = FALSE;
int GC_explicit_kind; /* Object kind for objects with indirect */
/* (possibly extended) descriptors. */
int GC_array_kind; /* Object kind for objects with complex */
/* descriptors and GC_array_mark_proc. */
/* Extended descriptors. GC_typed_mark_proc understands these. */
/* These are used for simple objects that are larger than what */
/* can be described by a BITMAP_BITS sized bitmap. */
typedef struct {
word ed_bitmap; /* lsb corresponds to first word. */
GC_bool ed_continued; /* next entry is continuation. */
} ext_descr;
/* Array descriptors. GC_array_mark_proc understands these. */
/* We may eventually need to add provisions for headers and */
/* trailers. Hence we provide for tree structured descriptors, */
/* though we don't really use them currently. */
typedef union ComplexDescriptor {
struct LeafDescriptor { /* Describes simple array */
word ld_tag;
# define LEAF_TAG 1
word ld_size; /* bytes per element */
/* multiple of ALIGNMENT */
word ld_nelements; /* Number of elements. */
GC_descr ld_descriptor; /* A simple length, bitmap, */
/* or procedure descriptor. */
} ld;
struct ComplexArrayDescriptor {
word ad_tag;
# define ARRAY_TAG 2
word ad_nelements;
union ComplexDescriptor * ad_element_descr;
} ad;
struct SequenceDescriptor {
word sd_tag;
# define SEQUENCE_TAG 3
union ComplexDescriptor * sd_first;
union ComplexDescriptor * sd_second;
} sd;
} complex_descriptor;
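/* Illustrative sketch (kept out of the build with "#if 0"): how the       */
/* fields of a LeafDescriptor would be filled in for a hypothetical array  */
/* of 100 two-word elements.  The element descriptor passed in is assumed  */
/* to be a simple GC_DS_BITMAP or GC_DS_LENGTH descriptor.                 */
#if 0
static void example_fill_leaf(struct LeafDescriptor * ld, GC_descr elem_d)
{
    ld -> ld_tag = LEAF_TAG;
    ld -> ld_size = 2 * sizeof(word);   /* bytes per element            */
    ld -> ld_nelements = 100;           /* number of elements           */
    ld -> ld_descriptor = elem_d;       /* describes a single element   */
}
#endif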
#define TAG ld.ld_tag
ext_descr * GC_ext_descriptors; /* Points to array of extended */
/* descriptors. */
word GC_ed_size = 0; /* Current size of above arrays. */
# define ED_INITIAL_SIZE 100
word GC_avail_descr = 0; /* Next available slot. */
int GC_typed_mark_proc_index; /* Indices of my mark */
int GC_array_mark_proc_index; /* procedures. */
/* Add a multiword bitmap to GC_ext_descriptors arrays. Return */
/* starting index. */
/* Returns -1 on failure. */
/* Caller does not hold allocation lock. */
signed_word GC_add_ext_descriptor(bm, nbits)
GC_bitmap bm;
word nbits;
{
register size_t nwords = divWORDSZ(nbits + WORDSZ-1);
register signed_word result;
register word i;
register word last_part;
register int extra_bits;
DCL_LOCK_STATE;
DISABLE_SIGNALS();
LOCK();
while (GC_avail_descr + nwords >= GC_ed_size) {
ext_descr * new;
size_t new_size;
word ed_size = GC_ed_size;
UNLOCK();
ENABLE_SIGNALS();
if (ed_size == 0) {
new_size = ED_INITIAL_SIZE;
} else {
new_size = 2 * ed_size;
if (new_size > MAX_ENV) return(-1);
}
new = (ext_descr *) GC_malloc_atomic(new_size * sizeof(ext_descr));
if (new == 0) return(-1);
DISABLE_SIGNALS();
LOCK();
if (ed_size == GC_ed_size) {
if (GC_avail_descr != 0) {
BCOPY(GC_ext_descriptors, new,
GC_avail_descr * sizeof(ext_descr));
}
GC_ed_size = new_size;
GC_ext_descriptors = new;
} /* else another thread already resized it in the meantime */
}
result = GC_avail_descr;
for (i = 0; i < nwords-1; i++) {
GC_ext_descriptors[result + i].ed_bitmap = bm[i];
GC_ext_descriptors[result + i].ed_continued = TRUE;
}
last_part = bm[i];
/* Clear irrelevant bits. */
extra_bits = nwords * WORDSZ - nbits;
last_part <<= extra_bits;
last_part >>= extra_bits;
GC_ext_descriptors[result + i].ed_bitmap = last_part;
GC_ext_descriptors[result + i].ed_continued = FALSE;
GC_avail_descr += nwords;
UNLOCK();
ENABLE_SIGNALS();
return(result);
}
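/* Worked example (illustrative, "#if 0"): assuming WORDSZ == 32 and       */
/* nbits == 40, the loop above stores bm[0] as a continued entry and then  */
/* trims bm[1]; nwords == 2, so extra_bits == 64 - 40 == 24 and only the   */
/* 8 valid bits of the last word survive.  This keeps stale high bits in   */
/* the caller's bitmap out of the stored descriptor.                       */
#if 0
static word example_trim_last_word(word raw_last_word)
{
    word last_part = raw_last_word;
    last_part <<= 24;   /* discard the 24 bits beyond nbits             */
    last_part >>= 24;   /* bits for words 32..39 of the object remain   */
    return last_part;
}
#endif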
/* Table of bitmap descriptors for n word long all pointer objects. */
GC_descr GC_bm_table[WORDSZ/2];
/* Return a descriptor for the concatenation of 2 nwords long objects, */
/* each of which is described by descriptor. */
/* The result is known to be short enough to fit into a bitmap */
/* descriptor. */
/* Descriptor is a GC_DS_LENGTH or GC_DS_BITMAP descriptor. */
GC_descr GC_double_descr(descriptor, nwords)
register GC_descr descriptor;
register word nwords;
{
if ((descriptor & GC_DS_TAGS) == GC_DS_LENGTH) {
descriptor = GC_bm_table[BYTES_TO_WORDS((word)descriptor)];
}
descriptor |= (descriptor & ~GC_DS_TAGS) >> nwords;
return(descriptor);
}
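/* Worked example (illustrative, "#if 0", assumes GC_init_explicit_typing  */
/* has filled GC_bm_table): on a hypothetical 32-bit target, a 2-word      */
/* all-pointer object has the length descriptor                            */
/* WORDS_TO_BYTES(2) | GC_DS_LENGTH; GC_bm_table[2] rewrites it as a       */
/* bitmap descriptor with the two highest bits set (high bit == first      */
/* word).  Or-ing in that bitmap shifted right by nwords == 2 sets the     */
/* next two bits as well, so the result describes four consecutive         */
/* pointer words, i.e. two such objects laid end to end.                   */
#if 0
static void example_double(void)
{
    GC_descr two_ptr_words  = GC_bm_table[2];       /* bits 31,30 | GC_DS_BITMAP   */
    GC_descr four_ptr_words = GC_double_descr(two_ptr_words, 2);
                                                    /* bits 31..28 | GC_DS_BITMAP  */
    (void) four_ptr_words;
}
#endif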
complex_descriptor * GC_make_sequence_descriptor();
/* Build a descriptor for an array with nelements elements, */
/* each of which can be described by a simple descriptor. */
/* We try to optimize some common cases. */
/* If the result is COMPLEX, then a complex_descr* is returned */
/* in *complex_d. */
/* If the result is LEAF, then we built a LeafDescriptor in */
/* the structure pointed to by leaf. */
/* The tag in the leaf structure is not set. */
/* If the result is SIMPLE, then a GC_descr */
/* is returned in *simple_d. */
/* If the result is NO_MEM, then */
/* we failed to allocate the descriptor. */
/* The implementation knows that GC_DS_LENGTH is 0. */
/* *leaf, *complex_d, and *simple_d may be used as temporaries */
/* during the construction. */
# define COMPLEX 2
# define LEAF 1
# define SIMPLE 0
# define NO_MEM (-1)
int GC_make_array_descriptor(nelements, size, descriptor,
simple_d, complex_d, leaf)
word size;
word nelements;
GC_descr descriptor;
GC_descr *simple_d;
complex_descriptor **complex_d;
struct LeafDescriptor * leaf;
{
# define OPT_THRESHOLD 50
/* For larger arrays, we try to combine the descriptors of adjacent */
/* elements to speed up marking, and to reduce the amount */
/* of space needed on the mark stack. */
if ((descriptor & GC_DS_TAGS) == GC_DS_LENGTH) {
if ((word)descriptor == size) {
*simple_d = nelements * descriptor;
return(SIMPLE);
} else if ((word)descriptor == 0) {
*simple_d = (GC_descr)0;
return(SIMPLE);
}
}
if (nelements <= OPT_THRESHOLD) {
if (nelements <= 1) {
if (nelements == 1) {
*simple_d = descriptor;
return(SIMPLE);
} else {
*simple_d = (GC_descr)0;
return(SIMPLE);
}
}
} else if (size <= BITMAP_BITS/2
&& (descriptor & GC_DS_TAGS) != GC_DS_PROC
&& (size & (sizeof(word)-1)) == 0) {
int result =
GC_make_array_descriptor(nelements/2, 2*size,
GC_double_descr(descriptor,
BYTES_TO_WORDS(size)),
simple_d, complex_d, leaf);
if ((nelements & 1) == 0) {
return(result);
} else {
struct LeafDescriptor * one_element =
(struct LeafDescriptor *)
GC_malloc_atomic(sizeof(struct LeafDescriptor));
if (result == NO_MEM || one_element == 0) return(NO_MEM);
one_element -> ld_tag = LEAF_TAG;
one_element -> ld_size = size;
one_element -> ld_nelements = 1;
one_element -> ld_descriptor = descriptor;
switch(result) {
case SIMPLE:
{
struct LeafDescriptor * beginning =
(struct LeafDescriptor *)
GC_malloc_atomic(sizeof(struct LeafDescriptor));
if (beginning == 0) return(NO_MEM);
beginning -> ld_tag = LEAF_TAG;
beginning -> ld_size = size;
beginning -> ld_nelements = 1;
beginning -> ld_descriptor = *simple_d;
*complex_d = GC_make_sequence_descriptor(
(complex_descriptor *)beginning,
(complex_descriptor *)one_element);
break;
}
case LEAF:
{
struct LeafDescriptor * beginning =
(struct LeafDescriptor *)
GC_malloc_atomic(sizeof(struct LeafDescriptor));
if (beginning == 0) return(NO_MEM);
beginning -> ld_tag = LEAF_TAG;
beginning -> ld_size = leaf -> ld_size;
beginning -> ld_nelements = leaf -> ld_nelements;
beginning -> ld_descriptor = leaf -> ld_descriptor;
*complex_d = GC_make_sequence_descriptor(
(complex_descriptor *)beginning,
(complex_descriptor *)one_element);
break;
}
case COMPLEX:
*complex_d = GC_make_sequence_descriptor(
*complex_d,
(complex_descriptor *)one_element);
break;
}
return(COMPLEX);
}
}
{
leaf -> ld_size = size;
leaf -> ld_nelements = nelements;
leaf -> ld_descriptor = descriptor;
return(LEAF);
}
}
complex_descriptor * GC_make_sequence_descriptor(first, second)
complex_descriptor * first;
complex_descriptor * second;
{
struct SequenceDescriptor * result =
(struct SequenceDescriptor *)
GC_malloc(sizeof(struct SequenceDescriptor));
/* Can't result in overly conservative marking, since tags are */
/* very small integers. Probably faster than maintaining type */
/* info. */
if (result != 0) {
result -> sd_tag = SEQUENCE_TAG;
result -> sd_first = first;
result -> sd_second = second;
}
return((complex_descriptor *)result);
}
#ifdef UNDEFINED
complex_descriptor * GC_make_complex_array_descriptor(nelements, descr)
word nelements;
complex_descriptor * descr;
{
struct ComplexArrayDescriptor * result =
(struct ComplexArrayDescriptor *)
GC_malloc(sizeof(struct ComplexArrayDescriptor));
if (result != 0) {
result -> ad_tag = ARRAY_TAG;
result -> ad_nelements = nelements;
result -> ad_element_descr = descr;
}
return((complex_descriptor *)result);
}
#endif
ptr_t * GC_eobjfreelist;
ptr_t * GC_arobjfreelist;
mse * GC_typed_mark_proc GC_PROTO((register word * addr,
register mse * mark_stack_ptr,
mse * mark_stack_limit,
word env));
mse * GC_array_mark_proc GC_PROTO((register word * addr,
register mse * mark_stack_ptr,
mse * mark_stack_limit,
word env));
GC_descr GC_generic_array_descr;
/* Caller does not hold allocation lock. */
void GC_init_explicit_typing()
{
register int i;
DCL_LOCK_STATE;
# ifdef PRINTSTATS
if (sizeof(struct LeafDescriptor) % sizeof(word) != 0)
ABORT("Bad leaf descriptor size");
# endif
DISABLE_SIGNALS();
LOCK();
if (GC_explicit_typing_initialized) {
UNLOCK();
ENABLE_SIGNALS();
return;
}
GC_explicit_typing_initialized = TRUE;
/* Set up object kind with simple indirect descriptor. */
GC_eobjfreelist = (ptr_t *)
GC_INTERNAL_MALLOC((MAXOBJSZ+1)*sizeof(ptr_t), PTRFREE);
if (GC_eobjfreelist == 0) ABORT("Couldn't allocate GC_eobjfreelist");
BZERO(GC_eobjfreelist, (MAXOBJSZ+1)*sizeof(ptr_t));
GC_explicit_kind = GC_n_kinds++;
GC_obj_kinds[GC_explicit_kind].ok_freelist = GC_eobjfreelist;
GC_obj_kinds[GC_explicit_kind].ok_reclaim_list = 0;
GC_obj_kinds[GC_explicit_kind].ok_descriptor =
(((word)WORDS_TO_BYTES(-1)) | GC_DS_PER_OBJECT);
GC_obj_kinds[GC_explicit_kind].ok_relocate_descr = TRUE;
GC_obj_kinds[GC_explicit_kind].ok_init = TRUE;
/* Descriptors are in the last word of the object. */
GC_typed_mark_proc_index = GC_n_mark_procs;
GC_mark_procs[GC_typed_mark_proc_index] = GC_typed_mark_proc;
GC_n_mark_procs++;
/* Moving this up breaks DEC AXP compiler. */
/* Set up object kind with array descriptor. */
GC_arobjfreelist = (ptr_t *)
GC_INTERNAL_MALLOC((MAXOBJSZ+1)*sizeof(ptr_t), PTRFREE);
if (GC_arobjfreelist == 0) ABORT("Couldn't allocate GC_arobjfreelist");
BZERO(GC_arobjfreelist, (MAXOBJSZ+1)*sizeof(ptr_t));
if (GC_n_mark_procs >= MAX_MARK_PROCS)
ABORT("No slot for array mark proc");
GC_array_mark_proc_index = GC_n_mark_procs++;
if (GC_n_kinds >= MAXOBJKINDS)
ABORT("No kind available for array objects");
GC_array_kind = GC_n_kinds++;
GC_obj_kinds[GC_array_kind].ok_freelist = GC_arobjfreelist;
GC_obj_kinds[GC_array_kind].ok_reclaim_list = 0;
GC_obj_kinds[GC_array_kind].ok_descriptor =
GC_MAKE_PROC(GC_array_mark_proc_index, 0);
GC_obj_kinds[GC_array_kind].ok_relocate_descr = FALSE;
GC_obj_kinds[GC_array_kind].ok_init = TRUE;
/* Descriptors are in the last word of the object. */
GC_mark_procs[GC_array_mark_proc_index] = GC_array_mark_proc;
for (i = 0; i < WORDSZ/2; i++) {
GC_descr d = (((word)(-1)) >> (WORDSZ - i)) << (WORDSZ - i);
d |= GC_DS_BITMAP;
GC_bm_table[i] = d;
}
GC_generic_array_descr = GC_MAKE_PROC(GC_array_mark_proc_index, 0);
UNLOCK();
ENABLE_SIGNALS();
}
# if defined(__STDC__) || defined(__cplusplus)
mse * GC_typed_mark_proc(register word * addr,
register mse * mark_stack_ptr,
mse * mark_stack_limit,
word env)
# else
mse * GC_typed_mark_proc(addr, mark_stack_ptr, mark_stack_limit, env)
register word * addr;
register mse * mark_stack_ptr;
mse * mark_stack_limit;
word env;
# endif
{
register word bm = GC_ext_descriptors[env].ed_bitmap;
register word * current_p = addr;
register word current;
register ptr_t greatest_ha = GC_greatest_plausible_heap_addr;
register ptr_t least_ha = GC_least_plausible_heap_addr;
for (; bm != 0; bm >>= 1, current_p++) {
if (bm & 1) {
current = *current_p;
FIXUP_POINTER(current);
if ((ptr_t)current >= least_ha && (ptr_t)current <= greatest_ha) {
PUSH_CONTENTS((ptr_t)current, mark_stack_ptr,
mark_stack_limit, current_p, exit1);
}
}
}
if (GC_ext_descriptors[env].ed_continued) {
/* Push an entry with the rest of the descriptor back onto the */
/* stack. Thus we never do too much work at once. Note that */
/* we also can't overflow the mark stack unless we actually */
/* mark something. */
mark_stack_ptr++;
if (mark_stack_ptr >= mark_stack_limit) {
mark_stack_ptr = GC_signal_mark_stack_overflow(mark_stack_ptr);
}
mark_stack_ptr -> mse_start = addr + WORDSZ;
mark_stack_ptr -> mse_descr =
GC_MAKE_PROC(GC_typed_mark_proc_index, env+1);
}
return(mark_stack_ptr);
}
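/* Illustrative sketch (kept out of the build with "#if 0"): ed_bitmap is  */
/* read lsb-first (bit i set means word i may be a pointer), unlike the    */
/* msb-first GC_DS_BITMAP descriptors built by GC_make_descriptor.  For a  */
/* hypothetical bitmap of 0x5, only addr[0] and addr[2] would be examined  */
/* and, if they fall in the plausible heap range, pushed.                  */
#if 0
static void example_scan(word * addr)
{
    word bm = 0x5;              /* ...0101: words 0 and 2 hold pointers */
    word * current_p = addr;

    for (; bm != 0; bm >>= 1, current_p++) {
        if (bm & 1) {
            word candidate = *current_p;    /* range-checked and pushed  */
                                            /* in GC_typed_mark_proc     */
            (void) candidate;
        }
    }
}
#endif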
/* Return the size of the object described by d. It would be faster to */
/* store this directly, or to compute it as part of */
/* GC_push_complex_descriptor, but hopefully it doesn't matter. */
word GC_descr_obj_size(d)
register complex_descriptor *d;
{
switch(d -> TAG) {
case LEAF_TAG:
return(d -> ld.ld_nelements * d -> ld.ld_size);
case ARRAY_TAG:
return(d -> ad.ad_nelements
* GC_descr_obj_size(d -> ad.ad_element_descr));
case SEQUENCE_TAG:
return(GC_descr_obj_size(d -> sd.sd_first)
+ GC_descr_obj_size(d -> sd.sd_second));
default:
ABORT("Bad complex descriptor");
/*NOTREACHED*/ return 0; /*NOTREACHED*/
}
}
/* Push descriptors for the object at addr with complex descriptor d */
/* onto the mark stack. Return 0 if the mark stack overflowed. */
mse * GC_push_complex_descriptor(addr, d, msp, msl)
word * addr;
register complex_descriptor *d;
register mse * msp;
mse * msl;
{
register ptr_t current = (ptr_t) addr;
register word nelements;
register word sz;
register word i;
switch(d -> TAG) {
case LEAF_TAG:
{
register GC_descr descr = d -> ld.ld_descriptor;
nelements = d -> ld.ld_nelements;
if (msl - msp <= (ptrdiff_t)nelements) return(0);
sz = d -> ld.ld_size;
for (i = 0; i < nelements; i++) {
msp++;
msp -> mse_start = (word *)current;
msp -> mse_descr = descr;
current += sz;
}
return(msp);
}
case ARRAY_TAG:
{
register complex_descriptor *descr = d -> ad.ad_element_descr;
nelements = d -> ad.ad_nelements;
sz = GC_descr_obj_size(descr);
for (i = 0; i < nelements; i++) {
msp = GC_push_complex_descriptor((word *)current, descr,
msp, msl);
if (msp == 0) return(0);
current += sz;
}
return(msp);
}
case SEQUENCE_TAG:
{
sz = GC_descr_obj_size(d -> sd.sd_first);
msp = GC_push_complex_descriptor((word *)current, d -> sd.sd_first,
msp, msl);
if (msp == 0) return(0);
current += sz;
msp = GC_push_complex_descriptor((word *)current, d -> sd.sd_second,
msp, msl);
return(msp);
}
default:
ABORT("Bad complex descriptor");
/*NOTREACHED*/ return 0; /*NOTREACHED*/
}
}
/*ARGSUSED*/
# if defined(__STDC__) || defined(__cplusplus)
mse * GC_array_mark_proc(register word * addr,
register mse * mark_stack_ptr,
mse * mark_stack_limit,
word env)
# else
mse * GC_array_mark_proc(addr, mark_stack_ptr, mark_stack_limit, env)
register word * addr;
register mse * mark_stack_ptr;
mse * mark_stack_limit;
word env;
# endif
{
register hdr * hhdr = HDR(addr);
register word sz = hhdr -> hb_sz;
register complex_descriptor * descr = (complex_descriptor *)(addr[sz-1]);
mse * orig_mark_stack_ptr = mark_stack_ptr;
mse * new_mark_stack_ptr;
if (descr == 0) {
/* Found a reference to a free list entry. Ignore it. */
return(orig_mark_stack_ptr);
}
/* In use counts were already updated when array descriptor was */
/* pushed. Here we only replace it by subobject descriptors, so */
/* no update is necessary. */
new_mark_stack_ptr = GC_push_complex_descriptor(addr, descr,
mark_stack_ptr,
mark_stack_limit-1);
if (new_mark_stack_ptr == 0) {
/* Doesn't fit. Conservatively push the whole array as a unit */
/* and request a mark stack expansion. */
/* This cannot cause a mark stack overflow, since it replaces */
/* the original array entry. */
GC_mark_stack_too_small = TRUE;
new_mark_stack_ptr = orig_mark_stack_ptr + 1;
new_mark_stack_ptr -> mse_start = addr;
new_mark_stack_ptr -> mse_descr = WORDS_TO_BYTES(sz) | GC_DS_LENGTH;
} else {
/* Push descriptor itself */
new_mark_stack_ptr++;
new_mark_stack_ptr -> mse_start = addr + sz - 1;
new_mark_stack_ptr -> mse_descr = sizeof(word) | GC_DS_LENGTH;
}
return(new_mark_stack_ptr);
}
#if defined(__STDC__) || defined(__cplusplus)
GC_descr GC_make_descriptor(GC_bitmap bm, size_t len)
#else
GC_descr GC_make_descriptor(bm, len)
GC_bitmap bm;
size_t len;
#endif
{
register signed_word last_set_bit = len - 1;
register word result;
register int i;
# define HIGH_BIT (((word)1) << (WORDSZ - 1))
if (!GC_explicit_typing_initialized) GC_init_explicit_typing();
while (last_set_bit >= 0 && !GC_get_bit(bm, last_set_bit)) last_set_bit --;
if (last_set_bit < 0) return(0 /* no pointers */);
# if ALIGNMENT == CPP_WORDSZ/8
{
register GC_bool all_bits_set = TRUE;
for (i = 0; i < last_set_bit; i++) {
if (!GC_get_bit(bm, i)) {
all_bits_set = FALSE;
break;
}
}
if (all_bits_set) {
/* An initial section contains all pointers. Use length descriptor. */
return(WORDS_TO_BYTES(last_set_bit+1) | GC_DS_LENGTH);
}
}
# endif
if (last_set_bit < BITMAP_BITS) {
/* Hopefully the common case. */
/* Build bitmap descriptor (with bits reversed) */
result = HIGH_BIT;
for (i = last_set_bit - 1; i >= 0; i--) {
result >>= 1;
if (GC_get_bit(bm, i)) result |= HIGH_BIT;
}
result |= GC_DS_BITMAP;
return(result);
} else {
signed_word index;
index = GC_add_ext_descriptor(bm, (word)last_set_bit+1);
if (index == -1) return(WORDS_TO_BYTES(last_set_bit+1) | GC_DS_LENGTH);
/* Out of memory: use conservative */
/* approximation. */
result = GC_MAKE_PROC(GC_typed_mark_proc_index, (word)index);
return(result);
}
}
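/* Worked examples (illustrative, "#if 0"), assuming a 32-bit target with  */
/* ALIGNMENT == sizeof(word), so BITMAP_BITS == 30:                        */
/*  - a layout whose first 3 words are pointers, with no later bits set,   */
/*    yields WORDS_TO_BYTES(3) | GC_DS_LENGTH;                             */
/*  - a 10-word layout with pointers only in words 0 and 9 yields a        */
/*    GC_DS_BITMAP descriptor with bit 31 (word 0) and bit 22 (word 9)     */
/*    set, since the bitmap is stored reversed;                            */
/*  - a layout whose last pointer word is at index >= BITMAP_BITS yields   */
/*    GC_MAKE_PROC(GC_typed_mark_proc_index, i), where i is the slot       */
/*    returned by GC_add_ext_descriptor for the multiword bitmap.          */
#if 0
static GC_descr example_sparse_descr(void)
{
    GC_word bm[1] = {0};

    GC_set_bit(bm, 0);
    GC_set_bit(bm, 9);
    return GC_make_descriptor(bm, 10);  /* GC_DS_BITMAP, bits 31 and 22 set */
}
#endif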
ptr_t GC_clear_stack();
#define GENERAL_MALLOC(lb,k) \
(GC_PTR)GC_clear_stack(GC_generic_malloc((word)lb, k))
#define GENERAL_MALLOC_IOP(lb,k) \
(GC_PTR)GC_clear_stack(GC_generic_malloc_ignore_off_page(lb, k))
#if defined(__STDC__) || defined(__cplusplus)
void * GC_malloc_explicitly_typed(size_t lb, GC_descr d)
#else
char * GC_malloc_explicitly_typed(lb, d)
size_t lb;
GC_descr d;
#endif
{
register ptr_t op;
register ptr_t * opp;
register word lw;
DCL_LOCK_STATE;
lb += TYPD_EXTRA_BYTES;
if( SMALL_OBJ(lb) ) {
# ifdef MERGE_SIZES
lw = GC_size_map[lb];
# else
lw = ALIGNED_WORDS(lb);
# endif
opp = &(GC_eobjfreelist[lw]);
FASTLOCK();
if( !FASTLOCK_SUCCEEDED() || (op = *opp) == 0 ) {
FASTUNLOCK();
op = (ptr_t)GENERAL_MALLOC((word)lb, GC_explicit_kind);
if (0 == op) return 0;
# ifdef MERGE_SIZES
lw = GC_size_map[lb]; /* May have been uninitialized. */
# endif
} else {
*opp = obj_link(op);
obj_link(op) = 0;
GC_words_allocd += lw;
FASTUNLOCK();
}
} else {
op = (ptr_t)GENERAL_MALLOC((word)lb, GC_explicit_kind);
if (op != NULL)
lw = BYTES_TO_WORDS(GC_size(op));
}
if (op != NULL)
((word *)op)[lw - 1] = d;
return((GC_PTR) op);
}
#if defined(__STDC__) || defined(__cplusplus)
void * GC_malloc_explicitly_typed_ignore_off_page(size_t lb, GC_descr d)
#else
char * GC_malloc_explicitly_typed_ignore_off_page(lb, d)
size_t lb;
GC_descr d;
#endif
{
register ptr_t op;
register ptr_t * opp;
register word lw;
DCL_LOCK_STATE;
lb += TYPD_EXTRA_BYTES;
if( SMALL_OBJ(lb) ) {
# ifdef MERGE_SIZES
lw = GC_size_map[lb];
# else
lw = ALIGNED_WORDS(lb);
# endif
opp = &(GC_eobjfreelist[lw]);
FASTLOCK();
if( !FASTLOCK_SUCCEEDED() || (op = *opp) == 0 ) {
FASTUNLOCK();
op = (ptr_t)GENERAL_MALLOC_IOP(lb, GC_explicit_kind);
# ifdef MERGE_SIZES
lw = GC_size_map[lb]; /* May have been uninitialized. */
# endif
} else {
*opp = obj_link(op);
obj_link(op) = 0;
GC_words_allocd += lw;
FASTUNLOCK();
}
} else {
op = (ptr_t)GENERAL_MALLOC_IOP(lb, GC_explicit_kind);
if (op != NULL)
lw = BYTES_TO_WORDS(GC_size(op));
}
if (op != NULL)
((word *)op)[lw - 1] = d;
return((GC_PTR) op);
}
#if defined(__STDC__) || defined(__cplusplus)
void * GC_calloc_explicitly_typed(size_t n,
size_t lb,
GC_descr d)
#else
char * GC_calloc_explicitly_typed(n, lb, d)
size_t n;
size_t lb;
GC_descr d;
#endif
{
register ptr_t op;
register ptr_t * opp;
register word lw;
GC_descr simple_descr;
complex_descriptor *complex_descr;
register int descr_type;
struct LeafDescriptor leaf;
DCL_LOCK_STATE;
descr_type = GC_make_array_descriptor((word)n, (word)lb, d,
&simple_descr, &complex_descr, &leaf);
switch(descr_type) {
case NO_MEM: return(0);
case SIMPLE: return(GC_malloc_explicitly_typed(n*lb, simple_descr));
case LEAF:
lb *= n;
lb += sizeof(struct LeafDescriptor) + TYPD_EXTRA_BYTES;
break;
case COMPLEX:
lb *= n;
lb += TYPD_EXTRA_BYTES;
break;
}
if( SMALL_OBJ(lb) ) {
# ifdef MERGE_SIZES
lw = GC_size_map[lb];
# else
lw = ALIGNED_WORDS(lb);
# endif
opp = &(GC_arobjfreelist[lw]);
FASTLOCK();
if( !FASTLOCK_SUCCEEDED() || (op = *opp) == 0 ) {
FASTUNLOCK();
op = (ptr_t)GENERAL_MALLOC((word)lb, GC_array_kind);
if (0 == op) return(0);
# ifdef MERGE_SIZES
lw = GC_size_map[lb]; /* May have been uninitialized. */
# endif
} else {
*opp = obj_link(op);
obj_link(op) = 0;
GC_words_allocd += lw;
FASTUNLOCK();
}
} else {
op = (ptr_t)GENERAL_MALLOC((word)lb, GC_array_kind);
if (0 == op) return(0);
lw = BYTES_TO_WORDS(GC_size(op));
}
if (descr_type == LEAF) {
/* Set up the descriptor inside the object itself. */
VOLATILE struct LeafDescriptor * lp =
(struct LeafDescriptor *)
((word *)op
+ lw - (BYTES_TO_WORDS(sizeof(struct LeafDescriptor)) + 1));
lp -> ld_tag = LEAF_TAG;
lp -> ld_size = leaf.ld_size;
lp -> ld_nelements = leaf.ld_nelements;
lp -> ld_descriptor = leaf.ld_descriptor;
((VOLATILE word *)op)[lw - 1] = (word)lp;
} else {
extern unsigned GC_finalization_failures;
unsigned ff = GC_finalization_failures;
((word *)op)[lw - 1] = (word)complex_descr;
/* Make sure the descriptor is cleared once there is any danger */
/* it may have been collected. */
(void)
GC_general_register_disappearing_link((GC_PTR *)
((word *)op+lw-1),
(GC_PTR) op);
if (ff != GC_finalization_failures) {
/* Couldn't register it due to lack of memory. Punt. */
/* This will probably fail too, but gives the recovery code */
/* a chance. */
return(GC_malloc(n*lb));
}
}
return((GC_PTR) op);
}
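/* Illustrative sketch (kept out of the build with "#if 0"): a client      */
/* allocating an array of typed elements through                           */
/* GC_calloc_explicitly_typed.  The struct node and all local names are    */
/* hypothetical and exist only for this example.                           */
#if 0
#include "gc_typed.h"

struct node {
    struct node * next;     /* traced                                   */
    GC_word       value;    /* not traced                               */
};

static struct node * example_node_array(size_t n)
{
    GC_word bitmap[GC_BITMAP_SIZE(struct node)] = {0};
    GC_descr d;

    GC_set_bit(bitmap, GC_WORD_OFFSET(struct node, next));
    d = GC_make_descriptor(bitmap, GC_WORD_LEN(struct node));

    /* Depending on n and d, GC_make_array_descriptor above selects     */
    /* the SIMPLE, LEAF or COMPLEX representation for the array.        */
    return (struct node *)
        GC_calloc_explicitly_typed(n, sizeof(struct node), d);
}
#endif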