Convert many if_marked htab_t caches to hash_table

ada/

	* gcc-interface/decl.c, gcc-interface/utils.c: Replace htab with
	hash_table.

cp/

	* cp-objcp-common.c: Use hash_table instead of htab.

gcc/

	* config/i386/i386.c, function.c, trans-mem.c, tree-core.h,
	tree.c, tree.h, ubsan.c, varasm.c: Use hash_table instead of htab.

From-SVN: r217867
Author: Trevor Saunders (committed by Trevor Saunders)
Date: 2014-11-20 15:10:19 +00:00
Commit: d242408fda (parent: aebf76a2d6)
14 changed files with 326 additions and 206 deletions
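Every file in this commit applies the same mechanical conversion: a GTY ((if_marked (...), param_is (...))) htab_t with free-standing hash, equality and marked-p callbacks becomes a GTY ((cache)) hash_table parameterized by a hasher derived from ggc_cache_hasher, whose handle_cache_entry hook decides during GC whether an entry is kept (and marked) or turned back into a deleted slot. A condensed sketch of the before/after shape, using the illustrative names my_hasher and my_cache rather than any table from the patch:

/* Before: a GC'd cache pruned via if_marked callbacks.

     static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
       htab_t my_cache;

   After: the hashing, equality and cache policy live in a hasher type.  */

struct my_hasher : ggc_cache_hasher<tree_map *>
{
  static hashval_t hash (tree_map *m) { return m->hash; }
  static bool equal (tree_map *a, tree_map *b) { return a->base.from == b->base.from; }

  /* Called for every slot during GC: keep and mark entries whose key is
     still live, otherwise recycle the slot as a deleted entry.  */
  static void
  handle_cache_entry (tree_map *&m)
  {
    extern void gt_ggc_mx (tree_map *&);
    if (m == HTAB_EMPTY_ENTRY || m == HTAB_DELETED_ENTRY)
      return;
    else if (ggc_marked_p (m->base.from))
      gt_ggc_mx (m);
    else
      m = static_cast<tree_map *> (HTAB_DELETED_ENTRY);
  }
};

static GTY ((cache)) hash_table<my_hasher> *my_cache;

/* Allocation moves from htab_create_ggc (n, hash_f, eq_f, 0) to create_ggc.  */
static void
init_my_cache (void)
{
  if (my_cache == NULL)
    my_cache = hash_table<my_hasher>::create_ggc (32);
}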

gcc/ChangeLog

@ -1,3 +1,8 @@
2014-11-20 Trevor Saunders <tsaunders@mozilla.com>
* config/i386/i386.c, function.c, trans-mem.c, tree-core.h,
tree.c, tree.h, ubsan.c, varasm.c: Use hash_table instead of htab.
2014-11-20 Trevor Saunders <tsaunders@mozilla.com>
* doc/gty.texi: Document the new cache gty attribute.

gcc/ada/ChangeLog

@ -1,3 +1,8 @@
2014-11-20 Trevor Saunders <tsaunders@mozilla.com>
* gcc-interface/decl.c, gcc-interface/utils.c: Replace htab with
hash_table.
2014-11-20 Robert Dewar <dewar@adacore.com>
* sem_prag.adb (Analyze_Pragma, case Elaborate): Forbid pragma

gcc/ada/gcc-interface/decl.c

@ -128,8 +128,35 @@ typedef struct variant_desc_d {
/* A hash table used to cache the result of annotate_value. */
static GTY ((if_marked ("tree_int_map_marked_p"),
param_is (struct tree_int_map))) htab_t annotate_value_cache;
struct value_annotation_hasher : ggc_cache_hasher<tree_int_map *>
{
static inline hashval_t
hash (tree_int_map *m)
{
return htab_hash_pointer (m->base.from);
}
static inline bool
equal (tree_int_map *a, tree_int_map *b)
{
return a->base.from == b->base.from;
}
static void
handle_cache_entry (tree_int_map *&m)
{
extern void gt_ggc_mx (tree_int_map *&);
if (m == HTAB_EMPTY_ENTRY || m == HTAB_DELETED_ENTRY)
return;
else if (ggc_marked_p (m->base.from))
gt_ggc_mx (m);
else
m = static_cast<tree_int_map *> (HTAB_DELETED_ENTRY);
}
};
static GTY ((cache)) hash_table<value_annotation_hasher> *annotate_value_cache;
static bool allocatable_size_p (tree, bool);
static void prepend_one_attribute (struct attrib **,
@ -7362,7 +7389,7 @@ annotate_value (tree gnu_size)
struct tree_int_map *e;
in.base.from = gnu_size;
e = (struct tree_int_map *) htab_find (annotate_value_cache, &in);
e = annotate_value_cache->find (&in);
if (e)
return (Node_Ref_Or_Val) e->to;
@ -7491,8 +7518,7 @@ annotate_value (tree gnu_size)
look up, so we have to search again. Allocating and inserting an
entry at that point would be an alternative, but then we'd better
discard the entry if we decided not to cache it. */
h = (struct tree_int_map **)
htab_find_slot (annotate_value_cache, &in, INSERT);
h = annotate_value_cache->find_slot (&in, INSERT);
gcc_assert (!*h);
*h = ggc_alloc<tree_int_map> ();
(*h)->base.from = gnu_size;
@ -8840,8 +8866,7 @@ void
init_gnat_decl (void)
{
/* Initialize the cache of annotated values. */
annotate_value_cache
= htab_create_ggc (512, tree_int_map_hash, tree_int_map_eq, 0);
annotate_value_cache = hash_table<value_annotation_hasher>::create_ggc (512);
}
/* Destroy data structures of the decl.c module. */
@ -8850,7 +8875,7 @@ void
destroy_gnat_decl (void)
{
/* Destroy the cache of annotated values. */
htab_delete (annotate_value_cache);
annotate_value_cache->empty ();
annotate_value_cache = NULL;
}
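For callers, the conversion replaces the htab_find / htab_find_slot wrappers with member functions on the table, and the casts from void * disappear because the hasher fixes the element type. A condensed, illustrative sketch of the lookup-then-insert idiom annotate_value uses above (lookup_or_insert is a hypothetical helper, not part of the patch):

/* Return the cached annotation for GNU_SIZE, or cache VALUE and return it.  */
static Node_Ref_Or_Val
lookup_or_insert (tree gnu_size, Node_Ref_Or_Val value)
{
  struct tree_int_map in;
  in.base.from = gnu_size;

  /* Typed lookup: no cast from void * is needed.  */
  tree_int_map *e = annotate_value_cache->find (&in);
  if (e)
    return (Node_Ref_Or_Val) e->to;

  /* Typed slot: find_slot returns tree_int_map ** directly.  */
  tree_int_map **h = annotate_value_cache->find_slot (&in, INSERT);
  *h = ggc_alloc<tree_int_map> ();
  (*h)->base.from = gnu_size;
  (*h)->to = value;
  return value;
}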

gcc/ada/gcc-interface/utils.c

@ -233,20 +233,23 @@ static GTY(()) vec<tree, va_gc> *global_renaming_pointers;
/* A chain of unused BLOCK nodes. */
static GTY((deletable)) tree free_block_chain;
static int pad_type_hash_marked_p (const void *p);
static hashval_t pad_type_hash_hash (const void *p);
static int pad_type_hash_eq (const void *p1, const void *p2);
/* A hash table of padded types. It is modelled on the generic type
hash table in tree.c, which must thus be used as a reference. */
struct GTY(()) pad_type_hash {
struct GTY((for_user)) pad_type_hash {
unsigned long hash;
tree type;
};
static GTY ((if_marked ("pad_type_hash_marked_p"),
param_is (struct pad_type_hash)))
htab_t pad_type_hash_table;
struct pad_type_hasher : ggc_cache_hasher<pad_type_hash *>
{
static inline hashval_t hash (pad_type_hash *t) { return t->hash; }
static bool equal (pad_type_hash *a, pad_type_hash *b);
static void handle_cache_entry (pad_type_hash *&);
};
static GTY ((cache))
hash_table<pad_type_hasher> *pad_type_hash_table;
static tree merge_sizes (tree, tree, tree, bool, bool);
static tree compute_related_constant (tree, tree);
@ -294,8 +297,7 @@ init_gnat_utils (void)
dummy_node_table = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
/* Initialize the hash table of padded types. */
pad_type_hash_table
= htab_create_ggc (512, pad_type_hash_hash, pad_type_hash_eq, 0);
pad_type_hash_table = hash_table<pad_type_hasher>::create_ggc (512);
}
/* Destroy data structures of the utils.c module. */
@ -312,7 +314,7 @@ destroy_gnat_utils (void)
dummy_node_table = NULL;
/* Destroy the hash table of padded types. */
htab_delete (pad_type_hash_table);
pad_type_hash_table->empty ();
pad_type_hash_table = NULL;
/* Invalidate the global renaming pointers. */
@ -1155,29 +1157,23 @@ make_type_from_size (tree type, tree size_tree, bool for_biased)
/* See if the data pointed to by the hash table slot is marked. */
static int
pad_type_hash_marked_p (const void *p)
void
pad_type_hasher::handle_cache_entry (pad_type_hash *&t)
{
const_tree const type = ((const struct pad_type_hash *) p)->type;
return ggc_marked_p (type);
extern void gt_ggc_mx (pad_type_hash *&);
if (t == HTAB_EMPTY_ENTRY || t == HTAB_DELETED_ENTRY)
return;
else if (ggc_marked_p (t->type))
gt_ggc_mx (t);
else
t = static_cast<pad_type_hash *> (HTAB_DELETED_ENTRY);
}
/* Return the cached hash value. */
/* Return true iff the padded types are equivalent. */
static hashval_t
pad_type_hash_hash (const void *p)
bool
pad_type_hasher::equal (pad_type_hash *t1, pad_type_hash *t2)
{
return ((const struct pad_type_hash *) p)->hash;
}
/* Return 1 iff the padded types are equivalent. */
static int
pad_type_hash_eq (const void *p1, const void *p2)
{
const struct pad_type_hash *const t1 = (const struct pad_type_hash *) p1;
const struct pad_type_hash *const t2 = (const struct pad_type_hash *) p2;
tree type1, type2;
if (t1->hash != t2->hash)
@ -1204,7 +1200,6 @@ lookup_and_insert_pad_type (tree type)
{
hashval_t hashcode;
struct pad_type_hash in, *h;
void **loc;
hashcode
= iterative_hash_object (TYPE_HASH (TREE_TYPE (TYPE_FIELDS (type))), 0);
@ -1214,16 +1209,14 @@ lookup_and_insert_pad_type (tree type)
in.hash = hashcode;
in.type = type;
h = (struct pad_type_hash *)
htab_find_with_hash (pad_type_hash_table, &in, hashcode);
h = pad_type_hash_table->find_with_hash (&in, hashcode);
if (h)
return h->type;
h = ggc_alloc<pad_type_hash> ();
h->hash = hashcode;
h->type = type;
loc = htab_find_slot_with_hash (pad_type_hash_table, h, hashcode, INSERT);
*loc = (void *)h;
*pad_type_hash_table->find_slot_with_hash (h, hashcode, INSERT) = h;
return NULL_TREE;
}
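Where the hash value is precomputed and stored in the entry, as for pad types, the explicit-hash member functions are used instead. The converted lookup_and_insert_pad_type above condenses to this shape (find_or_record_pad is an illustrative name, not from the patch):

/* Return the canonical padded type equal to *IN under HASHCODE, or record
   a new entry built from *IN and return NULL_TREE.  */
static tree
find_or_record_pad (struct pad_type_hash *in, hashval_t hashcode)
{
  struct pad_type_hash *h = pad_type_hash_table->find_with_hash (in, hashcode);
  if (h)
    return h->type;

  h = ggc_alloc<pad_type_hash> ();
  h->hash = hashcode;
  h->type = in->type;
  *pad_type_hash_table->find_slot_with_hash (h, hashcode, INSERT) = h;
  return NULL_TREE;
}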

gcc/config/i386/i386.c

@ -14055,14 +14055,34 @@ legitimize_tls_address (rtx x, enum tls_model model, bool for_mov)
to symbol DECL if BEIMPORT is true. Otherwise create or return the
unique refptr-DECL symbol corresponding to symbol DECL. */
static GTY((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
htab_t dllimport_map;
struct dllimport_hasher : ggc_cache_hasher<tree_map *>
{
static inline hashval_t hash (tree_map *m) { return m->hash; }
static inline bool
equal (tree_map *a, tree_map *b)
{
return a->base.from == b->base.from;
}
static void
handle_cache_entry (tree_map *&m)
{
extern void gt_ggc_mx (tree_map *&);
if (m == HTAB_EMPTY_ENTRY || m == HTAB_DELETED_ENTRY)
return;
else if (ggc_marked_p (m->base.from))
gt_ggc_mx (m);
else
m = static_cast<tree_map *> (HTAB_DELETED_ENTRY);
}
};
static GTY((cache)) hash_table<dllimport_hasher> *dllimport_map;
static tree
get_dllimport_decl (tree decl, bool beimport)
{
struct tree_map *h, in;
void **loc;
const char *name;
const char *prefix;
size_t namelen, prefixlen;
@ -14071,12 +14091,12 @@ get_dllimport_decl (tree decl, bool beimport)
rtx rtl;
if (!dllimport_map)
dllimport_map = htab_create_ggc (512, tree_map_hash, tree_map_eq, 0);
dllimport_map = hash_table<dllimport_hasher>::create_ggc (512);
in.hash = htab_hash_pointer (decl);
in.base.from = decl;
loc = htab_find_slot_with_hash (dllimport_map, &in, in.hash, INSERT);
h = (struct tree_map *) *loc;
tree_map **loc = dllimport_map->find_slot_with_hash (&in, in.hash, INSERT);
h = *loc;
if (h)
return h->to;

gcc/cp/ChangeLog

@ -1,3 +1,7 @@
2014-11-20 Trevor Saunders <tsaunders@mozilla.com>
* cp-objcp-common.c: Use hash_table instead of htab.
2014-11-19 Jason Merrill <jason@redhat.com>
PR c++/56041

gcc/cp/cp-objcp-common.c

@ -178,8 +178,8 @@ has_c_linkage (const_tree decl)
return DECL_EXTERN_C_P (decl);
}
static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
htab_t shadowed_var_for_decl;
static GTY ((cache))
hash_table<tree_decl_map_cache_hasher> *shadowed_var_for_decl;
/* Lookup a shadowed var for FROM, and return it if we find one. */
@ -189,8 +189,7 @@ decl_shadowed_for_var_lookup (tree from)
struct tree_decl_map *h, in;
in.base.from = from;
h = (struct tree_decl_map *)
htab_find_with_hash (shadowed_var_for_decl, &in, DECL_UID (from));
h = shadowed_var_for_decl->find_with_hash (&in, DECL_UID (from));
if (h)
return h->to;
return NULL_TREE;
@ -202,21 +201,18 @@ void
decl_shadowed_for_var_insert (tree from, tree to)
{
struct tree_decl_map *h;
void **loc;
h = ggc_alloc<tree_decl_map> ();
h->base.from = from;
h->to = to;
loc = htab_find_slot_with_hash (shadowed_var_for_decl, h, DECL_UID (from),
INSERT);
*(struct tree_decl_map **) loc = h;
*shadowed_var_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
}
void
init_shadowed_var_for_decl (void)
{
shadowed_var_for_decl = htab_create_ggc (512, tree_decl_map_hash,
tree_decl_map_eq, 0);
shadowed_var_for_decl
= hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
}
/* Return true if stmt can fall through. Used by block_may_fallthru

gcc/function.c

@ -116,10 +116,17 @@ struct machine_function * (*init_machine_status) (void);
struct function *cfun = 0;
/* These hashes record the prologue and epilogue insns. */
static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
htab_t prologue_insn_hash;
static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
htab_t epilogue_insn_hash;
struct insn_cache_hasher : ggc_cache_hasher<rtx>
{
static hashval_t hash (rtx x) { return htab_hash_pointer (x); }
static bool equal (rtx a, rtx b) { return a == b; }
};
static GTY((cache))
hash_table<insn_cache_hasher> *prologue_insn_hash;
static GTY((cache))
hash_table<insn_cache_hasher> *epilogue_insn_hash;
hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
@ -136,8 +143,9 @@ static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
can always export `prologue_epilogue_contains'. */
static void record_insns (rtx_insn *, rtx, htab_t *) ATTRIBUTE_UNUSED;
static bool contains (const_rtx, htab_t);
static void record_insns (rtx_insn *, rtx, hash_table<insn_cache_hasher> **)
ATTRIBUTE_UNUSED;
static bool contains (const_rtx, hash_table<insn_cache_hasher> *);
static void prepare_function_start (void);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
@ -5527,18 +5535,17 @@ get_arg_pointer_save_area (void)
for the first time. */
static void
record_insns (rtx_insn *insns, rtx end, htab_t *hashp)
record_insns (rtx_insn *insns, rtx end, hash_table<insn_cache_hasher> **hashp)
{
rtx_insn *tmp;
htab_t hash = *hashp;
hash_table<insn_cache_hasher> *hash = *hashp;
if (hash == NULL)
*hashp = hash
= htab_create_ggc (17, htab_hash_pointer, htab_eq_pointer, NULL);
*hashp = hash = hash_table<insn_cache_hasher>::create_ggc (17);
for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
{
void **slot = htab_find_slot (hash, tmp, INSERT);
rtx *slot = hash->find_slot (tmp, INSERT);
gcc_assert (*slot == NULL);
*slot = tmp;
}
@ -5551,18 +5558,18 @@ record_insns (rtx_insn *insns, rtx end, htab_t *hashp)
void
maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
{
htab_t hash;
void **slot;
hash_table<insn_cache_hasher> *hash;
rtx *slot;
hash = epilogue_insn_hash;
if (!hash || !htab_find (hash, insn))
if (!hash || !hash->find (insn))
{
hash = prologue_insn_hash;
if (!hash || !htab_find (hash, insn))
if (!hash || !hash->find (insn))
return;
}
slot = htab_find_slot (hash, copy, INSERT);
slot = hash->find_slot (copy, INSERT);
gcc_assert (*slot == NULL);
*slot = copy;
}
@ -5571,7 +5578,7 @@ maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
we can be running after reorg, SEQUENCE rtl is possible. */
static bool
contains (const_rtx insn, htab_t hash)
contains (const_rtx insn, hash_table<insn_cache_hasher> *hash)
{
if (hash == NULL)
return false;
@ -5581,12 +5588,12 @@ contains (const_rtx insn, htab_t hash)
rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
int i;
for (i = seq->len () - 1; i >= 0; i--)
if (htab_find (hash, seq->element (i)))
if (hash->find (seq->element (i)))
return true;
return false;
}
return htab_find (hash, insn) != NULL;
return hash->find (const_cast<rtx> (insn)) != NULL;
}
int
@ -6198,7 +6205,7 @@ reposition_prologue_and_epilogue_notes (void)
non-null is a signal that it is non-empty. */
if (prologue_insn_hash != NULL)
{
size_t len = htab_elements (prologue_insn_hash);
size_t len = prologue_insn_hash->elements ();
rtx_insn *insn, *last = NULL, *note = NULL;
/* Scan from the beginning until we reach the last prologue insn. */

gcc/trans-mem.c

@ -472,8 +472,29 @@ build_tm_abort_call (location_t loc, bool is_outer)
/* Map for arbitrary function replacement under TM, as created
by the tm_wrap attribute. */
static GTY((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
htab_t tm_wrap_map;
struct tm_wrapper_hasher : ggc_cache_hasher<tree_map *>
{
static inline hashval_t hash (tree_map *m) { return m->hash; }
static inline bool
equal (tree_map *a, tree_map *b)
{
return a->base.from == b->base.from;
}
static void
handle_cache_entry (tree_map *&m)
{
extern void gt_ggc_mx (tree_map *&);
if (m == HTAB_EMPTY_ENTRY || m == HTAB_DELETED_ENTRY)
return;
else if (ggc_marked_p (m->base.from))
gt_ggc_mx (m);
else
m = static_cast<tree_map *> (HTAB_DELETED_ENTRY);
}
};
static GTY((cache)) hash_table<tm_wrapper_hasher> *tm_wrap_map;
void
record_tm_replacement (tree from, tree to)
@ -489,15 +510,14 @@ record_tm_replacement (tree from, tree to)
DECL_UNINLINABLE (from) = 1;
if (tm_wrap_map == NULL)
tm_wrap_map = htab_create_ggc (32, tree_map_hash, tree_map_eq, 0);
tm_wrap_map = hash_table<tm_wrapper_hasher>::create_ggc (32);
h = ggc_alloc<tree_map> ();
h->hash = htab_hash_pointer (from);
h->base.from = from;
h->to = to;
slot = (struct tree_map **)
htab_find_slot_with_hash (tm_wrap_map, h, h->hash, INSERT);
slot = tm_wrap_map->find_slot_with_hash (h, h->hash, INSERT);
*slot = h;
}
@ -512,7 +532,7 @@ find_tm_replacement_function (tree fndecl)
in.base.from = fndecl;
in.hash = htab_hash_pointer (fndecl);
h = (struct tree_map *) htab_find_with_hash (tm_wrap_map, &in, in.hash);
h = tm_wrap_map->find_with_hash (&in, in.hash);
if (h)
return h->to;
}

gcc/tree-core.h

@ -1778,26 +1778,26 @@ struct GTY(()) tree_map_base {
/* Map from a tree to another tree. */
struct GTY(()) tree_map {
struct GTY((for_user)) tree_map {
struct tree_map_base base;
unsigned int hash;
tree to;
};
/* Map from a decl tree to another tree. */
struct GTY(()) tree_decl_map {
struct GTY((for_user)) tree_decl_map {
struct tree_map_base base;
tree to;
};
/* Map from a tree to an int. */
struct GTY(()) tree_int_map {
struct GTY((for_user)) tree_int_map {
struct tree_map_base base;
unsigned int to;
};
/* Map from a decl tree to a tree vector. */
struct GTY(()) tree_vec_map {
struct GTY((for_user)) tree_vec_map {
struct tree_map_base base;
vec<tree, va_gc> *to;
};

gcc/tree.c

@ -185,7 +185,7 @@ static GTY(()) int next_debug_decl_uid;
/* Since we cannot rehash a type after it is in the table, we have to
keep the hash code. */
struct GTY(()) type_hash {
struct GTY((for_user)) type_hash {
unsigned long hash;
tree type;
};
@ -193,6 +193,24 @@ struct GTY(()) type_hash {
/* Initial size of the hash table (rounded to next prime). */
#define TYPE_HASH_INITIAL_SIZE 1000
struct type_cache_hasher : ggc_cache_hasher<type_hash *>
{
static hashval_t hash (type_hash *t) { return t->hash; }
static bool equal (type_hash *a, type_hash *b);
static void
handle_cache_entry (type_hash *&t)
{
extern void gt_ggc_mx (type_hash *&);
if (t == HTAB_DELETED_ENTRY || t == HTAB_EMPTY_ENTRY)
return;
else if (ggc_marked_p (t->type))
gt_ggc_mx (t);
else
t = static_cast<type_hash *> (HTAB_DELETED_ENTRY);
}
};
/* Now here is the hash table. When recording a type, it is added to
the slot whose index is the hash code. Note that the hash table is
used for several kinds of types (function types, array types and
@ -200,8 +218,7 @@ struct GTY(()) type_hash {
same table, they are completely independent, and the hash code is
computed differently for each of these. */
static GTY ((if_marked ("type_hash_marked_p"), param_is (struct type_hash)))
htab_t type_hash_table;
static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
/* Hash table and temporary node for larger integer const values. */
static GTY (()) tree int_cst_node;
@ -233,22 +250,42 @@ static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
/* General tree->tree mapping structure for use in hash tables. */
static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
htab_t debug_expr_for_decl;
static GTY ((cache))
hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
htab_t value_expr_for_decl;
static GTY ((cache))
hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
static GTY ((if_marked ("tree_vec_map_marked_p"), param_is (struct tree_vec_map)))
htab_t debug_args_for_decl;
struct tree_vec_map_cache_hasher : ggc_cache_hasher<tree_vec_map *>
{
static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }
static bool
equal (tree_vec_map *a, tree_vec_map *b)
{
return a->base.from == b->base.from;
}
static void
handle_cache_entry (tree_vec_map *&m)
{
extern void gt_ggc_mx (tree_vec_map *&);
if (m == HTAB_EMPTY_ENTRY || m == HTAB_DELETED_ENTRY)
return;
else if (ggc_marked_p (m->base.from))
gt_ggc_mx (m);
else
m = static_cast<tree_vec_map *> (HTAB_DELETED_ENTRY);
}
};
static GTY ((cache))
hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
static void set_type_quals (tree, int);
static int type_hash_eq (const void *, const void *);
static hashval_t type_hash_hash (const void *);
static void print_type_hash_statistics (void);
static void print_debug_expr_statistics (void);
static void print_value_expr_statistics (void);
static int type_hash_marked_p (const void *);
static void type_hash_list (const_tree, inchash::hash &);
static void attribute_hash_list (const_tree, inchash::hash &);
@ -584,14 +621,14 @@ void
init_ttree (void)
{
/* Initialize the hash table of types. */
type_hash_table = htab_create_ggc (TYPE_HASH_INITIAL_SIZE, type_hash_hash,
type_hash_eq, 0);
type_hash_table
= hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
debug_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
tree_decl_map_eq, 0);
debug_expr_for_decl
= hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
value_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
tree_decl_map_eq, 0);
value_expr_for_decl
= hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
@ -6573,9 +6610,9 @@ static void
print_debug_expr_statistics (void)
{
fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
(long) htab_size (debug_expr_for_decl),
(long) htab_elements (debug_expr_for_decl),
htab_collisions (debug_expr_for_decl));
(long) debug_expr_for_decl->size (),
(long) debug_expr_for_decl->elements (),
debug_expr_for_decl->collisions ());
}
/* Print out the statistics for the DECL_VALUE_EXPR hash table. */
@ -6584,9 +6621,9 @@ static void
print_value_expr_statistics (void)
{
fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
(long) htab_size (value_expr_for_decl),
(long) htab_elements (value_expr_for_decl),
htab_collisions (value_expr_for_decl));
(long) value_expr_for_decl->size (),
(long) value_expr_for_decl->elements (),
value_expr_for_decl->collisions ());
}
/* Lookup a debug expression for FROM, and return it if we find one. */
@ -6597,8 +6634,7 @@ decl_debug_expr_lookup (tree from)
struct tree_decl_map *h, in;
in.base.from = from;
h = (struct tree_decl_map *)
htab_find_with_hash (debug_expr_for_decl, &in, DECL_UID (from));
h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
if (h)
return h->to;
return NULL_TREE;
@ -6610,14 +6646,11 @@ void
decl_debug_expr_insert (tree from, tree to)
{
struct tree_decl_map *h;
void **loc;
h = ggc_alloc<tree_decl_map> ();
h->base.from = from;
h->to = to;
loc = htab_find_slot_with_hash (debug_expr_for_decl, h, DECL_UID (from),
INSERT);
*(struct tree_decl_map **) loc = h;
*debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
}
/* Lookup a value expression for FROM, and return it if we find one. */
@ -6628,8 +6661,7 @@ decl_value_expr_lookup (tree from)
struct tree_decl_map *h, in;
in.base.from = from;
h = (struct tree_decl_map *)
htab_find_with_hash (value_expr_for_decl, &in, DECL_UID (from));
h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
if (h)
return h->to;
return NULL_TREE;
@ -6641,14 +6673,11 @@ void
decl_value_expr_insert (tree from, tree to)
{
struct tree_decl_map *h;
void **loc;
h = ggc_alloc<tree_decl_map> ();
h->base.from = from;
h->to = to;
loc = htab_find_slot_with_hash (value_expr_for_decl, h, DECL_UID (from),
INSERT);
*(struct tree_decl_map **) loc = h;
*value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
}
/* Lookup a vector of debug arguments for FROM, and return it if we
@ -6663,8 +6692,7 @@ decl_debug_args_lookup (tree from)
return NULL;
gcc_checking_assert (debug_args_for_decl != NULL);
in.base.from = from;
h = (struct tree_vec_map *)
htab_find_with_hash (debug_args_for_decl, &in, DECL_UID (from));
h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
if (h)
return &h->to;
return NULL;
@ -6677,19 +6705,17 @@ vec<tree, va_gc> **
decl_debug_args_insert (tree from)
{
struct tree_vec_map *h;
void **loc;
tree_vec_map **loc;
if (DECL_HAS_DEBUG_ARGS_P (from))
return decl_debug_args_lookup (from);
if (debug_args_for_decl == NULL)
debug_args_for_decl = htab_create_ggc (64, tree_vec_map_hash,
tree_vec_map_eq, 0);
debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
h = ggc_alloc<tree_vec_map> ();
h->base.from = from;
h->to = NULL;
loc = htab_find_slot_with_hash (debug_args_for_decl, h, DECL_UID (from),
INSERT);
*(struct tree_vec_map **) loc = h;
loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
*loc = h;
DECL_HAS_DEBUG_ARGS_P (from) = 1;
return &h->to;
}
@ -6715,12 +6741,9 @@ type_hash_list (const_tree list, inchash::hash &hstate)
/* Returns true iff the types are equivalent. */
static int
type_hash_eq (const void *va, const void *vb)
bool
type_cache_hasher::equal (type_hash *a, type_hash *b)
{
const struct type_hash *const a = (const struct type_hash *) va,
*const b = (const struct type_hash *) vb;
/* First test the things that are the same for all types. */
if (a->hash != b->hash
|| TREE_CODE (a->type) != TREE_CODE (b->type)
@ -6827,14 +6850,6 @@ type_hash_eq (const void *va, const void *vb)
return 1;
}
/* Return the cached hash value. */
static hashval_t
type_hash_hash (const void *item)
{
return ((const struct type_hash *) item)->hash;
}
/* Given TYPE, and HASHCODE its hash code, return the canonical
object for an identical type if one already exists.
Otherwise, return TYPE, and record it as the canonical object.
@ -6848,7 +6863,7 @@ tree
type_hash_canon (unsigned int hashcode, tree type)
{
type_hash in;
void **loc;
type_hash **loc;
/* The hash table only contains main variants, so ensure that's what we're
being passed. */
@ -6861,7 +6876,7 @@ type_hash_canon (unsigned int hashcode, tree type)
in.hash = hashcode;
in.type = type;
loc = htab_find_slot_with_hash (type_hash_table, &in, hashcode, INSERT);
loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
if (*loc)
{
tree t1 = ((type_hash *) *loc)->type;
@ -6881,31 +6896,19 @@ type_hash_canon (unsigned int hashcode, tree type)
h = ggc_alloc<type_hash> ();
h->hash = hashcode;
h->type = type;
*loc = (void *)h;
*loc = h;
return type;
}
}
/* See if the data pointed to by the type hash table is marked. We consider
it marked if the type is marked or if a debug type number or symbol
table entry has been made for the type. */
static int
type_hash_marked_p (const void *p)
{
const_tree const type = ((const struct type_hash *) p)->type;
return ggc_marked_p (type);
}
static void
print_type_hash_statistics (void)
{
fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
(long) htab_size (type_hash_table),
(long) htab_elements (type_hash_table),
htab_collisions (type_hash_table));
(long) type_hash_table->size (),
(long) type_hash_table->elements (),
type_hash_table->collisions ());
}
/* Compute a hash code for a list of attributes (chain of TREE_LIST nodes

gcc/tree.h

@ -4402,6 +4402,28 @@ extern unsigned int tree_map_hash (const void *);
extern unsigned int tree_decl_map_hash (const void *);
#define tree_decl_map_marked_p tree_map_base_marked_p
struct tree_decl_map_cache_hasher : ggc_cache_hasher<tree_decl_map *>
{
static hashval_t hash (tree_decl_map *m) { return tree_decl_map_hash (m); }
static bool
equal (tree_decl_map *a, tree_decl_map *b)
{
return tree_decl_map_eq (a, b);
}
static void
handle_cache_entry (tree_decl_map *&m)
{
extern void gt_ggc_mx (tree_decl_map *&);
if (m == HTAB_EMPTY_ENTRY || m == HTAB_DELETED_ENTRY)
return;
else if (ggc_marked_p (m->base.from))
gt_ggc_mx (m);
else
m = static_cast<tree_decl_map *> (HTAB_DELETED_ENTRY);
}
};
#define tree_int_map_eq tree_map_base_eq
#define tree_int_map_hash tree_map_base_hash
#define tree_int_map_marked_p tree_map_base_marked_p
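Because tree.h now defines tree_decl_map_cache_hasher in one place, clients such as cp-objcp-common.c and tree.c only declare a table over it and call create_ggc. A minimal illustrative client (my_decl_map is not a name from the patch):

/* A DECL_UID-keyed cache reusing the shared hasher from tree.h.  */
static GTY ((cache))
  hash_table<tree_decl_map_cache_hasher> *my_decl_map;

static void
init_my_decl_map (void)
{
  if (my_decl_map == NULL)
    my_decl_map = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
}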

gcc/ubsan.c

@ -71,24 +71,40 @@ along with GCC; see the file COPYING3. If not see
/* Map from a tree to a VAR_DECL tree. */
struct GTY(()) tree_type_map {
struct GTY((for_user)) tree_type_map {
struct tree_map_base type;
tree decl;
};
#define tree_type_map_eq tree_map_base_eq
#define tree_type_map_marked_p tree_map_base_marked_p
/* Hash from a tree in a tree_type_map. */
unsigned int
tree_type_map_hash (const void *item)
struct tree_type_map_cache_hasher : ggc_cache_hasher<tree_type_map *>
{
return TYPE_UID (((const struct tree_type_map *)item)->type.from);
}
static inline hashval_t
hash (tree_type_map *t)
{
return TYPE_UID (t->type.from);
}
static GTY ((if_marked ("tree_type_map_marked_p"), param_is (struct tree_type_map)))
htab_t decl_tree_for_type;
static inline bool
equal (tree_type_map *a, tree_type_map *b)
{
return a->type.from == b->type.from;
}
static void
handle_cache_entry (tree_type_map *&m)
{
extern void gt_ggc_mx (tree_type_map *&);
if (m == HTAB_EMPTY_ENTRY || m == HTAB_DELETED_ENTRY)
return;
else if (ggc_marked_p (m->type.from))
gt_ggc_mx (m);
else
m = static_cast<tree_type_map *> (HTAB_DELETED_ENTRY);
}
};
static GTY ((cache))
hash_table<tree_type_map_cache_hasher> *decl_tree_for_type;
/* Lookup a VAR_DECL for TYPE, and return it if we find one. */
@ -98,8 +114,8 @@ decl_for_type_lookup (tree type)
/* If the hash table is not initialized yet, create it now. */
if (decl_tree_for_type == NULL)
{
decl_tree_for_type = htab_create_ggc (10, tree_type_map_hash,
tree_type_map_eq, 0);
decl_tree_for_type
= hash_table<tree_type_map_cache_hasher>::create_ggc (10);
/* That also means we don't have to bother with the lookup. */
return NULL_TREE;
}
@ -107,8 +123,7 @@ decl_for_type_lookup (tree type)
struct tree_type_map *h, in;
in.type.from = type;
h = (struct tree_type_map *)
htab_find_with_hash (decl_tree_for_type, &in, TYPE_UID (type));
h = decl_tree_for_type->find_with_hash (&in, TYPE_UID (type));
return h ? h->decl : NULL_TREE;
}
@ -118,14 +133,11 @@ static void
decl_for_type_insert (tree type, tree decl)
{
struct tree_type_map *h;
void **slot;
h = ggc_alloc<tree_type_map> ();
h->type.from = type;
h->decl = decl;
slot = htab_find_slot_with_hash (decl_tree_for_type, h, TYPE_UID (type),
INSERT);
*(struct tree_type_map **) slot = h;
*decl_tree_for_type->find_slot_with_hash (h, TYPE_UID (type), INSERT) = h;
}
/* Helper routine, which encodes a value in the pointer_sized_int_node.

gcc/varasm.c

@ -5727,8 +5727,26 @@ assemble_alias (tree decl, tree target)
to its transaction aware clone. Note that tm_pure functions are
considered to be their own clone. */
static GTY((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
htab_t tm_clone_hash;
struct tm_clone_hasher : ggc_cache_hasher<tree_map *>
{
static hashval_t hash (tree_map *m) { return tree_map_hash (m); }
static bool equal (tree_map *a, tree_map *b) { return tree_map_eq (a, b); }
static void handle_cache_entry (tree_map *&e)
{
if (e != HTAB_EMPTY_ENTRY && e != HTAB_DELETED_ENTRY)
{
extern void gt_ggc_mx (tree_map *&);
if (ggc_marked_p (e->base.from))
gt_ggc_mx (e);
else
e = static_cast<tree_map *> (HTAB_DELETED_ENTRY);
}
}
};
static GTY((cache))
hash_table<tm_clone_hasher> *tm_clone_hash;
void
record_tm_clone_pair (tree o, tree n)
@ -5736,15 +5754,14 @@ record_tm_clone_pair (tree o, tree n)
struct tree_map **slot, *h;
if (tm_clone_hash == NULL)
tm_clone_hash = htab_create_ggc (32, tree_map_hash, tree_map_eq, 0);
tm_clone_hash = hash_table<tm_clone_hasher>::create_ggc (32);
h = ggc_alloc<tree_map> ();
h->hash = htab_hash_pointer (o);
h->base.from = o;
h->to = n;
slot = (struct tree_map **)
htab_find_slot_with_hash (tm_clone_hash, h, h->hash, INSERT);
slot = tm_clone_hash->find_slot_with_hash (h, h->hash, INSERT);
*slot = h;
}
@ -5757,8 +5774,7 @@ get_tm_clone_pair (tree o)
in.base.from = o;
in.hash = htab_hash_pointer (o);
h = (struct tree_map *) htab_find_with_hash (tm_clone_hash,
&in, in.hash);
h = tm_clone_hash->find_with_hash (&in, in.hash);
if (h)
return h->to;
}
@ -5773,19 +5789,6 @@ typedef struct tm_alias_pair
} tm_alias_pair;
/* Helper function for finish_tm_clone_pairs. Dump a hash table entry
into a VEC in INFO. */
static int
dump_tm_clone_to_vec (void **slot, void *info)
{
struct tree_map *map = (struct tree_map *) *slot;
vec<tm_alias_pair> *tm_alias_pairs = (vec<tm_alias_pair> *) info;
tm_alias_pair p = {DECL_UID (map->base.from), map->base.from, map->to};
tm_alias_pairs->safe_push (p);
return 1;
}
/* Dump the actual pairs to the .tm_clone_table section. */
static void
@ -5866,15 +5869,20 @@ finish_tm_clone_pairs (void)
to a vector, sort it, and dump the vector. */
/* Dump the hashtable to a vector. */
htab_traverse_noresize (tm_clone_hash, dump_tm_clone_to_vec,
(void *) &tm_alias_pairs);
tree_map *map;
hash_table<tm_clone_hasher>::iterator iter;
FOR_EACH_HASH_TABLE_ELEMENT (*tm_clone_hash, map, tree_map *, iter)
{
tm_alias_pair p = {DECL_UID (map->base.from), map->base.from, map->to};
tm_alias_pairs.safe_push (p);
}
/* Sort it. */
tm_alias_pairs.qsort (tm_alias_pair_cmp);
/* Dump it. */
dump_tm_clone_pairs (tm_alias_pairs);
htab_delete (tm_clone_hash);
tm_clone_hash->empty ();
tm_clone_hash = NULL;
tm_alias_pairs.release ();
}
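The varasm.c hunk also replaces callback-based traversal: instead of htab_traverse_noresize with a void ** callback, the table is walked directly with FOR_EACH_HASH_TABLE_ELEMENT. The same idiom as a standalone helper (collect_tm_pairs is an illustrative name, not from the patch):

/* Append every entry of TABLE to *OUT, mirroring the loop in
   finish_tm_clone_pairs above.  */
static void
collect_tm_pairs (hash_table<tm_clone_hasher> *table, vec<tm_alias_pair> *out)
{
  tree_map *map;
  hash_table<tm_clone_hasher>::iterator iter;

  FOR_EACH_HASH_TABLE_ELEMENT (*table, map, tree_map *, iter)
    {
      tm_alias_pair p = { DECL_UID (map->base.from), map->base.from, map->to };
      out->safe_push (p);
    }
}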