convert the rest of the users of pointer_map to hash_map

gcc/

	* hash-map.h (default_hashmap_traits): Adjust overloads of hash
	function to not conflict.
	* alias.c, cfgexpand.c, dse.c, except.h, gimple-expr.c,
	gimple-ssa-strength-reduction.c, gimple-ssa.h, ifcvt.c,
	lto-streamer-out.c, lto-streamer.h, tree-affine.c, tree-affine.h,
	tree-predcom.c, tree-scalar-evolution.c, tree-ssa-loop-im.c,
	tree-ssa-loop-niter.c, tree-ssa.c, value-prof.c: Use hash_map instead
	of pointer_map.

gcc/cp/

	* cp-tree.h, pt.c: Use hash_map instead of pointer_map.

gcc/lto/

	* lto-partition.c, lto.c: Use hash_map instead of pointer_map.

From-SVN: r213703
Trevor Saunders 2014-08-07 10:44:14 +00:00 committed by Trevor Saunders
parent 66b5e890ec
commit 39c8aaa4bf
26 changed files with 193 additions and 188 deletions
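
The per-file diffs below all apply the same mechanical conversion. As an editorial illustration only (not part of the patch), the sketch below summarizes how the old pointer_map calls map onto the hash_map API for a hypothetical tree-to-tree map; "key" and "val" are placeholders.

/* Editorial sketch only; the API is GCC's hash-map.h as used throughout
   this patch.  */
static void
pointer_map_to_hash_map_sketch (tree key, tree val)
{
  /* pointer_map_create ()  ->  new hash_map<...>  */
  hash_map<tree, tree> *m = new hash_map<tree, tree>;

  /* pointer_map_insert (m, key) handed back a void ** slot to store
     into; get_or_insert returns a typed reference instead, NULL_TREE
     when the key is new.  */
  tree &slot = m->get_or_insert (key);
  if (!slot)
    slot = val;

  /* Unconditional insertion: *pointer_map_insert (...) = v  ->  put.  */
  m->put (key, val);

  /* pointer_map_contains (m, key) returned a void ** slot or NULL;
     get returns a typed pointer to the value, or NULL.  */
  if (tree *p = m->get (key))
    gcc_assert (*p == val);

  /* pointer_map_destroy (m)  ->  delete m.  */
  delete m;
}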

gcc/ChangeLog

@ -1,3 +1,14 @@
2014-08-07 Trevor Saunders <tsaunders@mozilla.com>
* hash-map.h (default_hashmap_traits): Adjust overloads of hash
function to not conflict.
* alias.c, cfgexpand.c, dse.c, except.h, gimple-expr.c,
gimple-ssa-strength-reduction.c, gimple-ssa.h, ifcvt.c,
lto-streamer-out.c, lto-streamer.h, tree-affine.c, tree-affine.h,
tree-predcom.c, tree-scalar-evolution.c, tree-ssa-loop-im.c,
tree-ssa-loop-niter.c, tree-ssa.c, value-prof.c: Use hash_map instead
of pointer_map.
2014-08-07 Marek Polacek <polacek@redhat.com>
* fold-const.c (fold_binary_loc): Add folding of

gcc/alias.c

@ -302,10 +302,9 @@ ao_ref_from_mem (ao_ref *ref, const_rtx mem)
&& ! is_global_var (base)
&& cfun->gimple_df->decls_to_pointers != NULL)
{
void *namep;
namep = pointer_map_contains (cfun->gimple_df->decls_to_pointers, base);
tree *namep = cfun->gimple_df->decls_to_pointers->get (base);
if (namep)
ref->base = build_simple_mem_ref (*(tree *)namep);
ref->base = build_simple_mem_ref (*namep);
}
ref->ref_alias_set = MEM_ALIAS_SET (mem);

gcc/cfgexpand.c

@ -216,7 +216,7 @@ struct stack_var
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;
static struct pointer_map_t *decl_to_stack_part;
static hash_map<tree, size_t> *decl_to_stack_part;
/* Conflict bitmaps go on this obstack. This allows us to destroy
all of them in one big sweep. */
@ -300,10 +300,10 @@ add_stack_var (tree decl)
= XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
}
if (!decl_to_stack_part)
decl_to_stack_part = pointer_map_create ();
decl_to_stack_part = new hash_map<tree, size_t>;
v = &stack_vars[stack_vars_num];
* (size_t *)pointer_map_insert (decl_to_stack_part, decl) = stack_vars_num;
decl_to_stack_part->put (decl, stack_vars_num);
v->decl = decl;
v->size = tree_to_uhwi (DECL_SIZE_UNIT (SSAVAR (decl)));
@ -375,7 +375,7 @@ visit_op (gimple, tree op, tree, void *data)
&& DECL_P (op)
&& DECL_RTL_IF_SET (op) == pc_rtx)
{
size_t *v = (size_t *) pointer_map_contains (decl_to_stack_part, op);
size_t *v = decl_to_stack_part->get (op);
if (v)
bitmap_set_bit (active, *v);
}
@ -395,8 +395,7 @@ visit_conflict (gimple, tree op, tree, void *data)
&& DECL_P (op)
&& DECL_RTL_IF_SET (op) == pc_rtx)
{
size_t *v =
(size_t *) pointer_map_contains (decl_to_stack_part, op);
size_t *v = decl_to_stack_part->get (op);
if (v && bitmap_set_bit (active, *v))
{
size_t num = *v;
@ -447,8 +446,7 @@ add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
if (TREE_CODE (lhs) != VAR_DECL)
continue;
if (DECL_RTL_IF_SET (lhs) == pc_rtx
&& (v = (size_t *)
pointer_map_contains (decl_to_stack_part, lhs)))
&& (v = decl_to_stack_part->get (lhs)))
bitmap_clear_bit (work, *v);
}
else if (!is_gimple_debug (stmt))
@ -587,6 +585,26 @@ stack_var_cmp (const void *a, const void *b)
return 0;
}
struct part_traits : default_hashmap_traits
{
template<typename T>
static bool
is_deleted (T &e)
{ return e.m_value == reinterpret_cast<void *> (1); }
template<typename T> static bool is_empty (T &e) { return e.m_value == NULL; }
template<typename T>
static void
mark_deleted (T &e)
{ e.m_value = reinterpret_cast<T> (1); }
template<typename T>
static void
mark_empty (T &e)
{ e.m_value = NULL; }
};
typedef hash_map<size_t, bitmap, part_traits> part_hashmap;
/* If the points-to solution *PI points to variables that are in a partition
together with other variables add all partition members to the pointed-to
@ -594,7 +612,7 @@ stack_var_cmp (const void *a, const void *b)
static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
struct pointer_map_t *decls_to_partitions,
part_hashmap *decls_to_partitions,
hash_set<bitmap> *visited, bitmap temp)
{
bitmap_iterator bi;
@ -616,8 +634,7 @@ add_partitioned_vars_to_ptset (struct pt_solution *pt,
EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
if ((!temp
|| !bitmap_bit_p (temp, i))
&& (part = (bitmap *) pointer_map_contains (decls_to_partitions,
(void *)(size_t) i)))
&& (part = decls_to_partitions->get (i)))
bitmap_ior_into (temp, *part);
if (!bitmap_empty_p (temp))
bitmap_ior_into (pt->vars, temp);
@ -631,7 +648,7 @@ add_partitioned_vars_to_ptset (struct pt_solution *pt,
static void
update_alias_info_with_stack_vars (void)
{
struct pointer_map_t *decls_to_partitions = NULL;
part_hashmap *decls_to_partitions = NULL;
size_t i, j;
tree var = NULL_TREE;
@ -648,8 +665,8 @@ update_alias_info_with_stack_vars (void)
if (!decls_to_partitions)
{
decls_to_partitions = pointer_map_create ();
cfun->gimple_df->decls_to_pointers = pointer_map_create ();
decls_to_partitions = new part_hashmap;
cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
}
/* Create an SSA_NAME that points to the partition for use
@ -667,10 +684,8 @@ update_alias_info_with_stack_vars (void)
tree decl = stack_vars[j].decl;
unsigned int uid = DECL_PT_UID (decl);
bitmap_set_bit (part, uid);
*((bitmap *) pointer_map_insert (decls_to_partitions,
(void *)(size_t) uid)) = part;
*((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
decl)) = name;
decls_to_partitions->put (uid, part);
cfun->gimple_df->decls_to_pointers->put (decl, name);
if (TREE_ADDRESSABLE (decl))
TREE_ADDRESSABLE (name) = 1;
}
@ -703,7 +718,7 @@ update_alias_info_with_stack_vars (void)
add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
decls_to_partitions, &visited, temp);
pointer_map_destroy (decls_to_partitions);
delete decls_to_partitions;
BITMAP_FREE (temp);
}
}
@ -1530,7 +1545,7 @@ init_vars_expansion (void)
bitmap_obstack_initialize (&stack_var_bitmap_obstack);
/* A map from decl to stack partition. */
decl_to_stack_part = pointer_map_create ();
decl_to_stack_part = new hash_map<tree, size_t>;
/* Initialize local stack smashing state. */
has_protected_decls = false;
@ -1549,7 +1564,7 @@ fini_vars_expansion (void)
stack_vars = NULL;
stack_vars_sorted = NULL;
stack_vars_alloc = stack_vars_num = 0;
pointer_map_destroy (decl_to_stack_part);
delete decl_to_stack_part;
decl_to_stack_part = NULL;
}
@ -1666,7 +1681,6 @@ expand_used_vars (void)
tree var, outer_block = DECL_INITIAL (current_function_decl);
vec<tree> maybe_local_decls = vNULL;
rtx var_end_seq = NULL_RTX;
struct pointer_map_t *ssa_name_decls;
unsigned i;
unsigned len;
bool gen_stack_protect_signal = false;
@ -1686,7 +1700,7 @@ expand_used_vars (void)
init_vars_expansion ();
ssa_name_decls = pointer_map_create ();
hash_map<tree, tree> ssa_name_decls;
for (i = 0; i < SA.map->num_partitions; i++)
{
tree var = partition_to_var (SA.map, i);
@ -1697,10 +1711,10 @@ expand_used_vars (void)
we could have coalesced (those with the same type). */
if (SSA_NAME_VAR (var) == NULL_TREE)
{
void **slot = pointer_map_insert (ssa_name_decls, TREE_TYPE (var));
tree *slot = &ssa_name_decls.get_or_insert (TREE_TYPE (var));
if (!*slot)
*slot = (void *) create_tmp_reg (TREE_TYPE (var), NULL);
replace_ssa_name_symbol (var, (tree) *slot);
*slot = create_tmp_reg (TREE_TYPE (var), NULL);
replace_ssa_name_symbol (var, *slot);
}
/* Always allocate space for partitions based on VAR_DECLs. But for
@ -1727,7 +1741,6 @@ expand_used_vars (void)
}
}
}
pointer_map_destroy (ssa_name_decls);
if (flag_stack_protect == SPCT_FLAG_STRONG)
gen_stack_protect_signal
@ -1957,7 +1970,7 @@ maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
/* Maps the blocks that do not contain tree labels to rtx labels. */
static struct pointer_map_t *lab_rtx_for_bb;
static hash_map<basic_block, rtx> *lab_rtx_for_bb;
/* Returns the label_rtx expression for a label starting basic block BB. */
@ -1967,14 +1980,13 @@ label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
gimple_stmt_iterator gsi;
tree lab;
gimple lab_stmt;
void **elt;
if (bb->flags & BB_RTL)
return block_label (bb);
elt = pointer_map_contains (lab_rtx_for_bb, bb);
rtx *elt = lab_rtx_for_bb->get (bb);
if (elt)
return (rtx) *elt;
return *elt;
/* Find the tree label if it is present. */
@ -1991,9 +2003,9 @@ label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
return label_rtx (lab);
}
elt = pointer_map_insert (lab_rtx_for_bb, bb);
*elt = gen_label_rtx ();
return (rtx) *elt;
rtx l = gen_label_rtx ();
lab_rtx_for_bb->put (bb, l);
return l;
}
@ -4878,7 +4890,6 @@ expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
rtx note, last;
edge e;
edge_iterator ei;
void **elt;
if (dump_file)
fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
@ -4922,7 +4933,7 @@ expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
stmt = NULL;
}
elt = pointer_map_contains (lab_rtx_for_bb, bb);
rtx *elt = lab_rtx_for_bb->get (bb);
if (stmt || elt)
{
@ -4935,7 +4946,7 @@ expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
}
if (elt)
emit_label ((rtx) *elt);
emit_label (*elt);
/* Java emits line number notes in the top of labels.
??? Make this go away once line number notes are obsoleted. */
@ -5792,7 +5803,7 @@ pass_expand::execute (function *fun)
FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
e->flags &= ~EDGE_EXECUTABLE;
lab_rtx_for_bb = pointer_map_create ();
lab_rtx_for_bb = new hash_map<basic_block, rtx>;
FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
next_bb)
bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
@ -5816,7 +5827,7 @@ pass_expand::execute (function *fun)
/* Expansion is used by optimization passes too, set maybe_hot_insn_p
conservatively to true until they are all profile aware. */
pointer_map_destroy (lab_rtx_for_bb);
delete lab_rtx_for_bb;
free_histograms ();
construct_exit_block ();
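
The part_traits specialization added above is needed because the map is keyed on plain DECL_PT_UID integers rather than pointers, so the empty and deleted markers are encoded in the stored bitmap value instead of the key. A minimal hedged usage sketch (not from the patch; "decl" is a placeholder) follows.

/* Hypothetical sketch: with part_traits, a NULL value marks an empty
   slot and (void *) 1 a deleted one, so only real bitmaps may ever be
   stored as values.  */
static void
part_hashmap_sketch (tree decl)
{
  part_hashmap *map = new part_hashmap;
  bitmap part = BITMAP_ALLOC (NULL);
  unsigned int uid = DECL_PT_UID (decl);

  map->put (uid, part);
  if (bitmap *slot = map->get (uid))
    bitmap_set_bit (*slot, uid);

  BITMAP_FREE (part);
  delete map;
}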

gcc/cp/ChangeLog

@ -1,3 +1,7 @@
2014-08-07 Trevor Saunders <tsaunders@mozilla.com>
* cp-tree.h, pt.c: Use hash_map instead of pointer_map.
2014-08-06 Jason Merrill <jason@redhat.com>
* init.c (build_vec_init): Fix constant initialization of

gcc/cp/cp-tree.h

@ -25,6 +25,7 @@ along with GCC; see the file COPYING3. If not see
#include "function.h"
#include "hashtab.h"
#include "vec.h"
#include "hash-map.h"
/* In order for the format checking to accept the C++ front end
diagnostic framework extensions, you must include this file before
@ -1064,7 +1065,7 @@ struct GTY(()) saved_scope {
cp_binding_level *class_bindings;
cp_binding_level *bindings;
struct pointer_map_t *x_local_specializations;
hash_map<tree, tree> *GTY((skip)) x_local_specializations;
struct saved_scope *prev;
};

gcc/cp/pt.c

@ -1112,13 +1112,11 @@ retrieve_specialization (tree tmpl, tree args, hashval_t hash)
static tree
retrieve_local_specialization (tree tmpl)
{
void **slot;
if (local_specializations == NULL)
return NULL_TREE;
slot = pointer_map_contains (local_specializations, tmpl);
return slot ? (tree) *slot : NULL_TREE;
tree *slot = local_specializations->get (tmpl);
return slot ? *slot : NULL_TREE;
}
/* Returns nonzero iff DECL is a specialization of TMPL. */
@ -1730,10 +1728,7 @@ reregister_specialization (tree spec, tree tinfo, tree new_spec)
static void
register_local_specialization (tree spec, tree tmpl)
{
void **slot;
slot = pointer_map_insert (local_specializations, tmpl);
*slot = spec;
local_specializations->put (tmpl, spec);
}
/* TYPE is a class type. Returns true if TYPE is an explicitly
@ -9772,7 +9767,7 @@ tsubst_pack_expansion (tree t, tree args, tsubst_flags_t complain,
bool unsubstituted_packs = false;
int i, len = -1;
tree result;
struct pointer_map_t *saved_local_specializations = NULL;
hash_map<tree, tree> *saved_local_specializations = NULL;
bool need_local_specializations = false;
int levels;
@ -9926,7 +9921,7 @@ tsubst_pack_expansion (tree t, tree args, tsubst_flags_t complain,
case of recursive unification) might have bindings that we don't
want to use or alter. */
saved_local_specializations = local_specializations;
local_specializations = pointer_map_create ();
local_specializations = new hash_map<tree, tree>;
}
/* For each argument in each argument pack, substitute into the
@ -9975,7 +9970,7 @@ tsubst_pack_expansion (tree t, tree args, tsubst_flags_t complain,
if (need_local_specializations)
{
pointer_map_destroy (local_specializations);
delete local_specializations;
local_specializations = saved_local_specializations;
}
@ -20089,7 +20084,7 @@ instantiate_decl (tree d, int defer_ok,
synthesize_method (d);
else if (TREE_CODE (d) == FUNCTION_DECL)
{
struct pointer_map_t *saved_local_specializations;
hash_map<tree, tree> *saved_local_specializations;
tree subst_decl;
tree tmpl_parm;
tree spec_parm;
@ -20100,7 +20095,7 @@ instantiate_decl (tree d, int defer_ok,
saved_local_specializations = local_specializations;
/* Set up the list of local specializations. */
local_specializations = pointer_map_create ();
local_specializations = new hash_map<tree, tree>;
/* Set up context. */
if (DECL_OMP_DECLARE_REDUCTION_P (code_pattern)
@ -20164,7 +20159,7 @@ instantiate_decl (tree d, int defer_ok,
}
/* We don't need the local specializations any more. */
pointer_map_destroy (local_specializations);
delete local_specializations;
local_specializations = saved_local_specializations;
/* Finish the function. */

gcc/dse.c

@ -986,10 +986,9 @@ local_variable_can_escape (tree decl)
of the escape analysis. */
if (cfun->gimple_df->decls_to_pointers != NULL)
{
void *namep
= pointer_map_contains (cfun->gimple_df->decls_to_pointers, decl);
tree *namep = cfun->gimple_df->decls_to_pointers->get (decl);
if (namep)
return TREE_ADDRESSABLE (*(tree *)namep);
return TREE_ADDRESSABLE (*namep);
}
return false;

gcc/except.h

@ -30,7 +30,6 @@ along with GCC; see the file COPYING3. If not see
struct function;
struct eh_region_d;
struct pointer_map_t;
/* The type of an exception region. */
enum eh_region_type

gcc/gimple-expr.c

@ -883,10 +883,9 @@ mark_addressable (tree x)
&& cfun->gimple_df != NULL
&& cfun->gimple_df->decls_to_pointers != NULL)
{
void *namep
= pointer_map_contains (cfun->gimple_df->decls_to_pointers, x);
tree *namep = cfun->gimple_df->decls_to_pointers->get (x);
if (namep)
TREE_ADDRESSABLE (*(tree *)namep) = 1;
TREE_ADDRESSABLE (*namep) = 1;
}
}

gcc/gimple-ssa-strength-reduction.c

@ -434,7 +434,7 @@ cand_chain_hasher::equal (const value_type *chain1, const compare_type *chain2)
static hash_table<cand_chain_hasher> *base_cand_map;
/* Pointer map used by tree_to_aff_combination_expand. */
static struct pointer_map_t *name_expansions;
static hash_map<tree, name_expansion *> *name_expansions;
/* Pointer map embodying a mapping from bases to alternative bases. */
static hash_map<tree, tree> *alt_base_map;

gcc/gimple-ssa.h

@ -21,6 +21,7 @@ along with GCC; see the file COPYING3. If not see
#ifndef GCC_GIMPLE_SSA_H
#define GCC_GIMPLE_SSA_H
#include "hash-map.h"
#include "tree-ssa-operands.h"
/* This structure is used to map a gimple statement to a label,
@ -53,7 +54,7 @@ struct GTY(()) gimple_df {
/* A map of decls to artificial ssa-names that point to the partition
of the decl. */
struct pointer_map_t * GTY((skip(""))) decls_to_pointers;
hash_map<tree, tree> * GTY((skip(""))) decls_to_pointers;
/* Free list of SSA_NAMEs. */
vec<tree, va_gc> *free_ssanames;

gcc/hash-map.h

@ -36,12 +36,9 @@ struct default_hashmap_traits
return uintptr_t(p) >> 3;
}
/* The right thing to do here would be using is_integral to only allow
template arguments of integer type, but reimplementing that is a pain, so
we'll just promote everything to [u]int64_t and truncate to hashval_t. */
/* If the value converts to hashval_t just use it. */
static hashval_t hash (uint64_t v) { return v; }
static hashval_t hash (int64_t v) { return v; }
template<typename T> static hashval_t hash (T v) { return v; }
/* Return true if the two keys passed as arguments are equal. */
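
An editorial note on the first ChangeLog entry: the separate uint64_t/int64_t hash overloads are replaced by a single template, presumably so that the integer key types introduced elsewhere in this commit (size_t in cfgexpand.c, unsigned int in value-prof.c) resolve without ambiguity. A tiny hedged illustration, not part of the patch:

static hashval_t
integer_key_hash_sketch (size_t uid)
{
  /* With the templated overload, any key that converts to hashval_t is
     hashed by value.  */
  return default_hashmap_traits::hash (uid);
}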

gcc/ifcvt.c

@ -2727,7 +2727,7 @@ noce_process_if_block (struct noce_if_info *if_info)
static int
check_cond_move_block (basic_block bb,
struct pointer_map_t *vals,
hash_map<rtx, rtx> *vals,
vec<rtx> *regs,
rtx cond)
{
@ -2742,7 +2742,6 @@ check_cond_move_block (basic_block bb,
FOR_BB_INSNS (bb, insn)
{
rtx set, dest, src;
void **slot;
if (!NONDEBUG_INSN_P (insn) || JUMP_P (insn))
continue;
@ -2769,14 +2768,14 @@ check_cond_move_block (basic_block bb,
/* Don't try to handle this if the source register was
modified earlier in the block. */
if ((REG_P (src)
&& pointer_map_contains (vals, src))
&& vals->get (src))
|| (GET_CODE (src) == SUBREG && REG_P (SUBREG_REG (src))
&& pointer_map_contains (vals, SUBREG_REG (src))))
&& vals->get (SUBREG_REG (src))))
return FALSE;
/* Don't try to handle this if the destination register was
modified earlier in the block. */
if (pointer_map_contains (vals, dest))
if (vals->get (dest))
return FALSE;
/* Don't try to handle this if the condition uses the
@ -2790,8 +2789,7 @@ check_cond_move_block (basic_block bb,
&& modified_between_p (src, insn, NEXT_INSN (BB_END (bb))))
return FALSE;
slot = pointer_map_insert (vals, (void *) dest);
*slot = (void *) src;
vals->put (dest, src);
regs->safe_push (dest);
}
@ -2809,8 +2807,8 @@ check_cond_move_block (basic_block bb,
static bool
cond_move_convert_if_block (struct noce_if_info *if_infop,
basic_block bb, rtx cond,
struct pointer_map_t *then_vals,
struct pointer_map_t *else_vals,
hash_map<rtx, rtx> *then_vals,
hash_map<rtx, rtx> *else_vals,
bool else_block_p)
{
enum rtx_code code;
@ -2823,7 +2821,6 @@ cond_move_convert_if_block (struct noce_if_info *if_infop,
FOR_BB_INSNS (bb, insn)
{
rtx set, target, dest, t, e;
void **then_slot, **else_slot;
/* ??? Maybe emit conditional debug insn? */
if (!NONDEBUG_INSN_P (insn) || JUMP_P (insn))
@ -2833,10 +2830,10 @@ cond_move_convert_if_block (struct noce_if_info *if_infop,
dest = SET_DEST (set);
then_slot = pointer_map_contains (then_vals, dest);
else_slot = pointer_map_contains (else_vals, dest);
t = then_slot ? (rtx) *then_slot : NULL_RTX;
e = else_slot ? (rtx) *else_slot : NULL_RTX;
rtx *then_slot = then_vals->get (dest);
rtx *else_slot = else_vals->get (dest);
t = then_slot ? *then_slot : NULL_RTX;
e = else_slot ? *else_slot : NULL_RTX;
if (else_block_p)
{
@ -2882,8 +2879,6 @@ cond_move_process_if_block (struct noce_if_info *if_info)
rtx seq, loc_insn;
rtx reg;
int c;
struct pointer_map_t *then_vals;
struct pointer_map_t *else_vals;
vec<rtx> then_regs = vNULL;
vec<rtx> else_regs = vNULL;
unsigned int i;
@ -2891,13 +2886,13 @@ cond_move_process_if_block (struct noce_if_info *if_info)
/* Build a mapping for each block to the value used for each
register. */
then_vals = pointer_map_create ();
else_vals = pointer_map_create ();
hash_map<rtx, rtx> then_vals;
hash_map<rtx, rtx> else_vals;
/* Make sure the blocks are suitable. */
if (!check_cond_move_block (then_bb, then_vals, &then_regs, cond)
if (!check_cond_move_block (then_bb, &then_vals, &then_regs, cond)
|| (else_bb
&& !check_cond_move_block (else_bb, else_vals, &else_regs, cond)))
&& !check_cond_move_block (else_bb, &else_vals, &else_regs, cond)))
goto done;
/* Make sure the blocks can be used together. If the same register
@ -2909,16 +2904,16 @@ cond_move_process_if_block (struct noce_if_info *if_info)
c = 0;
FOR_EACH_VEC_ELT (then_regs, i, reg)
{
void **then_slot = pointer_map_contains (then_vals, reg);
void **else_slot = pointer_map_contains (else_vals, reg);
rtx *then_slot = then_vals.get (reg);
rtx *else_slot = else_vals.get (reg);
gcc_checking_assert (then_slot);
if (!else_slot)
++c;
else
{
rtx then_val = (rtx) *then_slot;
rtx else_val = (rtx) *else_slot;
rtx then_val = *then_slot;
rtx else_val = *else_slot;
if (!CONSTANT_P (then_val) && !CONSTANT_P (else_val)
&& !rtx_equal_p (then_val, else_val))
goto done;
@ -2928,8 +2923,8 @@ cond_move_process_if_block (struct noce_if_info *if_info)
/* Finish off c for MAX_CONDITIONAL_EXECUTE. */
FOR_EACH_VEC_ELT (else_regs, i, reg)
{
gcc_checking_assert (pointer_map_contains (else_vals, reg));
if (!pointer_map_contains (then_vals, reg))
gcc_checking_assert (else_vals.get (reg));
if (!then_vals.get (reg))
++c;
}
@ -2944,10 +2939,10 @@ cond_move_process_if_block (struct noce_if_info *if_info)
then do anything left in the else blocks. */
start_sequence ();
if (!cond_move_convert_if_block (if_info, then_bb, cond,
then_vals, else_vals, false)
&then_vals, &else_vals, false)
|| (else_bb
&& !cond_move_convert_if_block (if_info, else_bb, cond,
then_vals, else_vals, true)))
&then_vals, &else_vals, true)))
{
end_sequence ();
goto done;
@ -2988,8 +2983,6 @@ cond_move_process_if_block (struct noce_if_info *if_info)
success_p = TRUE;
done:
pointer_map_destroy (then_vals);
pointer_map_destroy (else_vals);
then_regs.release ();
else_regs.release ();
return success_p;
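
Note that then_vals and else_vals are now automatic variables rather than heap-allocated pointer_maps, so the explicit pointer_map_destroy calls at the "done" label need no replacement: the hash_map destructor releases the table. A hedged sketch of that pattern, with placeholder names:

static bool
local_map_sketch (rtx key, rtx val)
{
  hash_map<rtx, rtx> vals;      /* table freed automatically on return  */
  vals.put (key, val);
  return vals.get (key) != NULL;
}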

gcc/lto-streamer-out.c

@ -475,7 +475,7 @@ private:
hash_scc (struct output_block *ob, unsigned first, unsigned size);
unsigned int next_dfs_num;
struct pointer_map_t *sccstate;
hash_map<tree, sccs *> sccstate;
struct obstack sccstate_obstack;
};
@ -483,7 +483,6 @@ DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
bool single_p)
{
sccstack.create (0);
sccstate = pointer_map_create ();
gcc_obstack_init (&sccstate_obstack);
next_dfs_num = 1;
DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p, single_p);
@ -492,7 +491,6 @@ DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
DFS::~DFS ()
{
sccstack.release ();
pointer_map_destroy (sccstate);
obstack_free (&sccstate_obstack, NULL);
}
@ -1314,7 +1312,6 @@ DFS::DFS_write_tree (struct output_block *ob, sccs *from_state,
tree expr, bool ref_p, bool this_ref_p, bool single_p)
{
unsigned ix;
sccs **slot;
/* Handle special cases. */
if (expr == NULL_TREE)
@ -1328,7 +1325,7 @@ DFS::DFS_write_tree (struct output_block *ob, sccs *from_state,
if (streamer_tree_cache_lookup (ob->writer_cache, expr, &ix))
return;
slot = (sccs **)pointer_map_insert (sccstate, expr);
sccs **slot = &sccstate.get_or_insert (expr);
sccs *cstate = *slot;
if (!cstate)
{

gcc/lto-streamer.h

@ -561,7 +561,7 @@ struct GTY(()) lto_file_decl_data
struct gcov_ctr_summary GTY((skip)) profile_info;
/* Map assigning declarations their resolutions. */
pointer_map_t * GTY((skip)) resolution_map;
hash_map<tree, ld_plugin_symbol_resolution> * GTY((skip)) resolution_map;
};
typedef struct lto_file_decl_data *lto_file_decl_data_ptr;

gcc/lto/ChangeLog

@ -1,3 +1,7 @@
2014-08-07 Trevor Saunders <tsaunders@mozilla.com>
* lto-partition.c, lto.c: Use hash_map instead of pointer_map.
2014-08-02 Trevor Saunders <tsaunders@mozilla.com>
* lto-partition.c, lto-partition.h: Use hash_set instead of

gcc/lto/lto-partition.c

@ -268,13 +268,10 @@ lto_1_to_1_map (void)
{
symtab_node *node;
struct lto_file_decl_data *file_data;
struct pointer_map_t *pmap;
hash_map<lto_file_decl_data *, ltrans_partition> pmap;
ltrans_partition partition;
void **slot;
int npartitions = 0;
pmap = pointer_map_create ();
FOR_EACH_SYMBOL (node)
{
if (node->get_partitioning_class () != SYMBOL_PARTITION
@ -285,13 +282,12 @@ lto_1_to_1_map (void)
if (file_data)
{
slot = pointer_map_contains (pmap, file_data);
if (slot)
partition = (ltrans_partition) *slot;
ltrans_partition *slot = &pmap.get_or_insert (file_data);
if (*slot)
partition = *slot;
else
{
partition = new_partition (file_data->file_name);
slot = pointer_map_insert (pmap, file_data);
*slot = partition;
npartitions++;
}
@ -301,8 +297,7 @@ lto_1_to_1_map (void)
else
{
partition = new_partition ("");
slot = pointer_map_insert (pmap, NULL);
*slot = partition;
pmap.put (NULL, partition);
npartitions++;
}
@ -314,8 +309,6 @@ lto_1_to_1_map (void)
if (!npartitions)
new_partition ("empty");
pointer_map_destroy (pmap);
}
/* Maximal partitioning. Put every new symbol into new partition if possible. */

gcc/lto/lto.c

@ -1007,8 +1007,9 @@ register_resolution (struct lto_file_decl_data *file_data, tree decl,
if (resolution == LDPR_UNKNOWN)
return;
if (!file_data->resolution_map)
file_data->resolution_map = pointer_map_create ();
*pointer_map_insert (file_data->resolution_map, decl) = (void *)(size_t)resolution;
file_data->resolution_map
= new hash_map<tree, ld_plugin_symbol_resolution>;
file_data->resolution_map->put (decl, resolution);
}
/* Register DECL with the global symbol table and change its
@ -2887,7 +2888,6 @@ read_cgraph_and_symbols (unsigned nfiles, const char **fnames)
FILE *resolution;
int count = 0;
struct lto_file_decl_data **decl_data;
void **res;
symtab_node *snode;
init_cgraph ();
@ -3014,18 +3014,17 @@ read_cgraph_and_symbols (unsigned nfiles, const char **fnames)
/* Store resolutions into the symbol table. */
ld_plugin_symbol_resolution_t *res;
FOR_EACH_SYMBOL (snode)
if (snode->real_symbol_p ()
&& snode->lto_file_data
&& snode->lto_file_data->resolution_map
&& (res = pointer_map_contains (snode->lto_file_data->resolution_map,
snode->decl)))
snode->resolution
= (enum ld_plugin_symbol_resolution)(size_t)*res;
&& (res = snode->lto_file_data->resolution_map->get (snode->decl)))
snode->resolution = *res;
for (i = 0; all_file_decl_data[i]; i++)
if (all_file_decl_data[i]->resolution_map)
{
pointer_map_destroy (all_file_decl_data[i]->resolution_map);
delete all_file_decl_data[i]->resolution_map;
all_file_decl_data[i]->resolution_map = NULL;
}

gcc/tree-affine.c

@ -621,14 +621,13 @@ struct name_expansion
void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
struct pointer_map_t **cache ATTRIBUTE_UNUSED)
hash_map<tree, name_expansion *> **cache)
{
unsigned i;
aff_tree to_add, current, curre;
tree e, rhs;
gimple def;
widest_int scale;
void **slot;
struct name_expansion *exp;
aff_combination_zero (&to_add, comb->type);
@ -664,9 +663,9 @@ aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
continue;
if (!*cache)
*cache = pointer_map_create ();
slot = pointer_map_insert (*cache, e);
exp = (struct name_expansion *) *slot;
*cache = new hash_map<tree, name_expansion *>;
name_expansion **slot = &(*cache)->get_or_insert (e);
exp = *slot;
if (!exp)
{
@ -732,22 +731,19 @@ aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
struct pointer_map_t **cache)
hash_map<tree, name_expansion *> **cache)
{
tree_to_aff_combination (expr, type, comb);
aff_combination_expand (comb, cache);
}
/* Frees memory occupied by struct name_expansion in *VALUE. Callback for
pointer_map_traverse. */
hash_map::traverse. */
static bool
free_name_expansion (const void *key ATTRIBUTE_UNUSED, void **value,
void *data ATTRIBUTE_UNUSED)
bool
free_name_expansion (tree const &, name_expansion **value, void *)
{
struct name_expansion *const exp = (struct name_expansion *) *value;
free (exp);
free (*value);
return true;
}
@ -755,13 +751,13 @@ free_name_expansion (const void *key ATTRIBUTE_UNUSED, void **value,
tree_to_aff_combination_expand. */
void
free_affine_expand_cache (struct pointer_map_t **cache)
free_affine_expand_cache (hash_map<tree, name_expansion *> **cache)
{
if (!*cache)
return;
pointer_map_traverse (*cache, free_name_expansion, NULL);
pointer_map_destroy (*cache);
(*cache)->traverse<void *, free_name_expansion> (NULL);
delete (*cache);
*cache = NULL;
}
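
free_affine_expand_cache now walks the map with hash_map::traverse, whose callback receives typed key and value arguments instead of void pointers. A hedged sketch of the same traversal pattern, using a hypothetical counting callback rather than the patch's freeing one:

/* Hypothetical callback: counts entries instead of freeing them.  */
static bool
count_expansion (tree const &, name_expansion **, void *data)
{
  ++*(unsigned *) data;
  return true;                      /* keep walking  */
}

static unsigned
count_expansions (hash_map<tree, name_expansion *> *cache)
{
  unsigned n = 0;
  if (cache)
    cache->traverse<void *, count_expansion> (&n);
  return n;
}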

gcc/tree-affine.h

@ -20,6 +20,7 @@ along with GCC; see the file COPYING3. If not see
/* Affine combination of trees. We keep track of at most MAX_AFF_ELTS elements
to make things simpler; this is sufficient in most cases. */
#include "hash-map.h"
#include "wide-int.h"
#define MAX_AFF_ELTS 8
@ -60,6 +61,8 @@ struct aff_tree
tree rest;
};
struct name_expansion;
widest_int wide_int_ext_for_comb (const widest_int &, aff_tree *);
void aff_combination_const (aff_tree *, tree, const widest_int &);
void aff_combination_elt (aff_tree *, tree, tree);
@ -73,11 +76,11 @@ void tree_to_aff_combination (tree, tree, aff_tree *);
tree aff_combination_to_tree (aff_tree *);
void unshare_aff_combination (aff_tree *);
bool aff_combination_constant_multiple_p (aff_tree *, aff_tree *, widest_int *);
void aff_combination_expand (aff_tree *, struct pointer_map_t **);
void aff_combination_expand (aff_tree *, hash_map<tree, name_expansion *> **);
void tree_to_aff_combination_expand (tree, tree, aff_tree *,
struct pointer_map_t **);
hash_map<tree, name_expansion *> **);
tree get_inner_reference_aff (tree, aff_tree *, widest_int *);
void free_affine_expand_cache (struct pointer_map_t **);
void free_affine_expand_cache (hash_map<tree, name_expansion *> **);
bool aff_comb_cannot_overlap_p (aff_tree *, const widest_int &,
const widest_int &);

gcc/tree-predcom.c

@ -350,7 +350,7 @@ static bitmap looparound_phis;
/* Cache used by tree_to_aff_combination_expand. */
static struct pointer_map_t *name_expansions;
static hash_map<tree, name_expansion *> *name_expansions;
/* Dumps data reference REF to FILE. */

gcc/tree-scalar-evolution.c

@ -1403,7 +1403,7 @@ simplify_peeled_chrec (struct loop *loop, tree arg, tree init_cond)
{
aff_tree aff1, aff2;
tree ev, left, right, type, step_val;
pointer_map_t *peeled_chrec_map = NULL;
hash_map<tree, name_expansion *> *peeled_chrec_map = NULL;
ev = instantiate_parameters (loop, analyze_scalar_evolution (loop, arg));
if (ev == NULL_TREE || TREE_CODE (ev) != POLYNOMIAL_CHREC)

gcc/tree-ssa-loop-im.c

@ -200,7 +200,7 @@ static struct
vec<bitmap_head> all_refs_stored_in_loop;
/* Cache for expanding memory addresses. */
struct pointer_map_t *ttae_cache;
hash_map<tree, name_expansion *> *ttae_cache;
} memory_accesses;
/* Obstack for the bitmaps in the above data structures. */
@ -1610,7 +1610,7 @@ analyze_memory_references (void)
static bool
mem_refs_may_alias_p (mem_ref_p mem1, mem_ref_p mem2,
struct pointer_map_t **ttae_cache)
hash_map<tree, name_expansion *> **ttae_cache)
{
/* Perform BASE + OFFSET analysis -- if MEM1 and MEM2 are based on the same
object and their offset differ in such a way that the locations cannot

gcc/tree-ssa-loop-niter.c

@ -3108,14 +3108,12 @@ bound_index (vec<widest_int> bounds, const widest_int &bound)
static void
discover_iteration_bound_by_body_walk (struct loop *loop)
{
pointer_map_t *bb_bounds;
struct nb_iter_bound *elt;
vec<widest_int> bounds = vNULL;
vec<vec<basic_block> > queues = vNULL;
vec<basic_block> queue = vNULL;
ptrdiff_t queue_index;
ptrdiff_t latch_index = 0;
pointer_map_t *block_priority;
/* Discover what bounds may interest us. */
for (elt = loop->bounds; elt; elt = elt->next)
@ -3150,7 +3148,7 @@ discover_iteration_bound_by_body_walk (struct loop *loop)
/* For every basic block record the lowest bound that is guaranteed to
terminate the loop. */
bb_bounds = pointer_map_create ();
hash_map<basic_block, ptrdiff_t> bb_bounds;
for (elt = loop->bounds; elt; elt = elt->next)
{
widest_int bound = elt->bound;
@ -3166,17 +3164,15 @@ discover_iteration_bound_by_body_walk (struct loop *loop)
|| wi::ltu_p (bound, loop->nb_iterations_upper_bound))
{
ptrdiff_t index = bound_index (bounds, bound);
void **entry = pointer_map_contains (bb_bounds,
gimple_bb (elt->stmt));
ptrdiff_t *entry = bb_bounds.get (gimple_bb (elt->stmt));
if (!entry)
*pointer_map_insert (bb_bounds,
gimple_bb (elt->stmt)) = (void *)index;
bb_bounds.put (gimple_bb (elt->stmt), index);
else if ((ptrdiff_t)*entry > index)
*entry = (void *)index;
*entry = index;
}
}
block_priority = pointer_map_create ();
hash_map<basic_block, ptrdiff_t> block_priority;
/* Perform shortest path discovery loop->header ... loop->latch.
@ -3199,7 +3195,7 @@ discover_iteration_bound_by_body_walk (struct loop *loop)
queues.safe_grow_cleared (queue_index + 1);
queue.safe_push (loop->header);
queues[queue_index] = queue;
*pointer_map_insert (block_priority, loop->header) = (void *)queue_index;
block_priority.put (loop->header, queue_index);
for (; queue_index >= 0; queue_index--)
{
@ -3209,7 +3205,6 @@ discover_iteration_bound_by_body_walk (struct loop *loop)
{
basic_block bb;
ptrdiff_t bound_index = queue_index;
void **entry;
edge e;
edge_iterator ei;
@ -3217,20 +3212,19 @@ discover_iteration_bound_by_body_walk (struct loop *loop)
bb = queue.pop ();
/* OK, we later inserted the BB with lower priority, skip it. */
if ((ptrdiff_t)*pointer_map_contains (block_priority, bb) > queue_index)
if (*block_priority.get (bb) > queue_index)
continue;
/* See if we can improve the bound. */
entry = pointer_map_contains (bb_bounds, bb);
if (entry && (ptrdiff_t)*entry < bound_index)
bound_index = (ptrdiff_t)*entry;
ptrdiff_t *entry = bb_bounds.get (bb);
if (entry && *entry < bound_index)
bound_index = *entry;
/* Insert succesors into the queue, watch for latch edge
and record greatest index we saw. */
FOR_EACH_EDGE (e, ei, bb->succs)
{
bool insert = false;
void **entry;
if (loop_exit_edge_p (loop, e))
continue;
@ -3238,15 +3232,15 @@ discover_iteration_bound_by_body_walk (struct loop *loop)
if (e == loop_latch_edge (loop)
&& latch_index < bound_index)
latch_index = bound_index;
else if (!(entry = pointer_map_contains (block_priority, e->dest)))
else if (!(entry = block_priority.get (e->dest)))
{
insert = true;
*pointer_map_insert (block_priority, e->dest) = (void *)bound_index;
block_priority.put (e->dest, bound_index);
}
else if ((ptrdiff_t)*entry < bound_index)
else if (*entry < bound_index)
{
insert = true;
*entry = (void *)bound_index;
*entry = bound_index;
}
if (insert)
@ -3271,8 +3265,6 @@ discover_iteration_bound_by_body_walk (struct loop *loop)
queues.release ();
bounds.release ();
pointer_map_destroy (bb_bounds);
pointer_map_destroy (block_priority);
}
/* See if every path cross the loop goes through a statement that is known

gcc/tree-ssa.c

@ -1158,7 +1158,7 @@ delete_tree_ssa (void)
cfun->gimple_df->default_defs = NULL;
pt_solution_reset (&cfun->gimple_df->escaped);
if (cfun->gimple_df->decls_to_pointers != NULL)
pointer_map_destroy (cfun->gimple_df->decls_to_pointers);
delete cfun->gimple_df->decls_to_pointers;
cfun->gimple_df->decls_to_pointers = NULL;
cfun->gimple_df->modified_noreturn_calls = NULL;
cfun->gimple_df = NULL;

gcc/value-prof.c

@ -1209,7 +1209,22 @@ gimple_mod_subtract_transform (gimple_stmt_iterator *si)
return true;
}
static pointer_map_t *cgraph_node_map = 0;
struct profile_id_traits : default_hashmap_traits
{
template<typename T>
static bool
is_deleted (T &e)
{
return e.m_key == UINT_MAX;
}
template<typename T> static bool is_empty (T &e) { return e.m_key == 0; }
template<typename T> static void mark_deleted (T &e) { e.m_key = UINT_MAX; }
template<typename T> static void mark_empty (T &e) { e.m_key = 0; }
};
static hash_map<unsigned int, cgraph_node *, profile_id_traits> *
cgraph_node_map = 0;
/* Returns true if node graph is initialized. This
is used to test if profile_id has been created
@ -1229,17 +1244,17 @@ void
init_node_map (bool local)
{
struct cgraph_node *n;
cgraph_node_map = pointer_map_create ();
cgraph_node_map
= new hash_map<unsigned int, cgraph_node *, profile_id_traits>;
FOR_EACH_DEFINED_FUNCTION (n)
if (n->has_gimple_body_p ())
{
void **val;
cgraph_node **val;
if (local)
{
n->profile_id = coverage_compute_profile_id (n);
while ((val = pointer_map_contains (cgraph_node_map,
(void *)(size_t)n->profile_id))
while ((val = cgraph_node_map->get (n->profile_id))
|| !n->profile_id)
{
if (dump_file)
@ -1248,8 +1263,8 @@ init_node_map (bool local)
n->profile_id,
n->name (),
n->order,
(*(symtab_node **)val)->name (),
(*(symtab_node **)val)->order);
(*val)->name (),
(*val)->order);
n->profile_id = (n->profile_id + 1) & 0x7fffffff;
}
}
@ -1263,8 +1278,7 @@ init_node_map (bool local)
n->order);
continue;
}
else if ((val = pointer_map_contains (cgraph_node_map,
(void *)(size_t)n->profile_id)))
else if ((val = cgraph_node_map->get (n->profile_id)))
{
if (dump_file)
fprintf (dump_file,
@ -1276,8 +1290,7 @@ init_node_map (bool local)
*val = NULL;
continue;
}
*pointer_map_insert (cgraph_node_map,
(void *)(size_t)n->profile_id) = (void *)n;
cgraph_node_map->put (n->profile_id, n);
}
}
@ -1286,7 +1299,7 @@ init_node_map (bool local)
void
del_node_map (void)
{
pointer_map_destroy (cgraph_node_map);
delete cgraph_node_map;
}
/* Return cgraph node for function with pid */
@ -1294,10 +1307,9 @@ del_node_map (void)
struct cgraph_node*
find_func_by_profile_id (int profile_id)
{
void **val = pointer_map_contains (cgraph_node_map,
(void *)(size_t)profile_id);
cgraph_node **val = cgraph_node_map->get (profile_id);
if (val)
return (struct cgraph_node *)*val;
return *val;
else
return NULL;
}
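
Because cgraph_node_map is now keyed on unsigned profile ids rather than pointers, profile_id_traits reserves the key values 0 and UINT_MAX as the empty and deleted markers, which is why init_node_map above keeps regenerating a profile_id that is zero or already taken. A hedged sketch (the node argument is a placeholder, not code from the patch):

/* Hypothetical sketch: ids 0 and UINT_MAX must never be used as real
   keys, since the traits treat them as empty/deleted slots.  */
static void
profile_id_map_sketch (struct cgraph_node *node)
{
  hash_map<unsigned int, cgraph_node *, profile_id_traits> ids;
  ids.put (12345u, node);                   /* 0 < id < UINT_MAX  */
  gcc_checking_assert (ids.get (12345u) && *ids.get (12345u) == node);
}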