tree-tailcall.c (find_tail_calls): Use XNEW.

        * tree-tailcall.c (find_tail_calls): Use XNEW.
        * tree-ssa-dom.c (allocate_edge_info): Use XCNEW.
        (free_all_edge_infos): Use explicit cast to convert from void *.
        (vrp_free): Likewise.
        (dom_opt_finalize_block): Likewise.
        (record_equivalences_from_incoming_edge): Likewise.
        (thread_across_edge): Likewise.  Use XCNEWVEC.
        (record_cond): Use XCNEW.
        (record_conditions): Use XNEWVEC.
        (record_edge_info): Use XCNEWVEC.
        (lookup_avail_expr): Use XNEW.
        (record_range): Likewise.  Use GGC_NEW.
        * tree-nested.c (var_map_hash): Use explicit cast to convert from
        void *.
        (var_map_eq): Likewise.
        (lookup_field_for_decl): Likewise.
        (convert_nonlocal_reference): Likewise.
        (convert_local_reference): Likewise.
        (convert_nl_goto_reference): Likewise.
        (convert_nl_goto_receiver): Likewise.
        (convert_call_expr): Likewise.
        (convert_tramp_reference): Likewise.
        (lookup_tramp_for_decl): Likewise.  Use GGC_NEW.
        (convert_nl_goto_reference): Likewise.
        (lookup_field_for_decl): Use GGC_NEW.
        (create_nesting_tree): Use GGC_CNEW.
        * tree-ssa-phiopt.c (blocks_in_phiopt_order): Use XNEWVEC.
        * tree-ssa-alias.c (init_alias_info): Use XCNEW.
        (create_alias_map_for): Likewise.
        (setup_pointers_and_addressables): Use XCNEWVEC.
        (get_ptr_info): Use GGC_NEW.
        (used_part_map_eq): Use explicit cast to convert from void *.
        (up_lookup): Likewise.
        (up_insert): Use XNEW.
        (get_or_create_used_part_for): Use XCNEW.
        (get_tmt_for): Likewise.
        * tree-ssa-operands.c (ssa_operand_alloc): Use GGC_NEW.
        * tree-ssa-pre.c (phi_trans_add): Use XNEW.
        (bitmap_set_new): Use explicit cast to convert from void *.
        (set_new): Likewise.
        (insert_into_set): Likewise.
        (pool_copy_list): Likewise.
        (phi_translate): Likewise.
        (create_value_expr_from): Likewise.
        (insert_aux): Use XCNEWVEC.
        (compute_avail): Use XNEWVEC.
        * tree-ssa-live.c (calculate_live_on_entry): Likewise.
        (sort_coalesce_list): Likewise.
        (build_tree_conflict_graph): Use XCNEWVEC.
        * tree-ssa-dce.c (tree_dce_init): Use XNEWVEC.
        * tree-ssa-copy.c (init_copy_prop): Likewise.
        (fini_copy_prop): Likewise.
        * tree-ssa-uncprop.c (associate_equivalences_with_edges): Use XNEW
        and XCNEWVEC.
        (record_equiv): Use XNEW.
        (uncprop_into_successor_phis): Use explicit cast to convert from
        void *.
        (uncprop_initialize_block): Likewise.

From-SVN: r108747
Gabriel Dos Reis 2005-12-18 09:26:53 +00:00 committed by Gabriel Dos Reis
parent a1bdf726b7
commit e1111e8ebe
12 changed files with 149 additions and 89 deletions
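
All of the changes below are mechanical: raw xmalloc/xcalloc/ggc_alloc calls are replaced
by the typed allocation macros, and values that come back as void * now get an explicit
cast.  As a rough guide, the macros behave approximately as sketched here (an approximation
of the libiberty.h and ggc.h definitions of that era, not text taken from this commit):

    /* Heap allocation -- approximate expansions of the libiberty.h macros.  */
    #define XNEW(T)         ((T *) xmalloc (sizeof (T)))
    #define XCNEW(T)        ((T *) xcalloc (1, sizeof (T)))
    #define XNEWVEC(T, N)   ((T *) xmalloc (sizeof (T) * (N)))
    #define XCNEWVEC(T, N)  ((T *) xcalloc ((N), sizeof (T)))

    /* Garbage-collected allocation -- approximate expansions of the ggc.h macros.  */
    #define GGC_NEW(T)      ((T *) ggc_alloc (sizeof (T)))
    #define GGC_CNEW(T)     ((T *) ggc_alloc_cleared (sizeof (T)))

    /* Typical rewrite performed by this patch (the tree-tailcall.c hunk).  */
    nw = xmalloc (sizeof (struct tailcall));   /* before: relies on implicit void * conversion   */
    nw = XNEW (struct tailcall);               /* after: result already has type struct tailcall * */

The explicit result cast matters because an implicit conversion from void * to another
object pointer type is valid C but not valid C++, so the typed macros and the added casts
keep these files acceptable to either compiler.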

ChangeLog

@@ -1,3 +1,64 @@
+2005-12-17  Gabriel Dos Reis  <gdr@integrable-solutions.net>
+
+        * tree-tailcall.c (find_tail_calls): Use XNEW.
+        * tree-ssa-dom.c (allocate_edge_info): Use XCNEW.
+        (free_all_edge_infos): Use explicit cast to convert from void *.
+        (vrp_free): Likewise.
+        (dom_opt_finalize_block): Likewise.
+        (record_equivalences_from_incoming_edge): Likewise.
+        (thread_across_edge): Likewise. Use XCNEWVEC.
+        (record_cond): Use XCNEW.
+        (record_conditions): Use XNEWVEC.
+        (record_edge_info): Use XCNEWVEC.
+        (lookup_avail_expr): Use XNEW.
+        (record_range): Likewise. Use GGC_NEW.
+        * tree-nested.c (var_map_hash): Use explicit cast to convert from
+        void *.
+        (var_map_eq): Likewise.
+        (lookup_field_for_decl): Likewise.
+        (convert_nonlocal_reference): Likewise.
+        (convert_local_reference): Likewise.
+        (convert_nl_goto_reference): Likewise.
+        (convert_nl_goto_receiver): Likewise.
+        (convert_call_expr): Likewise.
+        (convert_tramp_reference): Likewise.
+        (lookup_tramp_for_decl): Likewise.Use GGC_NEW.
+        (convert_nl_goto_reference): Likewise.
+        (lookup_field_for_decl): Use GGC_NEW.
+        (create_nesting_tree): Use GGC_CNEW.
+        * tree-ssa-phiopt.c (blocks_in_phiopt_order): Use XNEWVEC.
+        * tree-ssa-alias.c (init_alias_info): Use XCNEW.
+        (create_alias_map_for): Likewise.
+        (setup_pointers_and_addressables): Use XCNEWVEC.
+        (get_ptr_info): Use GGC_NEW.
+        (used_part_map_eq): Use explicit cast to convert from void *.
+        (up_lookup): Likewise.
+        (up_insert): Use XNEW.
+        (get_or_create_used_part_for): Use XCNEW.
+        (get_tmt_for): Likewise.
+        * tree-ssa-operands.c (ssa_operand_alloc): Use GGC_NEW.
+        * tree-ssa-pre.c (phi_trans_add): Use XNEW.
+        (bitmap_set_new): Use explicit cast to convert from void *.
+        (set_new): Likewise.
+        (insert_into_set): Likewise.
+        (pool_copy_list): Likewise.
+        (phi_translate): Likewise.
+        (create_value_expr_from): Likewise.
+        (insert_aux): Use XCNEWVEC.
+        (compute_avail): Use XNEWVEC.
+        * tree-ssa-live.c (calculate_live_on_entry): Likewise.
+        (sort_coalesce_list): Likewise.
+        (build_tree_conflict_graph): Use XCNEWVEC.
+        * tree-ssa-dce.c (tree_dce_init): Use XNEWVEC.
+        * tree-ssa-copy.c (init_copy_prop): Likewise.
+        (fini_copy_prop): Likewise.
+        * tree-ssa-uncprop.c (associate_equivalences_with_edges): Use XNEW
+        and XCNEWVEC.
+        (record_equiv): Use XNEW.
+        (uncprop_into_successor_phis): Use explicit cast to convert from
+        void *.
+        (uncprop_initialize_block): Likewise.
+
 2005-12-18  Dorit Nuzman  <dorit@il.ibm.com>
 
         PR tree-optimization/24378

tree-nested.c

@@ -108,15 +108,15 @@ struct nesting_info GTY ((chain_next ("%h.next")))
 static hashval_t
 var_map_hash (const void *x)
 {
-  const struct var_map_elt *a = x;
+  const struct var_map_elt *a = (const struct var_map_elt *) x;
   return htab_hash_pointer (a->old);
 }
 static int
 var_map_eq (const void *x, const void *y)
 {
-  const struct var_map_elt *a = x;
-  const struct var_map_elt *b = y;
+  const struct var_map_elt *a = (const struct var_map_elt *) x;
+  const struct var_map_elt *b = (const struct var_map_elt *) y;
   return a->old == b->old;
 }
@@ -270,7 +270,7 @@ lookup_field_for_decl (struct nesting_info *info, tree decl,
       gcc_assert (insert != INSERT);
       return NULL;
     }
-  elt = *slot;
+  elt = (struct var_map_elt *) *slot;
   if (!elt && insert == INSERT)
     {
@@ -296,7 +296,7 @@ lookup_field_for_decl (struct nesting_info *info, tree decl,
       insert_field_into_struct (get_frame_type (info), field);
-      elt = ggc_alloc (sizeof (*elt));
+      elt = GGC_NEW (struct var_map_elt);
       elt->old = decl;
       elt->new = field;
       *slot = elt;
@@ -471,7 +471,7 @@ lookup_tramp_for_decl (struct nesting_info *info, tree decl,
       gcc_assert (insert != INSERT);
       return NULL;
     }
-  elt = *slot;
+  elt = (struct var_map_elt *) *slot;
   if (!elt && insert == INSERT)
     {
@@ -482,7 +482,7 @@ lookup_tramp_for_decl (struct nesting_info *info, tree decl,
       insert_field_into_struct (get_frame_type (info), field);
-      elt = ggc_alloc (sizeof (*elt));
+      elt = GGC_NEW (struct var_map_elt);
       elt->old = decl;
       elt->new = field;
       *slot = elt;
@@ -706,7 +706,7 @@ check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
 static struct nesting_info *
 create_nesting_tree (struct cgraph_node *cgn)
 {
-  struct nesting_info *info = ggc_calloc (1, sizeof (*info));
+  struct nesting_info *info = GGC_CNEW (struct nesting_info);
   info->var_map = htab_create_ggc (7, var_map_hash, var_map_eq, ggc_free);
   info->context = cgn->decl;
@@ -805,7 +805,7 @@ get_frame_field (struct nesting_info *info, tree target_context,
 static tree
 convert_nonlocal_reference (tree *tp, int *walk_subtrees, void *data)
 {
-  struct walk_stmt_info *wi = data;
+  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
   struct nesting_info *info = wi->info;
   tree t = *tp;
@@ -955,7 +955,7 @@ convert_nonlocal_reference (tree *tp, int *walk_subtrees, void *data)
 static tree
 convert_local_reference (tree *tp, int *walk_subtrees, void *data)
 {
-  struct walk_stmt_info *wi = data;
+  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
   struct nesting_info *info = wi->info;
   tree t = *tp, field, x;
   bool save_val_only;
@@ -1086,7 +1086,7 @@ convert_local_reference (tree *tp, int *walk_subtrees, void *data)
 static tree
 convert_nl_goto_reference (tree *tp, int *walk_subtrees, void *data)
 {
-  struct walk_stmt_info *wi = data;
+  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
   struct nesting_info *info = wi->info, *i;
   tree t = *tp, label, new_label, target_context, x, arg, field;
   struct var_map_elt *elt, dummy;
@@ -1114,13 +1114,13 @@ convert_nl_goto_reference (tree *tp, int *walk_subtrees, void *data)
      can insert the new label into the IL during a second pass. */
   dummy.old = label;
   slot = htab_find_slot (i->var_map, &dummy, INSERT);
-  elt = *slot;
+  elt = (struct var_map_elt *) *slot;
   if (elt == NULL)
     {
       new_label = create_artificial_label ();
       DECL_NONLOCAL (new_label) = 1;
-      elt = ggc_alloc (sizeof (*elt));
+      elt = GGC_NEW (struct var_map_elt);
       elt->old = label;
       elt->new = new_label;
       *slot = elt;
@@ -1154,7 +1154,7 @@ convert_nl_goto_reference (tree *tp, int *walk_subtrees, void *data)
 static tree
 convert_nl_goto_receiver (tree *tp, int *walk_subtrees, void *data)
 {
-  struct walk_stmt_info *wi = data;
+  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
   struct nesting_info *info = wi->info;
   tree t = *tp, label, new_label, x;
   struct var_map_elt *elt, dummy;
@@ -1166,7 +1166,7 @@ convert_nl_goto_receiver (tree *tp, int *walk_subtrees, void *data)
   label = LABEL_EXPR_LABEL (t);
   dummy.old = label;
-  elt = htab_find (info->var_map, &dummy);
+  elt = (struct var_map_elt *) htab_find (info->var_map, &dummy);
   if (!elt)
     return NULL_TREE;
   new_label = elt->new;
@@ -1193,7 +1193,7 @@ convert_nl_goto_receiver (tree *tp, int *walk_subtrees, void *data)
 static tree
 convert_tramp_reference (tree *tp, int *walk_subtrees, void *data)
 {
-  struct walk_stmt_info *wi = data;
+  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
   struct nesting_info *info = wi->info, *i;
   tree t = *tp, decl, target_context, x, arg;
@@ -1268,7 +1268,7 @@ convert_tramp_reference (tree *tp, int *walk_subtrees, void *data)
 static tree
 convert_call_expr (tree *tp, int *walk_subtrees, void *data)
 {
-  struct walk_stmt_info *wi = data;
+  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
   struct nesting_info *info = wi->info;
   tree t = *tp, decl, target_context;
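
Every tree-nested.c hunk above follows the same two-step pattern: a value stored in or
returned from a libiberty hash table is a void * and now gets an explicit cast back to
the element type, while newly created elements are allocated through a typed macro.  A
minimal sketch of that pattern, assuming the usual htab_find_slot interface; this is
illustrative only, not code copied from the commit:

    struct var_map_elt *elt, dummy;
    void **slot;

    dummy.old = decl;
    slot = htab_find_slot (info->var_map, &dummy, INSERT);

    /* *slot is a void *; the explicit cast keeps the code valid as C and as C++.  */
    elt = (struct var_map_elt *) *slot;
    if (!elt)
      {
        elt = GGC_NEW (struct var_map_elt);   /* typed, garbage-collected allocation */
        elt->old = decl;
        elt->new = field;
        *slot = elt;
      }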

tree-ssa-alias.c

@@ -455,7 +455,7 @@ init_alias_info (void)
   tree var;
   bitmap_obstack_initialize (&alias_obstack);
-  ai = xcalloc (1, sizeof (struct alias_info));
+  ai = XCNEW (struct alias_info);
   ai->ssa_names_visited = sbitmap_alloc (num_ssa_names);
   sbitmap_zero (ai->ssa_names_visited);
   VARRAY_TREE_INIT (ai->processed_ptrs, 50, "processed_ptrs");
@@ -1148,7 +1148,7 @@ static void
 create_alias_map_for (tree var, struct alias_info *ai)
 {
   struct alias_map_d *alias_map;
-  alias_map = xcalloc (1, sizeof (*alias_map));
+  alias_map = XCNEW (struct alias_map_d);
   alias_map->var = var;
   alias_map->set = get_alias_set (var);
   ai->addressable_vars[ai->num_addressable_vars++] = alias_map;
@@ -1194,9 +1194,8 @@ setup_pointers_and_addressables (struct alias_info *ai)
      because some TREE_ADDRESSABLE variables will be marked
      non-addressable below and only pointers with unique type tags are
      going to be added to POINTERS. */
-  ai->addressable_vars = xcalloc (num_addressable_vars,
-                                  sizeof (struct alias_map_d *));
-  ai->pointers = xcalloc (num_pointers, sizeof (struct alias_map_d *));
+  ai->addressable_vars = XCNEWVEC (struct alias_map_d *, num_addressable_vars);
+  ai->pointers = XCNEWVEC (struct alias_map_d *, num_pointers);
   ai->num_addressable_vars = 0;
   ai->num_pointers = 0;
@@ -1857,7 +1856,7 @@ get_tmt_for (tree ptr, struct alias_info *ai)
   /* Add PTR to the POINTERS array. Note that we are not interested in
      PTR's alias set. Instead, we cache the alias set for the memory that
      PTR points to. */
-  alias_map = xcalloc (1, sizeof (*alias_map));
+  alias_map = XCNEW (struct alias_map_d);
   alias_map->var = ptr;
   alias_map->set = tag_set;
   ai->pointers[ai->num_pointers++] = alias_map;
@@ -2019,7 +2018,7 @@ get_ptr_info (tree t)
   pi = SSA_NAME_PTR_INFO (t);
   if (pi == NULL)
     {
-      pi = ggc_alloc (sizeof (*pi));
+      pi = GGC_NEW (struct ptr_info_def);
      memset ((void *)pi, 0, sizeof (*pi));
      SSA_NAME_PTR_INFO (t) = pi;
     }
@@ -2439,7 +2438,8 @@ struct used_part_map
 static int
 used_part_map_eq (const void *va, const void *vb)
 {
-  const struct used_part_map *a = va, *b = vb;
+  const struct used_part_map *a = (const struct used_part_map *) va;
+  const struct used_part_map *b = (const struct used_part_map *) vb;
   return (a->uid == b->uid);
 }
@@ -2467,7 +2467,7 @@ up_lookup (unsigned int uid)
 {
   struct used_part_map *h, in;
   in.uid = uid;
-  h = htab_find_with_hash (used_portions, &in, uid);
+  h = (struct used_part_map *) htab_find_with_hash (used_portions, &in, uid);
   if (!h)
     return NULL;
   return h->to;
@@ -2481,7 +2481,7 @@ up_insert (unsigned int uid, used_part_t to)
   struct used_part_map *h;
   void **loc;
-  h = xmalloc (sizeof (struct used_part_map));
+  h = XNEW (struct used_part_map);
   h->uid = uid;
   h->to = to;
   loc = htab_find_slot_with_hash (used_portions, h,
@@ -2501,7 +2501,7 @@ get_or_create_used_part_for (size_t uid)
   used_part_t up;
   if ((up = up_lookup (uid)) == NULL)
     {
-      up = xcalloc (1, sizeof (struct used_part));
+      up = XCNEW (struct used_part);
       up->minused = INT_MAX;
       up->maxused = 0;
       up->explicit_uses = false;
@@ -2643,7 +2643,7 @@ create_overlap_variables_for (tree var)
              && fosize == lastfosize
              && currfotype == lastfotype))
            continue;
-          sv = ggc_alloc (sizeof (struct subvar));
+          sv = GGC_NEW (struct subvar);
          sv->offset = fo->offset;
          sv->size = fosize;
          sv->next = *subvars;

tree-ssa-copy.c

@@ -847,10 +847,10 @@ init_copy_prop (bool phis_only)
 {
   basic_block bb;
-  copy_of = xmalloc (num_ssa_names * sizeof (*copy_of));
+  copy_of = XNEWVEC (prop_value_t, num_ssa_names);
   memset (copy_of, 0, num_ssa_names * sizeof (*copy_of));
-  cached_last_copy_of = xmalloc (num_ssa_names * sizeof (*cached_last_copy_of));
+  cached_last_copy_of = XNEWVEC (tree, num_ssa_names);
   memset (cached_last_copy_of, 0, num_ssa_names * sizeof (*cached_last_copy_of));
   FOR_EACH_BB (bb)
@@ -902,7 +902,7 @@ fini_copy_prop (void)
   /* Set the final copy-of value for each variable by traversing the
      copy-of chains. */
-  tmp = xmalloc (num_ssa_names * sizeof (*tmp));
+  tmp = XNEWVEC (prop_value_t, num_ssa_names);
   memset (tmp, 0, num_ssa_names * sizeof (*tmp));
   for (i = 1; i < num_ssa_names; i++)
     {

tree-ssa-dce.c

@@ -837,8 +837,7 @@ tree_dce_init (bool aggressive)
 {
   int i;
-  control_dependence_map
-    = xmalloc (last_basic_block * sizeof (bitmap));
+  control_dependence_map = XNEWVEC (bitmap, last_basic_block);
   for (i = 0; i < last_basic_block; ++i)
     control_dependence_map[i] = BITMAP_ALLOC (NULL);

tree-ssa-dom.c

@@ -315,7 +315,7 @@ allocate_edge_info (edge e)
 {
   struct edge_info *edge_info;
-  edge_info = xcalloc (1, sizeof (struct edge_info));
+  edge_info = XCNEW (struct edge_info);
   e->aux = edge_info;
   return edge_info;
@@ -338,7 +338,7 @@ free_all_edge_infos (void)
     {
       FOR_EACH_EDGE (e, ei, bb->preds)
        {
-          struct edge_info *edge_info = e->aux;
+          struct edge_info *edge_info = (struct edge_info *) e->aux;
          if (edge_info)
            {
@@ -356,7 +356,7 @@ free_all_edge_infos (void)
 static void
 vrp_free (void *data)
 {
-  struct vrp_hash_elt *elt = data;
+  struct vrp_hash_elt *elt = (struct vrp_hash_elt *) data;
   struct VEC(vrp_element_p,heap) **vrp_elt = &elt->records;
   VEC_free (vrp_element_p, heap, *vrp_elt);
@@ -770,7 +770,7 @@ thread_across_edge (struct dom_walk_data *walk_data, edge e)
       unsigned int num, i = 0;
       num = NUM_SSA_OPERANDS (stmt, (SSA_OP_USE | SSA_OP_VUSE));
-      copy = xcalloc (num, sizeof (tree));
+      copy = XCNEWVEC (tree, num);
       /* Make a copy of the uses & vuses into USES_COPY, then cprop into
         the operands. */
@@ -877,7 +877,7 @@ thread_across_edge (struct dom_walk_data *walk_data, edge e)
      /* Stuff the operator and operands into our dummy conditional
        expression, creating the dummy conditional if necessary. */
-      dummy_cond = walk_data->global_data;
+      dummy_cond = (tree) walk_data->global_data;
      if (! dummy_cond)
        {
          dummy_cond = build2 (cond_code, boolean_type_node, op0, op1);
@@ -934,7 +934,7 @@ thread_across_edge (struct dom_walk_data *walk_data, edge e)
          struct edge_info *edge_info;
          if (e->aux)
-            edge_info = e->aux;
+            edge_info = (struct edge_info *) e->aux;
          else
            edge_info = allocate_edge_info (e);
          edge_info->redirection_target = taken_edge;
@@ -1119,7 +1119,7 @@ dom_opt_finalize_block (struct dom_walk_data *walk_data, basic_block bb)
          VEC_safe_push (tree, heap, avail_exprs_stack, NULL_TREE);
          VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);
-          edge_info = true_edge->aux;
+          edge_info = (struct edge_info *) true_edge->aux;
          /* If we have info associated with this edge, record it into
             our equivalency tables. */
@@ -1160,7 +1160,7 @@ dom_opt_finalize_block (struct dom_walk_data *walk_data, basic_block bb)
          struct edge_info *edge_info;
          unsigned int i;
-          edge_info = false_edge->aux;
+          edge_info = (struct edge_info *) false_edge->aux;
          /* If we have info associated with this edge, record it into
             our equivalency tables. */
@@ -1377,7 +1377,7 @@ record_equivalences_from_incoming_edge (basic_block bb)
     {
       unsigned int i;
-      edge_info = e->aux;
+      edge_info = (struct edge_info *) e->aux;
       if (edge_info)
        {
@@ -1496,7 +1496,7 @@ record_var_is_nonzero (tree var)
 static void
 record_cond (tree cond, tree value)
 {
-  struct expr_hash_elt *element = xmalloc (sizeof (struct expr_hash_elt));
+  struct expr_hash_elt *element = XCNEW (struct expr_hash_elt);
   void **slot;
   initialize_hash_element (cond, value, element);
@@ -1546,7 +1546,7 @@ record_conditions (struct edge_info *edge_info, tree cond, tree inverted)
     case LT_EXPR:
     case GT_EXPR:
       edge_info->max_cond_equivalences = 12;
-      edge_info->cond_equivalences = xmalloc (12 * sizeof (tree));
+      edge_info->cond_equivalences = XNEWVEC (tree, 12);
       build_and_record_new_cond ((TREE_CODE (cond) == LT_EXPR
                                   ? LE_EXPR : GE_EXPR),
                                  op0, op1, &edge_info->cond_equivalences[4]);
@@ -1561,14 +1561,14 @@ record_conditions (struct edge_info *edge_info, tree cond, tree inverted)
     case GE_EXPR:
     case LE_EXPR:
       edge_info->max_cond_equivalences = 6;
-      edge_info->cond_equivalences = xmalloc (6 * sizeof (tree));
+      edge_info->cond_equivalences = XNEWVEC (tree, 6);
       build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                  &edge_info->cond_equivalences[4]);
       break;
     case EQ_EXPR:
       edge_info->max_cond_equivalences = 10;
-      edge_info->cond_equivalences = xmalloc (10 * sizeof (tree));
+      edge_info->cond_equivalences = XNEWVEC (tree, 10);
       build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                  &edge_info->cond_equivalences[4]);
       build_and_record_new_cond (LE_EXPR, op0, op1,
@@ -1579,7 +1579,7 @@ record_conditions (struct edge_info *edge_info, tree cond, tree inverted)
     case UNORDERED_EXPR:
       edge_info->max_cond_equivalences = 16;
-      edge_info->cond_equivalences = xmalloc (16 * sizeof (tree));
+      edge_info->cond_equivalences = XNEWVEC (tree, 16);
       build_and_record_new_cond (NE_EXPR, op0, op1,
                                  &edge_info->cond_equivalences[4]);
       build_and_record_new_cond (UNLE_EXPR, op0, op1,
@@ -1597,7 +1597,7 @@ record_conditions (struct edge_info *edge_info, tree cond, tree inverted)
     case UNLT_EXPR:
     case UNGT_EXPR:
       edge_info->max_cond_equivalences = 8;
-      edge_info->cond_equivalences = xmalloc (8 * sizeof (tree));
+      edge_info->cond_equivalences = XNEWVEC (tree, 8);
       build_and_record_new_cond ((TREE_CODE (cond) == UNLT_EXPR
                                   ? UNLE_EXPR : UNGE_EXPR),
                                  op0, op1, &edge_info->cond_equivalences[4]);
@@ -1607,7 +1607,7 @@ record_conditions (struct edge_info *edge_info, tree cond, tree inverted)
     case UNEQ_EXPR:
       edge_info->max_cond_equivalences = 8;
-      edge_info->cond_equivalences = xmalloc (8 * sizeof (tree));
+      edge_info->cond_equivalences = XNEWVEC (tree, 8);
       build_and_record_new_cond (UNLE_EXPR, op0, op1,
                                  &edge_info->cond_equivalences[4]);
       build_and_record_new_cond (UNGE_EXPR, op0, op1,
@@ -1616,7 +1616,7 @@ record_conditions (struct edge_info *edge_info, tree cond, tree inverted)
     case LTGT_EXPR:
       edge_info->max_cond_equivalences = 8;
-      edge_info->cond_equivalences = xmalloc (8 * sizeof (tree));
+      edge_info->cond_equivalences = XNEWVEC (tree, 8);
       build_and_record_new_cond (NE_EXPR, op0, op1,
                                  &edge_info->cond_equivalences[4]);
       build_and_record_new_cond (ORDERED_EXPR, op0, op1,
@@ -1625,7 +1625,7 @@ record_conditions (struct edge_info *edge_info, tree cond, tree inverted)
     default:
       edge_info->max_cond_equivalences = 4;
-      edge_info->cond_equivalences = xmalloc (4 * sizeof (tree));
+      edge_info->cond_equivalences = XNEWVEC (tree, 4);
       break;
     }
@@ -2201,7 +2201,7 @@ record_edge_info (basic_block bb)
        {
          tree labels = SWITCH_LABELS (stmt);
          int i, n_labels = TREE_VEC_LENGTH (labels);
-          tree *info = xcalloc (last_basic_block, sizeof (tree));
+          tree *info = XCNEWVEC (tree, last_basic_block);
          edge e;
          edge_iterator ei;
@@ -2891,7 +2891,7 @@ lookup_avail_expr (tree stmt, bool insert)
   void **slot;
   tree lhs;
   tree temp;
-  struct expr_hash_elt *element = xmalloc (sizeof (struct expr_hash_elt));
+  struct expr_hash_elt *element = XNEW (struct expr_hash_elt);
   lhs = TREE_CODE (stmt) == MODIFY_EXPR ? TREE_OPERAND (stmt, 0) : NULL;
@@ -3062,7 +3062,7 @@ record_range (tree cond, basic_block bb)
       void **slot;
-      vrp_hash_elt = xmalloc (sizeof (struct vrp_hash_elt));
+      vrp_hash_elt = XNEW (struct vrp_hash_elt);
       vrp_hash_elt->var = TREE_OPERAND (cond, 0);
       vrp_hash_elt->records = NULL;
       slot = htab_find_slot (vrp_data, vrp_hash_elt, INSERT);
@@ -3075,7 +3075,7 @@ record_range (tree cond, basic_block bb)
       vrp_hash_elt = (struct vrp_hash_elt *) *slot;
       vrp_records_p = &vrp_hash_elt->records;
-      element = ggc_alloc (sizeof (struct vrp_element));
+      element = GGC_NEW (struct vrp_element);
       element->low = NULL;
       element->high = NULL;
       element->cond = cond;

tree-ssa-live.c

@@ -639,7 +639,7 @@ calculate_live_on_entry (var_map map)
        }
     }
-  stack = xmalloc (sizeof (int) * last_basic_block);
+  stack = XNEWVEC (int, last_basic_block);
   EXECUTE_IF_SET_IN_BITMAP (live->global, 0, i, bi)
     {
       live_worklist (live, stack, i);
@@ -1224,7 +1224,7 @@ sort_coalesce_list (coalesce_list_p cl)
   /* Only call qsort if there are more than 2 items. */
   if (num > 2)
     {
-      list = xmalloc (sizeof (partition_pair_p) * num);
+      list = XNEWVEC (partition_pair_p, num);
       count = 0;
       for (p = chain; p != NULL; p = p->next)
        list[count++] = p;
@@ -1345,8 +1345,8 @@ build_tree_conflict_graph (tree_live_info_p liveinfo, tpa_p tpa,
   live = BITMAP_ALLOC (NULL);
-  partition_link = xcalloc (num_var_partitions (map) + 1, sizeof (int));
-  tpa_nodes = xcalloc (tpa_num_trees (tpa), sizeof (int));
+  partition_link = XCNEWVEC (int, num_var_partitions (map) + 1);
+  tpa_nodes = XCNEWVEC (int, tpa_num_trees (tpa));
   tpa_to_clear = VEC_alloc (int, heap, 50);
   FOR_EACH_BB (bb)

tree-ssa-operands.c

@@ -279,7 +279,7 @@ ssa_operand_alloc (unsigned size)
   if (operand_memory_index + size >= SSA_OPERAND_MEMORY_SIZE)
     {
       struct ssa_operand_memory_d *ptr;
-      ptr = ggc_alloc (sizeof (struct ssa_operand_memory_d));
+      ptr = GGC_NEW (struct ssa_operand_memory_d);
       ptr->next = operand_memory;
       operand_memory = ptr;
       operand_memory_index = 0;

tree-ssa-phiopt.c

@@ -247,7 +247,7 @@ static basic_block *
 blocks_in_phiopt_order (void)
 {
   basic_block x, y;
-  basic_block *order = xmalloc (sizeof (basic_block) * n_basic_blocks);
+  basic_block *order = XNEWVEC (basic_block, n_basic_blocks);
   unsigned n = n_basic_blocks - NUM_FIXED_BLOCKS;
   unsigned np, i;
   sbitmap visited = sbitmap_alloc (last_basic_block);

tree-ssa-pre.c

@@ -401,7 +401,7 @@ static inline void
 phi_trans_add (tree e, tree v, basic_block pred)
 {
   void **slot;
-  expr_pred_trans_t new_pair = xmalloc (sizeof (*new_pair));
+  expr_pred_trans_t new_pair = XNEW (struct expr_pred_trans_d);
   new_pair->e = e;
   new_pair->pred = pred;
   new_pair->v = v;
@@ -476,7 +476,7 @@ value_insert_into_set_bitmap (value_set_t set, tree v)
 static bitmap_set_t
 bitmap_set_new (void)
 {
-  bitmap_set_t ret = pool_alloc (bitmap_set_pool);
+  bitmap_set_t ret = (bitmap_set_t) pool_alloc (bitmap_set_pool);
   ret->expressions = BITMAP_ALLOC (&grand_bitmap_obstack);
   ret->values = BITMAP_ALLOC (&grand_bitmap_obstack);
   return ret;
@@ -488,7 +488,7 @@ static value_set_t
 set_new (bool indexed)
 {
   value_set_t ret;
-  ret = pool_alloc (value_set_pool);
+  ret = (value_set_t) pool_alloc (value_set_pool);
   ret->head = ret->tail = NULL;
   ret->length = 0;
   ret->indexed = indexed;
@@ -519,7 +519,7 @@ bitmap_insert_into_set (bitmap_set_t set, tree expr)
 static void
 insert_into_set (value_set_t set, tree expr)
 {
-  value_set_node_t newnode = pool_alloc (value_set_node_pool);
+  value_set_node_t newnode = (value_set_node_t) pool_alloc (value_set_node_pool);
   tree val = get_value_handle (expr);
   gcc_assert (val);
@@ -920,7 +920,7 @@ pool_copy_list (tree list)
   if (list == 0)
     return 0;
-  head = pool_alloc (list_node_pool);
+  head = (tree) pool_alloc (list_node_pool);
   memcpy (head, list, tree_size (list));
   prev = head;
@@ -928,7 +928,7 @@ pool_copy_list (tree list)
   next = TREE_CHAIN (list);
   while (next)
     {
-      TREE_CHAIN (prev) = pool_alloc (list_node_pool);
+      TREE_CHAIN (prev) = (tree) pool_alloc (list_node_pool);
       memcpy (TREE_CHAIN (prev), next, tree_size (next));
       prev = TREE_CHAIN (prev);
       next = TREE_CHAIN (next);
@@ -1030,7 +1030,7 @@ phi_translate (tree expr, value_set_t set, basic_block pred,
        if (listchanged || (newop0 != oldop0) || (oldop2 != newop2))
          {
-            newexpr = pool_alloc (expression_node_pool);
+            newexpr = (tree) pool_alloc (expression_node_pool);
            memcpy (newexpr, expr, tree_size (expr));
            TREE_OPERAND (newexpr, 0) = newop0 == oldop0 ? oldop0 : get_value_handle (newop0);
            TREE_OPERAND (newexpr, 1) = listchanged ? newarglist : oldarglist;
@@ -1068,7 +1068,7 @@ phi_translate (tree expr, value_set_t set, basic_block pred,
        if (newop1 != oldop1 || newop2 != oldop2)
          {
            tree t;
-            newexpr = pool_alloc (binary_node_pool);
+            newexpr = (tree) pool_alloc (binary_node_pool);
            memcpy (newexpr, expr, tree_size (expr));
            TREE_OPERAND (newexpr, 0) = newop1 == oldop1 ? oldop1 : get_value_handle (newop1);
            TREE_OPERAND (newexpr, 1) = newop2 == oldop2 ? oldop2 : get_value_handle (newop2);
@@ -1102,7 +1102,7 @@ phi_translate (tree expr, value_set_t set, basic_block pred,
        if (newop1 != oldop1)
          {
            tree t;
-            newexpr = pool_alloc (unary_node_pool);
+            newexpr = (tree) pool_alloc (unary_node_pool);
            memcpy (newexpr, expr, tree_size (expr));
            TREE_OPERAND (newexpr, 0) = get_value_handle (newop1);
            t = fully_constant_expression (newexpr);
@@ -1868,7 +1868,7 @@ insert_aux (basic_block block)
              continue;
            }
-          avail = xcalloc (last_basic_block, sizeof (tree));
+          avail = XCNEWVEC (tree, last_basic_block);
          FOR_EACH_EDGE (pred, ei, block->preds)
            {
              tree vprime;
@@ -2072,7 +2072,7 @@ create_value_expr_from (tree expr, basic_block block, tree stmt)
       pool = expression_node_pool;
     }
-  vexpr = pool_alloc (pool);
+  vexpr = (tree) pool_alloc (pool);
   memcpy (vexpr, expr, tree_size (expr));
   /* This case is only for TREE_LIST's that appear as part of
@@ -2365,7 +2365,7 @@ compute_avail (void)
     }
   /* Allocate the worklist. */
-  worklist = xmalloc (sizeof (basic_block) * n_basic_blocks);
+  worklist = XNEWVEC (basic_block, n_basic_blocks);
   /* Seed the algorithm by putting the dominator children of the entry
      block on the worklist. */

tree-ssa-uncprop.c

@@ -95,12 +95,12 @@ associate_equivalences_with_edges (void)
          if (TREE_CODE (cond) == SSA_NAME
              && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (cond))
            {
-              equivalency = xmalloc (sizeof (struct edge_equivalency));
+              equivalency = XNEW (struct edge_equivalency);
              equivalency->rhs = constant_boolean_node (1, TREE_TYPE (cond));
              equivalency->lhs = cond;
              true_edge->aux = equivalency;
-              equivalency = xmalloc (sizeof (struct edge_equivalency));
+              equivalency = XNEW (struct edge_equivalency);
              equivalency->rhs = constant_boolean_node (0, TREE_TYPE (cond));
              equivalency->lhs = cond;
              false_edge->aux = equivalency;
@@ -121,14 +121,14 @@ associate_equivalences_with_edges (void)
            {
              if (TREE_CODE (cond) == EQ_EXPR)
                {
-                  equivalency = xmalloc (sizeof (struct edge_equivalency));
+                  equivalency = XNEW (struct edge_equivalency);
                  equivalency->lhs = op0;
                  equivalency->rhs = (integer_zerop (op1)
                                      ? boolean_false_node
                                      : boolean_true_node);
                  true_edge->aux = equivalency;
-                  equivalency = xmalloc (sizeof (struct edge_equivalency));
+                  equivalency = XNEW (struct edge_equivalency);
                  equivalency->lhs = op0;
                  equivalency->rhs = (integer_zerop (op1)
                                      ? boolean_true_node
@@ -137,14 +137,14 @@ associate_equivalences_with_edges (void)
                }
              else
                {
-                  equivalency = xmalloc (sizeof (struct edge_equivalency));
+                  equivalency = XNEW (struct edge_equivalency);
                  equivalency->lhs = op0;
                  equivalency->rhs = (integer_zerop (op1)
                                      ? boolean_true_node
                                      : boolean_false_node);
                  true_edge->aux = equivalency;
-                  equivalency = xmalloc (sizeof (struct edge_equivalency));
+                  equivalency = XNEW (struct edge_equivalency);
                  equivalency->lhs = op0;
                  equivalency->rhs = (integer_zerop (op1)
                                      ? boolean_false_node
@@ -168,7 +168,7 @@ associate_equivalences_with_edges (void)
                      || REAL_VALUES_EQUAL (dconst0, TREE_REAL_CST (op1))))
                continue;
-              equivalency = xmalloc (sizeof (struct edge_equivalency));
+              equivalency = XNEW (struct edge_equivalency);
              equivalency->lhs = op0;
              equivalency->rhs = op1;
              if (TREE_CODE (cond) == EQ_EXPR)
@@ -194,7 +194,7 @@ associate_equivalences_with_edges (void)
        {
          tree labels = SWITCH_LABELS (stmt);
          int i, n_labels = TREE_VEC_LENGTH (labels);
-          tree *info = xcalloc (n_basic_blocks, sizeof (tree));
+          tree *info = XCNEWVEC (tree, n_basic_blocks);
          /* Walk over the case label vector. Record blocks
             which are reached by a single case label which represents
@@ -227,7 +227,7 @@ associate_equivalences_with_edges (void)
                  /* Record an equivalency on the edge from BB to basic
                     block I. */
-                  equivalency = xmalloc (sizeof (struct edge_equivalency));
+                  equivalency = XNEW (struct edge_equivalency);
                  equivalency->rhs = x;
                  equivalency->lhs = cond;
                  find_edge (bb, BASIC_BLOCK (i))->aux = equivalency;
@@ -364,7 +364,7 @@ record_equiv (tree value, tree equivalence)
   struct equiv_hash_elt *equiv_hash_elt;
   void **slot;
-  equiv_hash_elt = xmalloc (sizeof (struct equiv_hash_elt));
+  equiv_hash_elt = XNEW (struct equiv_hash_elt);
   equiv_hash_elt->value = value;
   equiv_hash_elt->equivalences = NULL;
@@ -486,7 +486,7 @@ uncprop_into_successor_phis (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
      /* Record any equivalency associated with E. */
      if (e->aux)
        {
-          struct edge_equivalency *equiv = e->aux;
+          struct edge_equivalency *equiv = (struct edge_equivalency *) e->aux;
          record_equiv (equiv->rhs, equiv->lhs);
        }
@@ -512,7 +512,7 @@ uncprop_into_successor_phis (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
          if (slot)
            {
-              struct equiv_hash_elt *elt = *slot;
+              struct equiv_hash_elt *elt = (struct equiv_hash_elt *) *slot;
              int j;
              /* Walk every equivalence with the same value. If we find
@@ -536,7 +536,7 @@ uncprop_into_successor_phis (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
      /* If we had an equivalence associated with this edge, remove it. */
      if (e->aux)
        {
-          struct edge_equivalency *equiv = e->aux;
+          struct edge_equivalency *equiv = (struct edge_equivalency *) e->aux;
          remove_equivalence (equiv->rhs);
        }
    }
@@ -589,7 +589,7 @@ uncprop_initialize_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
   if (e && e->src == parent && e->aux)
     {
-      struct edge_equivalency *equiv = e->aux;
+      struct edge_equivalency *equiv = (struct edge_equivalency *) e->aux;
       record_equiv (equiv->rhs, equiv->lhs);
       VEC_safe_push (tree, heap, equiv_stack, equiv->rhs);

tree-tailcall.c

@@ -526,7 +526,7 @@ find_tail_calls (basic_block bb, struct tailcall **ret)
   if (!tail_recursion && (m || a))
     return;
-  nw = xmalloc (sizeof (struct tailcall));
+  nw = XNEW (struct tailcall);
   nw->call_block = bb;
   nw->call_bsi = bsi;