tree-vrp.c: Use XNEW/XCNEW allocation wrappers.

2006-01-31  Marcin Dalecki  <martin@dalecki.de>

	* tree-vrp.c: Use XNEW/XCNEW allocation wrappers.
	* regrename.c: Ditto.
	* tree-ssa-loop-im.c: Ditto.
	* tree-dump.c: Ditto.
	* tree-complex.c: Ditto.
	* genrecog.c: Ditto.
	* tree-ssa-threadupdate.c: Ditto.
	* tracer.c: Ditto.
	* java/class.c: Ditto.
	* java/jcf-parse.c: Ditto.
	* java/resource.c: Ditto.
	* java/except.c: Ditto.
	* java/jvspec.c: Ditto.
	* java/jcf-write.c: Ditto.
	* java/jcf-path.c: Ditto.
	* java/gjavah.c: Ditto.
	* java/zextract.c: Ditto.
	* java/jcf-io.c: Ditto.
	* java/jcf.h: Ditto.
	* java/buffer.c: Ditto.
	* java/lang.c: Ditto.
	* java/parse-scan.y: Ditto.
	* java/lex.c: Ditto.
	* java/lex.h: Ditto.
	* cfgloopmanip.c: Ditto.
	* postreload-gcse.c: Ditto.
	* tree-ssa-loop-manip.c: Ditto.
	* postreload.c: Ditto.
	* tree-ssa-loop-ch.c: Ditto.
	* loop.c: Ditto.
	* ipa-cp.c: Ditto.
	* cppspec.c: Ditto.
	* diagnostic.c: Ditto.
	* final.c: Ditto.
	* genoutput.c: Ditto.
	* gcc.c: Ditto.
	* cfghooks.c: Ditto.
	* cfgloopanal.c: Ditto.
	* objc/objc-act.c: Ditto.
	* gcov.c: Ditto.
	* genextract.c: Ditto.
	* genautomata.c: Ditto.
	* pretty-print.c: Ditto.
	* genemit.c: Ditto.
	* cgraphunit.c: Ditto.
	* flow.c: Ditto.
	* df-scan.c: Ditto.
	* haifa-sched.c: Ditto.
	* dominance.c: Ditto.
	* dbxout.c: Ditto.
	* tree-ssa-loop-ivopts.c: Ditto.
	* df-core.c: Ditto.
	* mode-switching.c: Ditto.
	* modulo-sched.c: Ditto.
	* graph.c: Ditto.
	* ipa-pure-const.c: Ditto.
	* cse.c: Ditto.
	* fix-header.c: Ditto.
	* web.c: Ditto.
	* tree-stdarg.c: Ditto.
	* ipa-utils.c: Ditto.
	* loop-init.c: Ditto.
	* ipa-inline.c: Ditto.
	* cfganal.c: Ditto.
	* global.c: Ditto.
	* alloc-pool.c: Ditto.
	* dwarf2out.c: Ditto.
	* opts.c: Ditto.
	* genattrtab.c: Ditto.
	* tree-ssa-loop-ivcanon.c: Ditto.
	* predict.c: Ditto.
	* timevar.c: Ditto.
	* lcm.c: Ditto.
	* fortran/gfortranspec.c: Ditto.
	* regmove.c: Ditto.
	* local-alloc.c: Ditto.
	* langhooks.c: Ditto.
	* function.c: Ditto.
	* tree-vectorizer.c: Ditto.
	* gcse.c: Ditto.
	* ipa-type-escape.c: Ditto.
	* alias.c: Ditto.
	* tree-if-conv.c: Ditto.
	* profile.c: Ditto.
	* ipa.c: Ditto.
	* tree-data-ref.c: Ditto.
	* loop-unroll.c: Ditto.
	* treelang/treetree.c: Ditto.
	* calls.c: Ditto.
	* bt-load.c: Ditto.
	* ggc-common.c: Ditto.
	* except.c: Ditto.
	* coverage.c: Ditto.
	* cselib.c: Ditto.
	* tree-cfgcleanup.c: Ditto.
	* tree-ssa-pre.c: Ditto.
	* cfgcleanup.c: Ditto.
	* loop-invariant.c: Ditto.
	* loop-iv.c: Ditto.
	* ipa-prop.c: Ditto.
	* print-tree.c: Ditto.
	* conflict.c: Ditto.
	* ggc-page.c: Ditto.
	* sched-deps.c: Ditto.
	* regclass.c: Ditto.
	* tree-object-size.c: Ditto.
	* combine.c: Ditto.
	* bb-reorder.c: Ditto.
	* resource.c: Ditto.
	* var-tracking.c: Ditto.
	* cfgloop.c: Ditto.
	* df-problems.c: Ditto.
	* reg-stack.c: Ditto.
	* tlink.c: Ditto.
	* gccspec.c: Ditto.
	* sched-rgn.c: Ditto.
	* tree-ssa-structalias.c: Ditto.
	* tree-ssa-reassoc.c: Ditto.
	* config/darwin-c.c: Ditto.
	* config/darwin.c: Ditto.
	* config/arm/arm.c: Ditto.
	* cfgrtl.c: Ditto.
	* collect2.c: Ditto.
	* reload1.c: Ditto.

From-SVN: r110446
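
For reference, the XNEW/XCNEW family used throughout this patch consists of
type-safe wrappers around xmalloc/xcalloc.  The sketch below paraphrases
their definitions from include/libiberty.h (shown only for illustration, it
is not part of this commit), together with a typical call-site conversion
taken from the mark_dfs_back_edges hunk further down:

    /* Allocate one object of type T, or a vector of N objects of type T.
       The XC* variants zero the memory, as xcalloc does.  */
    #define XNEW(T)         ((T *) xmalloc (sizeof (T)))
    #define XCNEW(T)        ((T *) xcalloc (1, sizeof (T)))
    #define XNEWVEC(T, N)   ((T *) xmalloc (sizeof (T) * (N)))
    #define XCNEWVEC(T, N)  ((T *) xcalloc ((N), sizeof (T)))

    /* Typical conversion performed by this patch:
       before:  stack = xmalloc ((n_basic_blocks + 1) * sizeof (edge_iterator));
       after:   stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);  */

Because the wrappers carry the element type, the size computation cannot
drift out of sync with the pointer being assigned, and the explicit cast
keeps the call valid if the file is ever compiled as C++.
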
@ -2498,8 +2498,8 @@ init_alias_analysis (void)
VARRAY_RTX_INIT (reg_base_value, maxreg, "reg_base_value");
}
new_reg_base_value = xmalloc (maxreg * sizeof (rtx));
reg_seen = xmalloc (maxreg);
new_reg_base_value = XNEWVEC (rtx, maxreg);
reg_seen = XNEWVEC (char, maxreg);
/* The basic idea is that each pass through this loop will use the
"constant" information from the previous pass to propagate alias


@ -228,7 +228,7 @@ pool_alloc (alloc_pool pool)
alloc_pool_list block_header;
/* Make the block. */
block = xmalloc (pool->block_size);
block = XNEWVEC (char, pool->block_size);
block_header = (alloc_pool_list) block;
block += align_eight (sizeof (struct alloc_pool_list_def));
#ifdef GATHER_STATISTICS


@ -911,7 +911,7 @@ connect_traces (int n_traces, struct trace *traces)
else
count_threshold = max_entry_count / 1000 * DUPLICATION_THRESHOLD;
connected = xcalloc (n_traces, sizeof (bool));
connected = XCNEWVEC (bool, n_traces);
last_trace = -1;
current_pass = 1;
current_partition = BB_PARTITION (traces[0].first);
@ -1912,7 +1912,7 @@ reorder_basic_blocks (unsigned int flags)
/* We need to know some information for each basic block. */
array_size = GET_ARRAY_SIZE (last_basic_block);
bbd = xmalloc (array_size * sizeof (bbro_basic_block_data));
bbd = XNEWVEC (bbro_basic_block_data, array_size);
for (i = 0; i < array_size; i++)
{
bbd[i].start_of_trace = -1;
@ -1922,7 +1922,7 @@ reorder_basic_blocks (unsigned int flags)
bbd[i].node = NULL;
}
traces = xmalloc (n_basic_blocks * sizeof (struct trace));
traces = XNEWVEC (struct trace, n_basic_blocks);
n_traces = 0;
find_traces (&n_traces, traces);
connect_traces (n_traces, traces);
@ -2172,7 +2172,7 @@ partition_hot_cold_basic_blocks (void)
if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1)
return;
crossing_edges = xcalloc (max_edges, sizeof (edge));
crossing_edges = XCNEWVEC (edge, max_edges);
cfg_layout_initialize (0);


@ -779,12 +779,12 @@ static void
build_btr_def_use_webs (fibheap_t all_btr_defs)
{
const int max_uid = get_max_uid ();
btr_def *def_array = xcalloc (max_uid, sizeof (btr_def));
btr_user *use_array = xcalloc (max_uid, sizeof (btr_user));
btr_def *def_array = XCNEWVEC (btr_def, max_uid);
btr_user *use_array = XCNEWVEC (btr_user, max_uid);
sbitmap *btr_defset = sbitmap_vector_alloc (
(last_btr - first_btr) + 1, max_uid);
sbitmap *bb_gen = sbitmap_vector_alloc (n_basic_blocks, max_uid);
HARD_REG_SET *btrs_written = xcalloc (n_basic_blocks, sizeof (HARD_REG_SET));
HARD_REG_SET *btrs_written = XCNEWVEC (HARD_REG_SET, n_basic_blocks);
sbitmap *bb_kill;
sbitmap *bb_out;
@ -903,7 +903,7 @@ augment_live_range (bitmap live_range, HARD_REG_SET *btrs_live_in_range,
{
basic_block *worklist, *tos;
tos = worklist = xmalloc (sizeof (basic_block) * (n_basic_blocks + 1));
tos = worklist = XNEWVEC (basic_block, n_basic_blocks + 1);
if (dominated_by_p (CDI_DOMINATORS, new_bb, head_bb))
{


@ -842,7 +842,7 @@ store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
= (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
}
args[i].aligned_regs = xmalloc (sizeof (rtx) * args[i].n_aligned_regs);
args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);
/* Structures smaller than a word are normally aligned to the
least significant byte. On a BYTES_BIG_ENDIAN machine,
@ -2369,7 +2369,7 @@ expand_call (tree exp, rtx target, int ignore)
#endif
if (stack_usage_map_buf)
free (stack_usage_map_buf);
stack_usage_map_buf = xmalloc (highest_outgoing_arg_in_use);
stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
stack_usage_map = stack_usage_map_buf;
if (initial_highest_arg_in_use)
@ -2477,7 +2477,7 @@ expand_call (tree exp, rtx target, int ignore)
/* Make a new map for the new argument list. */
if (stack_usage_map_buf)
free (stack_usage_map_buf);
stack_usage_map_buf = xmalloc (highest_outgoing_arg_in_use);
stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
stack_usage_map = stack_usage_map_buf;
memset (stack_usage_map, 0, highest_outgoing_arg_in_use);
highest_outgoing_arg_in_use = 0;
@ -3539,7 +3539,7 @@ emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
needed);
#endif
stack_usage_map_buf = xmalloc (highest_outgoing_arg_in_use);
stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
stack_usage_map = stack_usage_map_buf;
if (initial_highest_arg_in_use)


@ -167,11 +167,11 @@ mark_dfs_back_edges (void)
bool found = false;
/* Allocate the preorder and postorder number arrays. */
pre = xcalloc (last_basic_block, sizeof (int));
post = xcalloc (last_basic_block, sizeof (int));
pre = XCNEWVEC (int, last_basic_block);
post = XCNEWVEC (int, last_basic_block);
/* Allocate stack for back-tracking up CFG. */
stack = xmalloc ((n_basic_blocks + 1) * sizeof (edge_iterator));
stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
sp = 0;
/* Allocate bitmap to track nodes that have been visited. */
@ -282,7 +282,7 @@ find_unreachable_blocks (void)
edge_iterator ei;
basic_block *tos, *worklist, bb;
tos = worklist = xmalloc (sizeof (basic_block) * n_basic_blocks);
tos = worklist = XNEWVEC (basic_block, n_basic_blocks);
/* Clear all the reachability flags. */
@ -356,10 +356,10 @@ create_edge_list (void)
num_edges += EDGE_COUNT (bb->succs);
}
elist = xmalloc (sizeof (struct edge_list));
elist = XNEW (struct edge_list);
elist->num_blocks = block_count;
elist->num_edges = num_edges;
elist->index_to_edge = xmalloc (sizeof (edge) * num_edges);
elist->index_to_edge = XNEWVEC (edge, num_edges);
num_edges = 0;
@ -660,7 +660,7 @@ post_order_compute (int *post_order, bool include_entry_exit)
post_order[post_order_num++] = EXIT_BLOCK;
/* Allocate stack for back-tracking up CFG. */
stack = xmalloc ((n_basic_blocks + 1) * sizeof (edge_iterator));
stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
sp = 0;
/* Allocate bitmap to track nodes that have been visited. */
@ -738,7 +738,7 @@ pre_and_rev_post_order_compute (int *pre_order, int *rev_post_order,
sbitmap visited;
/* Allocate stack for back-tracking up CFG. */
stack = xmalloc ((n_basic_blocks + 1) * sizeof (edge_iterator));
stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
sp = 0;
if (include_entry_exit)
@ -862,7 +862,7 @@ static void
flow_dfs_compute_reverse_init (depth_first_search_ds data)
{
/* Allocate stack for back-tracking up CFG. */
data->stack = xmalloc (n_basic_blocks * sizeof (basic_block));
data->stack = XNEWVEC (basic_block, n_basic_blocks);
data->sp = 0;
/* Allocate bitmap to track nodes that have been visited. */
@ -972,7 +972,7 @@ dfs_enumerate_from (basic_block bb, int reverse,
v_size = size;
}
st = xcalloc (rslt_max, sizeof (basic_block));
st = XCNEWVEC (basic_block, rslt_max);
rslt[tv++] = st[sp++] = bb;
MARK_VISITED (bb);
while (sp)


@ -484,8 +484,7 @@ try_forward_edges (int mode, basic_block b)
if (t)
{
if (!threaded_edges)
threaded_edges = xmalloc (sizeof (*threaded_edges)
* n_basic_blocks);
threaded_edges = XNEWVEC (edge, n_basic_blocks);
else
{
int i;


@ -77,8 +77,8 @@ verify_flow_info (void)
basic_block *last_visited;
timevar_push (TV_CFG_VERIFY);
last_visited = xcalloc (last_basic_block, sizeof (basic_block));
edge_checksum = xcalloc (last_basic_block, sizeof (size_t));
last_visited = XCNEWVEC (basic_block, last_basic_block);
edge_checksum = XCNEWVEC (size_t, last_basic_block);
/* Check bb chain & numbers. */
last_bb_seen = ENTRY_BLOCK_PTR;


@ -233,7 +233,7 @@ flow_loop_nodes_find (basic_block header, struct loop *loop)
if (loop->latch->loop_father != loop)
{
stack = xmalloc (n_basic_blocks * sizeof (basic_block));
stack = XNEWVEC (basic_block, n_basic_blocks);
sp = 0;
num_nodes++;
stack[sp++] = loop->latch;
@ -337,7 +337,7 @@ establish_preds (struct loop *loop)
if (loop->pred)
free (loop->pred);
loop->pred = xmalloc (sizeof (struct loop *) * loop->depth);
loop->pred = XNEWVEC (struct loop *, loop->depth);
memcpy (loop->pred, father->pred, sizeof (struct loop *) * father->depth);
loop->pred[father->depth] = father;
@ -667,10 +667,10 @@ flow_loops_find (struct loops *loops)
}
/* Allocate loop structures. */
loops->parray = xcalloc (num_loops + 1, sizeof (struct loop *));
loops->parray = XCNEWVEC (struct loop *, num_loops + 1);
/* Dummy loop containing whole function. */
loops->parray[0] = xcalloc (1, sizeof (struct loop));
loops->parray[0] = XCNEW (struct loop);
loops->parray[0]->next = NULL;
loops->parray[0]->inner = NULL;
loops->parray[0]->outer = NULL;
@ -694,8 +694,8 @@ flow_loops_find (struct loops *loops)
{
/* Compute depth first search order of the CFG so that outer
natural loops will be found before inner natural loops. */
dfs_order = xmalloc (n_basic_blocks * sizeof (int));
rc_order = xmalloc (n_basic_blocks * sizeof (int));
dfs_order = XNEWVEC (int, n_basic_blocks);
rc_order = XNEWVEC (int, n_basic_blocks);
pre_and_rev_post_order_compute (dfs_order, rc_order, false);
/* Save CFG derived information to avoid recomputing it. */
@ -716,7 +716,7 @@ flow_loops_find (struct loops *loops)
header = BASIC_BLOCK (rc_order[b]);
loop = loops->parray[num_loops] = xcalloc (1, sizeof (struct loop));
loop = loops->parray[num_loops] = XCNEW (struct loop);
loop->header = header;
loop->num = num_loops;
@ -789,7 +789,7 @@ get_loop_body (const struct loop *loop)
gcc_assert (loop->num_nodes);
tovisit = xcalloc (loop->num_nodes, sizeof (basic_block));
tovisit = XCNEWVEC (basic_block, loop->num_nodes);
tovisit[tv++] = loop->header;
if (loop->latch == EXIT_BLOCK_PTR)
@ -852,7 +852,7 @@ get_loop_body_in_dom_order (const struct loop *loop)
gcc_assert (loop->num_nodes);
tovisit = xcalloc (loop->num_nodes, sizeof (basic_block));
tovisit = XCNEWVEC (basic_block, loop->num_nodes);
gcc_assert (loop->latch != EXIT_BLOCK_PTR);
@ -878,7 +878,7 @@ get_loop_body_in_bfs_order (const struct loop *loop)
gcc_assert (loop->num_nodes);
gcc_assert (loop->latch != EXIT_BLOCK_PTR);
blocks = xcalloc (loop->num_nodes, sizeof (basic_block));
blocks = XCNEWVEC (basic_block, loop->num_nodes);
visited = BITMAP_ALLOC (NULL);
bb = loop->header;
@ -932,7 +932,7 @@ get_loop_exit_edges (const struct loop *loop, unsigned int *num_edges)
FOR_EACH_EDGE (e, ei, body[i]->succs)
if (!flow_bb_inside_loop_p (loop, e->dest))
n++;
edges = xmalloc (n * sizeof (edge));
edges = XNEWVEC (edge, n);
*num_edges = n;
n = 0;
for (i = 0; i < loop->num_nodes; i++)
@ -1062,7 +1062,7 @@ verify_loop_structure (struct loops *loops)
edge e;
/* Check sizes. */
sizes = xcalloc (loops->num, sizeof (int));
sizes = XCNEWVEC (unsigned, loops->num);
sizes[0] = 2;
FOR_EACH_BB (bb)


@ -111,10 +111,10 @@ void dump_graph (FILE *f, struct graph *g)
static struct graph *
new_graph (int n_vertices)
{
struct graph *g = xmalloc (sizeof (struct graph));
struct graph *g = XNEW (struct graph);
g->n_vertices = n_vertices;
g->vertices = xcalloc (n_vertices, sizeof (struct vertex));
g->vertices = XCNEWVEC (struct vertex, n_vertices);
return g;
}
@ -271,8 +271,8 @@ mark_irreducible_loops (struct loops *loops)
edge_iterator ei;
int i, src, dest;
struct graph *g;
int *queue1 = xmalloc ((last_basic_block + loops->num) * sizeof (int));
int *queue2 = xmalloc ((last_basic_block + loops->num) * sizeof (int));
int *queue1 = XNEWVEC (int, last_basic_block + loops->num);
int *queue2 = XNEWVEC (int, last_basic_block + loops->num);
int nq, depth;
struct loop *cloop;


@ -85,7 +85,7 @@ find_path (edge e, basic_block **bbs)
gcc_assert (EDGE_COUNT (e->dest->preds) <= 1);
/* Find bbs in the path. */
*bbs = xcalloc (n_basic_blocks, sizeof (basic_block));
*bbs = XCNEWVEC (basic_block, n_basic_blocks);
return dfs_enumerate_from (e->dest, 0, rpe_enum_p, *bbs,
n_basic_blocks, e->dest);
}
@ -159,7 +159,7 @@ fix_bb_placements (struct loops *loops, basic_block from)
/* Prevent us from going out of the base_loop. */
SET_BIT (in_queue, base_loop->header->index);
queue = xmalloc ((base_loop->num_nodes + 1) * sizeof (basic_block));
queue = XNEWVEC (basic_block, base_loop->num_nodes + 1);
qtop = queue + base_loop->num_nodes + 1;
qbeg = queue;
qend = queue + 1;
@ -244,7 +244,7 @@ fix_irreducible_loops (basic_block from)
on_stack = sbitmap_alloc (last_basic_block);
sbitmap_zero (on_stack);
SET_BIT (on_stack, from->index);
stack = xmalloc (from->loop_father->num_nodes * sizeof (basic_block));
stack = XNEWVEC (basic_block, from->loop_father->num_nodes);
stack[0] = from;
stack_top = 1;
@ -266,7 +266,7 @@ fix_irreducible_loops (basic_block from)
else
{
num_edges = EDGE_COUNT (bb->succs);
edges = xmalloc (num_edges * sizeof (edge));
edges = XNEWVEC (edge, num_edges);
FOR_EACH_EDGE (e, ei, bb->succs)
edges[ei.index] = e;
}
@ -331,7 +331,7 @@ remove_path (struct loops *loops, edge e)
nrem = find_path (e, &rem_bbs);
n_bord_bbs = 0;
bord_bbs = xcalloc (n_basic_blocks, sizeof (basic_block));
bord_bbs = XCNEWVEC (basic_block, n_basic_blocks);
seen = sbitmap_alloc (last_basic_block);
sbitmap_zero (seen);
@ -354,7 +354,7 @@ remove_path (struct loops *loops, edge e)
from = e->src;
deleted = loop_delete_branch_edge (e, 1);
gcc_assert (deleted);
dom_bbs = xcalloc (n_basic_blocks, sizeof (basic_block));
dom_bbs = XCNEWVEC (basic_block, n_basic_blocks);
/* Cancel loops contained in the path. */
for (i = 0; i < nrem; i++)
@ -423,7 +423,7 @@ add_loop (struct loops *loops, struct loop *loop)
loop->level = 1;
/* Find its nodes. */
bbs = xcalloc (n_basic_blocks, sizeof (basic_block));
bbs = XCNEWVEC (basic_block, n_basic_blocks);
n = dfs_enumerate_from (loop->latch, 1, alp_enum_p,
bbs, n_basic_blocks, loop->header);
@ -464,7 +464,7 @@ loopify (struct loops *loops, edge latch_edge, edge header_edge,
basic_block *dom_bbs, *body;
unsigned n_dom_bbs, i;
sbitmap seen;
struct loop *loop = xcalloc (1, sizeof (struct loop));
struct loop *loop = XCNEW (struct loop);
struct loop *outer = succ_bb->loop_father->outer;
int freq, prob, tot_prob;
gcov_type cnt;
@ -515,7 +515,7 @@ loopify (struct loops *loops, edge latch_edge, edge header_edge,
scale_loop_frequencies (succ_bb->loop_father, tot_prob - prob, tot_prob);
/* Update dominators of blocks outside of LOOP. */
dom_bbs = xcalloc (n_basic_blocks, sizeof (basic_block));
dom_bbs = XCNEWVEC (basic_block, n_basic_blocks);
n_dom_bbs = 0;
seen = sbitmap_alloc (last_basic_block);
sbitmap_zero (seen);
@ -681,7 +681,7 @@ struct loop *
duplicate_loop (struct loops *loops, struct loop *loop, struct loop *target)
{
struct loop *cloop;
cloop = xcalloc (1, sizeof (struct loop));
cloop = XCNEW (struct loop);
place_new_loop (loops, cloop);
/* Initialize copied loop. */
@ -867,7 +867,7 @@ duplicate_loop_to_header_edge (struct loop *loop, edge e, struct loops *loops,
free (bbs);
return false;
}
new_bbs = xmalloc (sizeof (basic_block) * loop->num_nodes);
new_bbs = XNEWVEC (basic_block, loop->num_nodes);
/* In case we are doing loop peeling and the loop is in the middle of
irreducible region, the peeled copies will be inside it too. */
@ -894,7 +894,7 @@ duplicate_loop_to_header_edge (struct loop *loop, edge e, struct loops *loops,
prob_pass_wont_exit =
RDIV (REG_BR_PROB_BASE * (freq_le + freq_out_orig), freq_in);
scale_step = xmalloc (ndupl * sizeof (int));
scale_step = XNEWVEC (int, ndupl);
for (i = 1; i <= ndupl; i++)
scale_step[i - 1] = TEST_BIT (wont_exit, i)
@ -957,13 +957,13 @@ duplicate_loop_to_header_edge (struct loop *loop, edge e, struct loops *loops,
n_orig_loops = 0;
for (aloop = loop->inner; aloop; aloop = aloop->next)
n_orig_loops++;
orig_loops = xcalloc (n_orig_loops, sizeof (struct loop *));
orig_loops = XCNEWVEC (struct loop *, n_orig_loops);
for (aloop = loop->inner, i = 0; aloop; aloop = aloop->next, i++)
orig_loops[i] = aloop;
loop->copy = target;
first_active = xmalloc (n * sizeof (basic_block));
first_active = XNEWVEC (basic_block, n);
if (is_latch)
{
memcpy (first_active, bbs, n * sizeof (basic_block));
@ -1296,7 +1296,7 @@ create_loop_notes (void)
free_dominance_info (CDI_DOMINATORS);
if (loops.num > 1)
{
last = xcalloc (loops.num, sizeof (basic_block));
last = XCNEWVEC (basic_block, loops.num);
FOR_EACH_BB (bb)
{
@ -1304,8 +1304,8 @@ create_loop_notes (void)
last[loop->num] = bb;
}
first = xcalloc (loops.num, sizeof (basic_block));
stack = xcalloc (loops.num, sizeof (struct loop *));
first = XCNEWVEC (basic_block, loops.num);
stack = XCNEWVEC (struct loop *, loops.num);
top = stack;
FOR_EACH_BB (bb)


@ -1806,9 +1806,9 @@ print_rtl_with_bb (FILE *outf, rtx rtx_first)
{
enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
int max_uid = get_max_uid ();
basic_block *start = xcalloc (max_uid, sizeof (basic_block));
basic_block *end = xcalloc (max_uid, sizeof (basic_block));
enum bb_state *in_bb_p = xcalloc (max_uid, sizeof (enum bb_state));
basic_block *start = XCNEWVEC (basic_block, max_uid);
basic_block *end = XCNEWVEC (basic_block, max_uid);
enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
basic_block bb;
@ -1916,7 +1916,7 @@ rtl_verify_flow_info_1 (void)
int err = 0;
basic_block bb;
bb_info = xcalloc (max_uid, sizeof (basic_block));
bb_info = XCNEWVEC (basic_block, max_uid);
FOR_EACH_BB_REVERSE (bb)
{


@ -1143,8 +1143,7 @@ static void
cgraph_expand_all_functions (void)
{
struct cgraph_node *node;
struct cgraph_node **order =
xcalloc (cgraph_n_nodes, sizeof (struct cgraph_node *));
struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
int order_pos = 0, new_order_pos = 0;
int i;


@ -579,7 +579,7 @@ find_a_file (struct path_prefix *pprefix, const char *name)
len += strlen (HOST_EXECUTABLE_SUFFIX);
#endif
temp = xmalloc (len);
temp = XNEWVEC (char, len);
/* Determine the filename to execute (special case for absolute paths). */
@ -667,7 +667,7 @@ add_prefix (struct path_prefix *pprefix, const char *prefix)
if (len > pprefix->max_len)
pprefix->max_len = len;
pl = xmalloc (sizeof (struct prefix_list));
pl = XNEW (struct prefix_list);
pl->prefix = xstrdup (prefix);
if (*prev)
@ -694,7 +694,7 @@ static void
prefix_from_string (const char *p, struct path_prefix *pprefix)
{
const char *startp, *endp;
char *nstore = xmalloc (strlen (p) + 3);
char *nstore = XNEWVEC (char, strlen (p) + 3);
if (debug)
fprintf (stderr, "Convert string '%s' into prefixes, separator = '%c'\n", p, PATH_SEPARATOR);
@ -1367,7 +1367,7 @@ main (int argc, char **argv)
/* Strip now if it was requested on the command line. */
if (strip_flag)
{
char **real_strip_argv = xcalloc (sizeof (char *), 3);
char **real_strip_argv = XCNEWVEC (char *, 3);
const char ** strip_argv = (const char **) real_strip_argv;
strip_argv[0] = strip_file_name;
@ -1801,7 +1801,7 @@ write_c_file_stat (FILE *stream, const char *name ATTRIBUTE_UNUSED)
}
}
/* q points to null at end of the string (or . of the .so version) */
prefix = xmalloc (q - p + 1);
prefix = XNEWVEC (char, q - p + 1);
strncpy (prefix, p, q - p);
prefix[q - p] = 0;
for (r = prefix; *r; r++)


@ -503,7 +503,7 @@ do_SUBST (rtx *into, rtx newval)
if (undobuf.frees)
buf = undobuf.frees, undobuf.frees = buf->next;
else
buf = xmalloc (sizeof (struct undo));
buf = XNEW (struct undo);
buf->kind = UNDO_RTX;
buf->where.r = into;
@ -531,7 +531,7 @@ do_SUBST_INT (int *into, int newval)
if (undobuf.frees)
buf = undobuf.frees, undobuf.frees = buf->next;
else
buf = xmalloc (sizeof (struct undo));
buf = XNEW (struct undo);
buf->kind = UNDO_INT;
buf->where.i = into;
@ -560,7 +560,7 @@ do_SUBST_MODE (rtx *into, enum machine_mode newval)
if (undobuf.frees)
buf = undobuf.frees, undobuf.frees = buf->next;
else
buf = xmalloc (sizeof (struct undo));
buf = XNEW (struct undo);
buf->kind = UNDO_MODE;
buf->where.r = into;
@ -708,7 +708,7 @@ combine_instructions (rtx f, unsigned int nregs)
rtl_hooks = combine_rtl_hooks;
reg_stat = xcalloc (nregs, sizeof (struct reg_stat));
reg_stat = XCNEWVEC (struct reg_stat, nregs);
init_recog_no_volatile ();
@ -718,7 +718,7 @@ combine_instructions (rtx f, unsigned int nregs)
if (INSN_UID (insn) > i)
i = INSN_UID (insn);
uid_cuid = xmalloc ((i + 1) * sizeof (int));
uid_cuid = XNEWVEC (int, i + 1);
max_uid_cuid = i;
nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
@ -747,7 +747,7 @@ combine_instructions (rtx f, unsigned int nregs)
sbitmap_zero (refresh_blocks);
/* Allocate array of current insn_rtx_costs. */
uid_insn_cost = xcalloc (max_uid_cuid + 1, sizeof (int));
uid_insn_cost = XCNEWVEC (int, max_uid_cuid + 1);
last_insn_cost = max_uid_cuid;
for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))


@ -7473,7 +7473,7 @@ add_minipool_forward_ref (Mfix *fix)
any existing entry. Otherwise, we insert the new fix before
MAX_MP and, if necessary, adjust the constraints on the other
entries. */
mp = xmalloc (sizeof (* mp));
mp = XNEW (Mnode);
mp->fix_size = fix->fix_size;
mp->mode = fix->mode;
mp->value = fix->value;
@ -7671,7 +7671,7 @@ add_minipool_backward_ref (Mfix *fix)
}
/* We need to create a new entry. */
mp = xmalloc (sizeof (* mp));
mp = XNEW (Mnode);
mp->fix_size = fix->fix_size;
mp->mode = fix->mode;
mp->value = fix->value;


@ -62,7 +62,7 @@ static struct align_stack * field_align_stack = NULL;
static void
push_field_alignment (int bit_alignment)
{
align_stack *entry = (align_stack *) xmalloc (sizeof (align_stack));
align_stack *entry = XNEW (align_stack);
entry->alignment = maximum_field_alignment;
entry->prev = field_align_stack;
@ -194,7 +194,7 @@ add_framework (const char *name, size_t len, cpp_dir *dir)
frameworks_in_use = xrealloc (frameworks_in_use,
max_frameworks*sizeof(*frameworks_in_use));
}
dir_name = xmalloc (len + 1);
dir_name = XNEWVEC (char, len + 1);
memcpy (dir_name, name, len);
dir_name[len] = '\0';
frameworks_in_use[num_frameworks].name = dir_name;
@ -261,7 +261,7 @@ framework_construct_pathname (const char *fname, cpp_dir *dir)
if (fast_dir && dir != fast_dir)
return 0;
frname = xmalloc (strlen (fname) + dir->len + 2
frname = XNEWVEC (char, strlen (fname) + dir->len + 2
+ strlen(".framework/") + strlen("PrivateHeaders"));
strncpy (&frname[0], dir->name, dir->len);
frname_len = dir->len;
@ -349,7 +349,7 @@ find_subframework_file (const char *fname, const char *pname)
into
sfrname = /System/Library/Frameworks/Foundation.framework/Frameworks/CarbonCore.framework/Headers/OSUtils.h */
sfrname = (char *) xmalloc (strlen (pname) + strlen (fname) + 2 +
sfrname = XNEWVEC (char, strlen (pname) + strlen (fname) + 2 +
strlen ("Frameworks/") + strlen (".framework/")
+ strlen ("PrivateHeaders"));
@ -405,7 +405,7 @@ add_system_framework_path (char *path)
int cxx_aware = 1;
cpp_dir *p;
p = xmalloc (sizeof (cpp_dir));
p = XNEW (cpp_dir);
p->next = NULL;
p->name = path;
p->sysp = 1 + !cxx_aware;
@ -423,7 +423,7 @@ add_framework_path (char *path)
{
cpp_dir *p;
p = xmalloc (sizeof (cpp_dir));
p = XNEW (cpp_dir);
p->next = NULL;
p->name = path;
p->sysp = 0;


@ -1342,7 +1342,7 @@ darwin_emit_unwind_label (FILE *file, tree decl, int for_eh, int empty)
if (! for_eh)
suffix = ".eh1";
lab = xmalloc (strlen (prefix)
lab = XNEWVEC (char, strlen (prefix)
+ base_len + strlen (suffix) + quotes_len + 1);
lab[0] = '\0';


@ -147,7 +147,7 @@ arc_eq (const void *arcp1, const void *arcp2)
conflict_graph
conflict_graph_new (int num_regs)
{
conflict_graph graph = xmalloc (sizeof (struct conflict_graph_def));
conflict_graph graph = XNEW (struct conflict_graph_def);
graph->num_regs = num_regs;
/* Set up the hash table. No delete action is specified; memory
@ -159,7 +159,7 @@ conflict_graph_new (int num_regs)
obstack_init (&graph->arc_obstack);
/* Create and zero the lookup table by register number. */
graph->neighbor_heads = xcalloc (num_regs, sizeof (conflict_graph_arc));
graph->neighbor_heads = XCNEWVEC (conflict_graph_arc, num_regs);
return graph;
}


@ -262,12 +262,12 @@ read_counts_file (void)
entry = *slot;
if (!entry)
{
*slot = entry = xcalloc (1, sizeof (counts_entry_t));
*slot = entry = XCNEW (counts_entry_t);
entry->ident = elt.ident;
entry->ctr = elt.ctr;
entry->checksum = checksum;
entry->summary.num = n_counts;
entry->counts = xcalloc (n_counts, sizeof (gcov_type));
entry->counts = XCNEWVEC (gcov_type, n_counts);
}
else if (entry->checksum != checksum)
{
@ -569,7 +569,7 @@ coverage_end_function (void)
{
struct function_list *item;
item = xmalloc (sizeof (struct function_list));
item = XNEW (struct function_list);
*functions_tail = item;
functions_tail = &item->next;
@ -951,12 +951,12 @@ coverage_init (const char *filename)
int len = strlen (filename);
/* Name of da file. */
da_file_name = xmalloc (len + strlen (GCOV_DATA_SUFFIX) + 1);
da_file_name = XNEWVEC (char, len + strlen (GCOV_DATA_SUFFIX) + 1);
strcpy (da_file_name, filename);
strcat (da_file_name, GCOV_DATA_SUFFIX);
/* Name of bbg file. */
bbg_file_name = xmalloc (len + strlen (GCOV_NOTE_SUFFIX) + 1);
bbg_file_name = XNEWVEC (char, len + strlen (GCOV_NOTE_SUFFIX) + 1);
strcpy (bbg_file_name, filename);
strcat (bbg_file_name, GCOV_NOTE_SUFFIX);


@ -172,7 +172,7 @@ lang_specific_driver (int *in_argc, const char *const **in_argv,
return;
/* One more slot for a terminating null. */
new_argv = xmalloc ((new_argc + 1) * sizeof(char *));
new_argv = XNEWVEC (const char *, new_argc + 1);
new_argv[0] = argv[0];
j = 1;


@ -867,8 +867,7 @@ init_cse_reg_info (unsigned int nregs)
/* Reallocate the table with NEW_SIZE entries. */
if (cse_reg_info_table)
free (cse_reg_info_table);
cse_reg_info_table = xmalloc (sizeof (struct cse_reg_info)
* new_size);
cse_reg_info_table = XNEWVEC (struct cse_reg_info, new_size);
cse_reg_info_table_size = new_size;
cse_reg_info_table_first_uninitialized = 0;
}
@ -1511,7 +1510,7 @@ insert (rtx x, struct table_elt *classp, unsigned int hash, enum machine_mode mo
if (elt)
free_element_chain = elt->next_same_hash;
else
elt = xmalloc (sizeof (struct table_elt));
elt = XNEW (struct table_elt);
elt->exp = x;
elt->canon_exp = NULL_RTX;
@ -6761,8 +6760,7 @@ cse_main (rtx f, int nregs, FILE *file)
init_cse_reg_info (nregs);
val.path = xmalloc (sizeof (struct branch_path)
* PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
val.path = XNEWVEC (struct branch_path, PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
cse_jumps_altered = 0;
recorded_label_ref = 0;
@ -6774,12 +6772,12 @@ cse_main (rtx f, int nregs, FILE *file)
init_recog ();
init_alias_analysis ();
reg_eqv_table = xmalloc (nregs * sizeof (struct reg_eqv_elem));
reg_eqv_table = XNEWVEC (struct reg_eqv_elem, nregs);
/* Find the largest uid. */
max_uid = get_max_uid ();
uid_cuid = xcalloc (max_uid + 1, sizeof (int));
uid_cuid = XCNEWVEC (int, max_uid + 1);
/* Compute the mapping from uids to cuids.
CUIDs are numbers assigned to insns, like uids,
@ -6884,7 +6882,7 @@ cse_basic_block (rtx from, rtx to, struct branch_path *next_branch)
int no_conflict = 0;
/* Allocate the space needed by qty_table. */
qty_table = xmalloc (max_qty * sizeof (struct qty_table_elem));
qty_table = XNEWVEC (struct qty_table_elem, max_qty);
new_basic_block ();
@ -7047,8 +7045,7 @@ cse_basic_block (rtx from, rtx to, struct branch_path *next_branch)
following branches in this case. */
to_usage = 0;
val.path_size = 0;
val.path = xmalloc (sizeof (struct branch_path)
* PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
val.path = XNEWVEC (struct branch_path, PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
cse_end_of_basic_block (insn, &val, 0, 0);
free (val.path);
@ -7351,7 +7348,7 @@ delete_trivially_dead_insns (rtx insns, int nreg)
timevar_push (TV_DELETE_TRIVIALLY_DEAD);
/* First count the number of times each register is used. */
counts = xcalloc (nreg, sizeof (int));
counts = XCNEWVEC (int, nreg);
for (insn = insns; insn; insn = NEXT_INSN (insn))
if (INSN_P (insn))
count_reg_usage (insn, counts, NULL_RTX, 1);


@ -1472,9 +1472,9 @@ cselib_init (bool record_memory)
/* Some space for newly emit instructions so we don't end up
reallocating in between passes. */
reg_values_size = cselib_nregs + (63 + cselib_nregs) / 16;
reg_values = xcalloc (reg_values_size, sizeof (reg_values));
reg_values = XCNEWVEC (struct elt_list *, reg_values_size);
}
used_regs = xmalloc (sizeof (*used_regs) * cselib_nregs);
used_regs = XNEWVEC (unsigned int, cselib_nregs);
n_used_regs = 0;
cselib_hash_table = htab_create (31, get_value_hash,
entry_and_rtx_equal_p, NULL);


@ -1049,7 +1049,7 @@ dbxout_init (const char *input_file_name)
next_type_number = 1;
#ifdef DBX_USE_BINCL
current_file = xmalloc (sizeof *current_file);
current_file = XNEW (struct dbx_file);
current_file->next = NULL;
current_file->file_number = 0;
current_file->next_type_number = 1;
@ -1158,7 +1158,7 @@ dbxout_start_source_file (unsigned int line ATTRIBUTE_UNUSED,
const char *filename ATTRIBUTE_UNUSED)
{
#ifdef DBX_USE_BINCL
struct dbx_file *n = xmalloc (sizeof *n);
struct dbx_file *n = XNEW (struct dbx_file);
n->next = current_file;
n->next_type_number = 1;


@ -310,7 +310,7 @@ static void df_set_bb_info (struct dataflow *, unsigned int, void *);
struct df *
df_init (int flags)
{
struct df *df = xcalloc (1, sizeof (struct df));
struct df *df = XCNEW (struct df);
df->flags = flags;
/* This is executed once per compilation to initialize platform
@ -342,7 +342,7 @@ df_add_problem (struct df *df, struct df_problem *problem)
return dflow;
/* Make a new one and add it to the end. */
dflow = xcalloc (1, sizeof (struct dataflow));
dflow = XCNEW (struct dataflow);
dflow->df = df;
dflow->problem = problem;
df->problems_in_order[df->num_problems_defined++] = dflow;
@ -731,7 +731,7 @@ df_analyze_problem (struct dataflow *dflow,
void
df_analyze (struct df *df)
{
int *postorder = xmalloc (sizeof (int) *last_basic_block);
int *postorder = XNEWVEC (int, last_basic_block);
bitmap current_all_blocks = BITMAP_ALLOC (NULL);
int n_blocks;
int i;


@ -385,11 +385,10 @@ df_ru_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
}
else
{
struct df_ru_problem_data *problem_data =
xmalloc (sizeof (struct df_ru_problem_data));
struct df_ru_problem_data *problem_data = XNEW (struct df_ru_problem_data);
dflow->problem_data = problem_data;
problem_data->use_sites = xcalloc (reg_size, sizeof (bitmap));
problem_data->use_sites = XCNEWVEC (bitmap, reg_size);
problem_data->use_sites_size = reg_size;
problem_data->sparse_invalidated_by_call = BITMAP_ALLOC (NULL);
problem_data->dense_invalidated_by_call = BITMAP_ALLOC (NULL);
@ -909,11 +908,10 @@ df_rd_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
}
else
{
struct df_rd_problem_data *problem_data =
xmalloc (sizeof (struct df_rd_problem_data));
struct df_rd_problem_data *problem_data = XNEW (struct df_rd_problem_data);
dflow->problem_data = problem_data;
problem_data->def_sites = xcalloc (reg_size, sizeof (bitmap));
problem_data->def_sites = XCNEWVEC (bitmap, reg_size);
problem_data->def_sites_size = reg_size;
problem_data->sparse_invalidated_by_call = BITMAP_ALLOC (NULL);
problem_data->dense_invalidated_by_call = BITMAP_ALLOC (NULL);
@ -2130,7 +2128,7 @@ df_urec_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
if (!dflow->problem_data)
{
problem_data = xmalloc (sizeof (struct df_urec_problem_data));
problem_data = XNEW (struct df_urec_problem_data);
dflow->problem_data = problem_data;
}
problem_data->earlyclobbers_found = false;
@ -3066,7 +3064,7 @@ struct dataflow *
df_chain_add_problem (struct df *df, int flags)
{
struct df_chain_problem_data *problem_data =
xmalloc (sizeof (struct df_chain_problem_data));
XNEW (struct df_chain_problem_data);
struct dataflow *dflow = df_add_problem (df, &problem_CHAIN);
dflow->problem_data = problem_data;
@ -3101,8 +3099,7 @@ df_ri_alloc (struct dataflow *dflow, bitmap blocks_to_rescan ATTRIBUTE_UNUSED)
if (!dflow->problem_data)
{
struct df_ri_problem_data *problem_data =
xmalloc (sizeof (struct df_ri_problem_data));
struct df_ri_problem_data *problem_data = XNEW (struct df_ri_problem_data);
dflow->problem_data = problem_data;
}


@ -219,7 +219,7 @@ df_scan_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
sizeof (struct df_scan_bb_info),
block_size);
problem_data = xmalloc (sizeof (struct df_scan_problem_data));
problem_data = XNEW (struct df_scan_problem_data);
dflow->problem_data = problem_data;
problem_data->ref_pool


@ -93,7 +93,7 @@ diagnostic_initialize (diagnostic_context *context)
{
/* Allocate a basic pretty-printer. Clients will replace this a
much more elaborated pretty-printer if they wish. */
context->printer = xmalloc (sizeof (pretty_printer));
context->printer = XNEW (pretty_printer);
pp_construct (context->printer, NULL, 0);
/* By default, diagnostics are sent to stderr. */
context->printer->buffer->stream = stderr;


@ -132,10 +132,10 @@ static unsigned n_bbs_in_dom_tree[2];
{ \
unsigned int i = 1; /* Catch content == i. */ \
if (! (content)) \
(var) = xcalloc ((num), sizeof (type)); \
(var) = XCNEWVEC (type, num); \
else \
{ \
(var) = xmalloc ((num) * sizeof (type)); \
(var) = XNEWVEC (type, (num)); \
for (i = 0; i < num; i++) \
(var)[i] = (content); \
} \
@ -213,7 +213,7 @@ calc_dfs_tree_nonrec (struct dom_info *di, basic_block bb,
/* Ending block. */
basic_block ex_block;
stack = xmalloc ((n_basic_blocks + 1) * sizeof (edge_iterator));
stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
sp = 0;
/* Initialize our border blocks, and the first edge. */
@ -722,7 +722,7 @@ get_dominated_by (enum cdi_direction dir, basic_block bb, basic_block **bbs)
for (ason = son->right, n = 1; ason != son; ason = ason->right)
n++;
*bbs = xmalloc (n * sizeof (basic_block));
*bbs = XNEWVEC (basic_block, n);
(*bbs)[0] = son->data;
for (ason = son->right, n = 1; ason != son; ason = ason->right)
(*bbs)[n++] = ason->data;


@ -405,7 +405,7 @@ expand_builtin_dwarf_sp_column (void)
static inline char *
stripattributes (const char *s)
{
char *stripped = xmalloc (strlen (s) + 2);
char *stripped = XNEWVEC (char, strlen (s) + 2);
char *p = stripped;
*p++ = '*';
@ -6315,7 +6315,7 @@ check_duplicate_cu (dw_die_ref cu, htab_t htable, unsigned int *sym_num)
return 1;
}
entry = xcalloc (1, sizeof (struct cu_hash_table_entry));
entry = XCNEW (struct cu_hash_table_entry);
entry->cu = cu;
entry->min_comdat_num = *sym_num = last->max_comdat_num;
entry->next = *slot;


@ -1121,7 +1121,7 @@ add_ttypes_entry (htab_t ttypes_hash, tree type)
{
/* Filter value is a 1 based table index. */
n = xmalloc (sizeof (*n));
n = XNEW (struct ttypes_filter);
n->t = type;
n->filter = VEC_length (tree, cfun->eh->ttype_data) + 1;
*slot = n;
@ -1149,7 +1149,7 @@ add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
{
/* Filter value is a -1 based byte index into a uleb128 buffer. */
n = xmalloc (sizeof (*n));
n = XNEW (struct ttypes_filter);
n->t = list;
n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
*slot = n;
@ -1956,8 +1956,7 @@ sjlj_build_landing_pads (void)
{
struct sjlj_lp_info *lp_info;
lp_info = xcalloc (cfun->eh->last_region_number + 1,
sizeof (struct sjlj_lp_info));
lp_info = XCNEWVEC (struct sjlj_lp_info, cfun->eh->last_region_number + 1);
if (sjlj_find_directly_reachable_regions (lp_info))
{


@ -693,8 +693,7 @@ compute_alignments (void)
max_labelno = max_label_num ();
min_labelno = get_first_label_num ();
label_align = xcalloc (max_labelno - min_labelno + 1,
sizeof (struct label_alignment));
label_align = XCNEWVEC (struct label_alignment, max_labelno - min_labelno + 1);
/* If not optimizing or optimizing for size, don't assign any alignments. */
if (! optimize || optimize_size)
@ -817,7 +816,7 @@ shorten_branches (rtx first ATTRIBUTE_UNUSED)
/* Free uid_shuid before reallocating it. */
free (uid_shuid);
uid_shuid = xmalloc (max_uid * sizeof *uid_shuid);
uid_shuid = XNEWVEC (int, max_uid);
if (max_labelno != max_label_num ())
{
@ -926,20 +925,20 @@ shorten_branches (rtx first ATTRIBUTE_UNUSED)
#ifdef HAVE_ATTR_length
/* Allocate the rest of the arrays. */
insn_lengths = xmalloc (max_uid * sizeof (*insn_lengths));
insn_lengths = XNEWVEC (int, max_uid);
insn_lengths_max_uid = max_uid;
/* Syntax errors can lead to labels being outside of the main insn stream.
Initialize insn_addresses, so that we get reproducible results. */
INSN_ADDRESSES_ALLOC (max_uid);
varying_length = xcalloc (max_uid, sizeof (char));
varying_length = XCNEWVEC (char, max_uid);
/* Initialize uid_align. We scan instructions
from end to start, and keep in align_tab[n] the last seen insn
that does an alignment of at least n+1, i.e. the successor
in the alignment chain for an insn that does / has a known
alignment of n. */
uid_align = xcalloc (max_uid, sizeof *uid_align);
uid_align = XCNEWVEC (rtx, max_uid);
for (i = MAX_CODE_ALIGN; --i >= 0;)
align_tab[i] = NULL_RTX;


@ -1187,7 +1187,7 @@ main (int argc, char **argv)
exit (FATAL_EXIT_CODE);
}
inf_size = sbuf.st_size;
inf_buffer = xmalloc (inf_size + 2);
inf_buffer = XNEWVEC (char, inf_size + 2);
inf_ptr = inf_buffer;
to_read = inf_size;


@ -577,7 +577,7 @@ update_life_info (sbitmap blocks, enum update_life_extent extent,
ndead = 0;
if ((prop_flags & PROP_REG_INFO) && !reg_deaths)
reg_deaths = xcalloc (sizeof (*reg_deaths), max_regno);
reg_deaths = XCNEWVEC (int, max_regno);
timevar_push ((extent == UPDATE_LIFE_LOCAL || blocks)
? TV_LIFE_UPDATE : TV_LIFE);
@ -1060,12 +1060,12 @@ calculate_global_regs_live (sbitmap blocks_in, sbitmap blocks_out, int flags)
SET_REGNO_REG_SET (invalidated_by_call, i);
/* Allocate space for the sets of local properties. */
local_sets = xcalloc (last_basic_block, sizeof (regset));
cond_local_sets = xcalloc (last_basic_block, sizeof (regset));
local_sets = XCNEWVEC (bitmap, last_basic_block);
cond_local_sets = XCNEWVEC (bitmap, last_basic_block);
/* Create a worklist. Allocate an extra slot for the `head == tail'
style test for an empty queue doesn't work with a full queue. */
queue = xmalloc ((n_basic_blocks + 1) * sizeof (*queue));
queue = XNEWVEC (basic_block, n_basic_blocks + 1);
qtail = queue;
qhead = qend = queue + n_basic_blocks;
@ -1090,7 +1090,7 @@ calculate_global_regs_live (sbitmap blocks_in, sbitmap blocks_out, int flags)
}
}
block_accesses = xcalloc (last_basic_block, sizeof (int));
block_accesses = XCNEWVEC (int, last_basic_block);
/* We clean aux when we remove the initially-enqueued bbs, but we
don't enqueue ENTRY and EXIT initially, so clean them upfront and
@ -1574,7 +1574,7 @@ allocate_reg_life_data (void)
max_regno = max_reg_num ();
gcc_assert (!reg_deaths);
reg_deaths = xcalloc (sizeof (*reg_deaths), max_regno);
reg_deaths = XCNEWVEC (int, max_regno);
/* Recalculate the register space, in case it has grown. Old style
vector oriented regsets would set regset_{size,bytes} here also. */
@ -1940,7 +1940,7 @@ struct propagate_block_info *
init_propagate_block_info (basic_block bb, regset live, regset local_set,
regset cond_local_set, int flags)
{
struct propagate_block_info *pbi = xmalloc (sizeof (*pbi));
struct propagate_block_info *pbi = XNEW (struct propagate_block_info);
pbi->bb = bb;
pbi->reg_live = live;
@ -1953,7 +1953,7 @@ init_propagate_block_info (basic_block bb, regset live, regset local_set,
pbi->insn_num = 0;
if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
pbi->reg_next_use = xcalloc (max_reg_num (), sizeof (rtx));
pbi->reg_next_use = XCNEWVEC (rtx, max_reg_num ());
else
pbi->reg_next_use = NULL;
@ -2043,7 +2043,7 @@ init_propagate_block_info (basic_block bb, regset live, regset local_set,
struct reg_cond_life_info *rcli;
rtx cond;
rcli = xmalloc (sizeof (*rcli));
rcli = XNEW (struct reg_cond_life_info);
if (REGNO_REG_SET_P (bb_true->il.rtl->global_live_at_start,
i))
@ -3058,7 +3058,7 @@ mark_regno_cond_dead (struct propagate_block_info *pbi, int regno, rtx cond)
/* The register was unconditionally live previously.
Record the current condition as the condition under
which it is dead. */
rcli = xmalloc (sizeof (*rcli));
rcli = XNEW (struct reg_cond_life_info);
rcli->condition = cond;
rcli->stores = cond;
rcli->orig_condition = const0_rtx;
@ -3858,7 +3858,7 @@ mark_used_reg (struct propagate_block_info *pbi, rtx reg,
{
/* The register was not previously live at all. Record
the condition under which it is still dead. */
rcli = xmalloc (sizeof (*rcli));
rcli = XNEW (struct reg_cond_life_info);
rcli->condition = not_reg_cond (cond);
rcli->stores = const0_rtx;
rcli->orig_condition = const0_rtx;


@ -397,7 +397,7 @@ For more information about these matters, see the file named COPYING\n\n"));
if (argv[i][2] == '\0')
{
p = xmalloc (strlen (argv[i + 1]) + 2);
p = XNEWVEC (char, strlen (argv[i + 1]) + 2);
p[0] = '-';
p[1] = 'J';
strcpy (&p[2], argv[i + 1]);
@ -405,7 +405,7 @@ For more information about these matters, see the file named COPYING\n\n"));
}
else
{
p = xmalloc (strlen (argv[i]) + 1);
p = XNEWVEC (char, strlen (argv[i]) + 1);
strcpy (p, argv[i]);
}
append_arg (p);


@ -3744,7 +3744,7 @@ get_block_vector (tree block, int *n_blocks_p)
tree *block_vector;
*n_blocks_p = all_blocks (block, NULL);
block_vector = xmalloc (*n_blocks_p * sizeof (tree));
block_vector = XNEWVEC (tree, *n_blocks_p);
all_blocks (block, block_vector);
return block_vector;


@ -1829,7 +1829,7 @@ set_spec (const char *name, const char *spec)
if (!sl)
{
/* Not found - make it. */
sl = xmalloc (sizeof (struct spec_list));
sl = XNEW (struct spec_list);
sl->name = xstrdup (name);
sl->name_len = name_len;
sl->ptr_spec = &sl->ptr;
@ -1912,7 +1912,7 @@ static void
alloc_args (void)
{
argbuf_length = 10;
argbuf = xmalloc (argbuf_length * sizeof (const char *));
argbuf = XNEWVEC (const char *, argbuf_length);
}
/* Clear out the vector of arguments (after a command is executed). */
@ -1971,14 +1971,14 @@ load_specs (const char *filename)
pfatal_with_name (filename);
/* Read contents of file into BUFFER. */
buffer = xmalloc ((unsigned) statbuf.st_size + 1);
buffer = XNEWVEC (char, statbuf.st_size + 1);
readlen = read (desc, buffer, (unsigned) statbuf.st_size);
if (readlen < 0)
pfatal_with_name (filename);
buffer[readlen] = 0;
close (desc);
specs = xmalloc (readlen + 1);
specs = XNEWVEC (char, readlen + 1);
specs_p = specs;
for (buffer_p = buffer; buffer_p && *buffer_p; buffer_p++)
{
@ -2294,7 +2294,7 @@ record_temp_file (const char *filename, int always_delete, int fail_delete)
if (! strcmp (name, temp->name))
goto already1;
temp = xmalloc (sizeof (struct temp_file));
temp = XNEW (struct temp_file);
temp->next = always_delete_queue;
temp->name = name;
always_delete_queue = temp;
@ -2309,7 +2309,7 @@ record_temp_file (const char *filename, int always_delete, int fail_delete)
if (! strcmp (name, temp->name))
goto already2;
temp = xmalloc (sizeof (struct temp_file));
temp = XNEW (struct temp_file);
temp->next = failure_delete_queue;
temp->name = name;
failure_delete_queue = temp;
@ -2440,7 +2440,7 @@ for_each_path (const struct path_prefix *paths,
len += suffix_len;
else
len += multi_os_dir_len;
path = xmalloc (len);
path = XNEWVEC (char, len);
}
for (pl = paths->plist; pl != 0; pl = pl->next)
@ -2738,7 +2738,7 @@ add_prefix (struct path_prefix *pprefix, const char *prefix,
if (len > pprefix->max_len)
pprefix->max_len = len;
pl = xmalloc (sizeof (struct prefix_list));
pl = XNEW (struct prefix_list);
pl->prefix = prefix;
pl->require_machine_suffix = require_machine_suffix;
pl->priority = priority;
@ -3226,7 +3226,7 @@ add_preprocessor_option (const char *option, int len)
n_preprocessor_options++;
if (! preprocessor_options)
preprocessor_options = xmalloc (n_preprocessor_options * sizeof (char *));
preprocessor_options = XNEWVEC (char *, n_preprocessor_options);
else
preprocessor_options = xrealloc (preprocessor_options,
n_preprocessor_options * sizeof (char *));
@ -3241,7 +3241,7 @@ add_assembler_option (const char *option, int len)
n_assembler_options++;
if (! assembler_options)
assembler_options = xmalloc (n_assembler_options * sizeof (char *));
assembler_options = XNEWVEC (char *, n_assembler_options);
else
assembler_options = xrealloc (assembler_options,
n_assembler_options * sizeof (char *));
@ -3255,7 +3255,7 @@ add_linker_option (const char *option, int len)
n_linker_options++;
if (! linker_options)
linker_options = xmalloc (n_linker_options * sizeof (char *));
linker_options = XNEWVEC (char *, n_linker_options);
else
linker_options = xrealloc (linker_options,
n_linker_options * sizeof (char *));
@ -3685,7 +3685,7 @@ warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n\n"
}
else if (strcmp (argv[i], "-specs") == 0)
{
struct user_specs *user = xmalloc (sizeof (struct user_specs));
struct user_specs *user = XNEW (struct user_specs);
if (++i >= argc)
fatal ("argument to '-specs' is missing");
@ -3699,7 +3699,7 @@ warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n\n"
}
else if (strncmp (argv[i], "-specs=", 7) == 0)
{
struct user_specs *user = xmalloc (sizeof (struct user_specs));
struct user_specs *user = XNEW (struct user_specs);
if (strlen (argv[i]) == 7)
fatal ("argument to '-specs=' is missing");
@ -3766,7 +3766,7 @@ warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n\n"
if (! IS_DIR_SEPARATOR (value [len - 1])
&& is_directory (value, false))
{
char *tmp = xmalloc (len + 2);
char *tmp = XNEWVEC (char, len + 2);
strcpy (tmp, value);
tmp[len] = DIR_SEPARATOR;
tmp[++ len] = 0;
@ -4013,8 +4013,8 @@ warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n\n"
/* Then create the space for the vectors and scan again. */
switches = xmalloc ((n_switches + 1) * sizeof (struct switchstr));
infiles = xmalloc ((n_infiles + 1) * sizeof (struct infile));
switches = XNEWVEC (struct switchstr, n_switches + 1);
infiles = XNEWVEC (struct infile, n_infiles + 1);
n_switches = 0;
n_infiles = 0;
last_language_n_infiles = -1;
@ -4164,7 +4164,7 @@ warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n\n"
if (i + n_args >= argc)
fatal ("argument to '-%s' is missing", p);
switches[n_switches].args
= xmalloc ((n_args + 1) * sizeof(const char *));
= XNEWVEC (const char *, n_args + 1);
while (j < n_args)
switches[n_switches].args[j++] = argv[++i];
/* Null-terminate the vector. */
@ -4174,12 +4174,12 @@ warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n\n"
{
/* On some systems, ld cannot handle some options without
a space. So split the option from its argument. */
char *part1 = xmalloc (2);
char *part1 = XNEWVEC (char, 2);
part1[0] = c;
part1[1] = '\0';
switches[n_switches].part1 = part1;
switches[n_switches].args = xmalloc (2 * sizeof (const char *));
switches[n_switches].args = XNEWVEC (const char *, 2);
switches[n_switches].args[0] = xstrdup (p+1);
switches[n_switches].args[1] = 0;
}
@ -4822,7 +4822,7 @@ do_spec_1 (const char *spec, int inswitch, const char *soft_matched_part)
else
{
saved_suffix
= xmalloc (suffix_length
= XNEWVEC (char, suffix_length
+ strlen (TARGET_OBJECT_SUFFIX));
strncpy (saved_suffix, suffix, suffix_length);
strcpy (saved_suffix + suffix_length,
@ -6477,11 +6477,11 @@ main (int argc, char **argv)
i = n_infiles;
i += lang_specific_extra_outfiles;
outfiles = xcalloc (i, sizeof (char *));
outfiles = XCNEWVEC (const char *, i);
/* Record which files were specified explicitly as link input. */
explicit_link_files = xcalloc (1, n_infiles);
explicit_link_files = XCNEWVEC (bool, n_infiles);
if (combine_flag)
combine_inputs = true;
@ -6792,7 +6792,7 @@ lookup_compiler (const char *name, size_t length, const char *language)
static char *
save_string (const char *s, int len)
{
char *result = xmalloc (len + 1);
char *result = XNEWVEC (char, len + 1);
memcpy (result, s, len);
result[len] = 0;
@ -7042,8 +7042,7 @@ used_arg (const char *p, int len)
xmalloc from calling fatal, and prevents us from re-executing this
block of code. */
mswitches
= xmalloc (sizeof (struct mswitchstr)
* (n_mdswitches + (n_switches ? n_switches : 1)));
= XNEWVEC (struct mswitchstr, n_mdswitches + (n_switches ? n_switches : 1));
for (i = 0; i < n_switches; i++)
if (switches[i].live_cond != SWITCH_IGNORE)
{
@ -7170,7 +7169,7 @@ set_multilib_dir (void)
{
int i = 0;
mdswitches = xmalloc (sizeof (struct mdswitchstr) * n_mdswitches);
mdswitches = XNEWVEC (struct mdswitchstr, n_mdswitches);
for (start = multilib_defaults; *start != '\0'; start = end + 1)
{
while (*start == ' ' || *start == '\t')
@ -7332,7 +7331,7 @@ set_multilib_dir (void)
if (this_path_len != 1
|| this_path[0] != '.')
{
char *new_multilib_dir = xmalloc (this_path_len + 1);
char *new_multilib_dir = XNEWVEC (char, this_path_len + 1);
char *q;
strncpy (new_multilib_dir, this_path, this_path_len);
@ -7353,7 +7352,7 @@ set_multilib_dir (void)
q++;
if (q < end)
{
char *new_multilib_os_dir = xmalloc (end - q);
char *new_multilib_os_dir = XNEWVEC (char, end - q);
memcpy (new_multilib_os_dir, q + 1, end - q - 1);
new_multilib_os_dir[end - q - 1] = '\0';
multilib_os_dir = new_multilib_os_dir;


@ -73,7 +73,7 @@ lang_specific_driver (int *in_argc ATTRIBUTE_UNUSED,
if (shared_libgcc)
{
/* Make sure to have room for the trailing NULL argument. */
arglist = xmalloc ((argc+2) * sizeof (char *));
arglist = XNEWVEC (const char *, argc + 2);
i = 0;
do


@ -514,7 +514,7 @@ process_file (const char *file_name)
for (fn = functions; fn; fn = fn->next)
solve_flow_graph (fn);
for (src = sources; src; src = src->next)
src->lines = xcalloc (src->num_lines, sizeof (line_t));
src->lines = XCNEWVEC (line_t, src->num_lines);
for (fn = functions; fn; fn = fn->next)
{
coverage_t coverage;
@ -621,7 +621,7 @@ create_file_names (const char *file_name)
struct stat status;
length += strlen (object_directory) + 2;
name = xmalloc (length);
name = XNEWVEC (char, length);
name[0] = 0;
base = !stat (object_directory, &status) && S_ISDIR (status.st_mode);
@ -631,7 +631,7 @@ create_file_names (const char *file_name)
}
else
{
name = xmalloc (length + 1);
name = XNEWVEC (char, length + 1);
name[0] = 0;
base = 1;
}
@ -650,11 +650,11 @@ create_file_names (const char *file_name)
length = strlen (name);
bbg_file_name = xmalloc (length + strlen (GCOV_NOTE_SUFFIX) + 1);
bbg_file_name = XNEWVEC (char, length + strlen (GCOV_NOTE_SUFFIX) + 1);
strcpy (bbg_file_name, name);
strcpy (bbg_file_name + length, GCOV_NOTE_SUFFIX);
da_file_name = xmalloc (length + strlen (GCOV_DATA_SUFFIX) + 1);
da_file_name = XNEWVEC (char, length + strlen (GCOV_DATA_SUFFIX) + 1);
strcpy (da_file_name, name);
strcpy (da_file_name + length, GCOV_DATA_SUFFIX);
@ -676,7 +676,7 @@ find_source (const char *file_name)
if (!strcmp (file_name, src->name))
return src;
src = xcalloc (1, sizeof (source_t));
src = XCNEW (source_t);
src->name = xstrdup (file_name);
src->coverage.name = src->name;
src->index = sources ? sources->index + 1 : 1;
@ -742,7 +742,7 @@ read_graph_file (void)
src = find_source (gcov_read_string ());
lineno = gcov_read_unsigned ();
fn = xcalloc (1, sizeof (function_t));
fn = XCNEW (function_t);
fn->name = function_name;
fn->ident = ident;
fn->checksum = checksum;
@ -778,7 +778,7 @@ read_graph_file (void)
unsigned ix, num_blocks = GCOV_TAG_BLOCKS_NUM (length);
fn->num_blocks = num_blocks;
fn->blocks = xcalloc (fn->num_blocks, sizeof (block_t));
fn->blocks = XCNEWVEC (block_t, fn->num_blocks);
for (ix = 0; ix != num_blocks; ix++)
fn->blocks[ix].flags = gcov_read_unsigned ();
}
@ -799,7 +799,7 @@ read_graph_file (void)
if (dest >= fn->num_blocks)
goto corrupt;
arc = xcalloc (1, sizeof (arc_t));
arc = XCNEW (arc_t);
arc->dst = &fn->blocks[dest];
arc->src = &fn->blocks[src];
@ -844,7 +844,7 @@ read_graph_file (void)
else if (fn && tag == GCOV_TAG_LINES)
{
unsigned blockno = gcov_read_unsigned ();
unsigned *line_nos = xcalloc (length - 1, sizeof (unsigned));
unsigned *line_nos = XCNEWVEC (unsigned, length - 1);
if (blockno >= fn->num_blocks || fn->blocks[blockno].u.line.encoding)
goto corrupt;
@ -1037,7 +1037,7 @@ read_count_file (void)
goto mismatch;
if (!fn->counts)
fn->counts = xcalloc (fn->num_counts, sizeof (gcov_type));
fn->counts = XCNEWVEC (gcov_type, fn->num_counts);
for (ix = 0; ix != fn->num_counts; ix++)
fn->counts[ix] += gcov_read_counter ();
@ -1417,7 +1417,7 @@ static char *
make_gcov_file_name (const char *input_name, const char *src_name)
{
char *cptr;
char *name = xmalloc (strlen (src_name) + strlen (input_name) + 10);
char *name = XNEWVEC (char, strlen (src_name) + strlen (input_name) + 10);
name[0] = 0;
if (flag_long_names && strcmp (src_name, input_name))


@ -3405,7 +3405,7 @@ one_cprop_pass (int pass, bool cprop_jumps, bool bypass_jumps)
local_cprop_pass (cprop_jumps);
/* Determine implicit sets. */
implicit_sets = xcalloc (last_basic_block, sizeof (rtx));
implicit_sets = XCNEWVEC (rtx, last_basic_block);
find_implicit_sets ();
alloc_hash_table (max_cuid, &set_hash_table, 1);
@ -3936,7 +3936,7 @@ static int
pre_expr_reaches_here_p (basic_block occr_bb, struct expr *expr, basic_block bb)
{
int rval;
char *visited = xcalloc (last_basic_block, 1);
char *visited = XCNEWVEC (char, last_basic_block);
rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);
@ -4499,7 +4499,7 @@ pre_gcse (void)
/* Compute a mapping from expression number (`bitmap_index') to
hash table entry. */
index_map = xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
index_map = XCNEWVEC (struct expr *, expr_hash_table.n_elems);
for (i = 0; i < expr_hash_table.size; i++)
for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
index_map[expr->bitmap_index] = expr;
@ -4790,7 +4790,7 @@ hoist_expr_reaches_here_p (basic_block expr_bb, int expr_index, basic_block bb,
if (visited == NULL)
{
visited_allocated_locally = 1;
visited = xcalloc (last_basic_block, 1);
visited = XCNEWVEC (char, last_basic_block);
}
FOR_EACH_EDGE (pred, ei, bb->preds)
@ -4842,7 +4842,7 @@ hoist_code (void)
/* Compute a mapping from expression number (`bitmap_index') to
hash table entry. */
index_map = xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
index_map = XCNEWVEC (struct expr *, expr_hash_table.n_elems);
for (i = 0; i < expr_hash_table.size; i++)
for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
index_map[expr->bitmap_index] = expr;
@ -5071,7 +5071,7 @@ ldst_entry (rtx x)
if (*slot)
return (struct ls_expr *)*slot;
ptr = xmalloc (sizeof (struct ls_expr));
ptr = XNEW (struct ls_expr);
ptr->next = pre_ldst_mems;
ptr->expr = NULL;
@ -5752,8 +5752,8 @@ compute_store_table (void)
pre_ldst_mems = 0;
pre_ldst_table = htab_create (13, pre_ldst_expr_hash,
pre_ldst_expr_eq, NULL);
last_set_in = xcalloc (max_gcse_regno, sizeof (int));
already_set = xmalloc (sizeof (int) * max_gcse_regno);
last_set_in = XCNEWVEC (int, max_gcse_regno);
already_set = XNEWVEC (int, max_gcse_regno);
/* Find all the stores we care about. */
FOR_EACH_BB (bb)
@ -6094,7 +6094,7 @@ build_store_vectors (void)
transp = sbitmap_vector_alloc (last_basic_block, num_stores);
sbitmap_vector_zero (transp, last_basic_block);
regs_set_in_block = xmalloc (sizeof (int) * max_gcse_regno);
regs_set_in_block = XNEWVEC (int, max_gcse_regno);
FOR_EACH_BB (bb)
{
@ -6242,7 +6242,7 @@ remove_reachable_equiv_notes (basic_block bb, struct ls_expr *smexpr)
rtx last, insn, note;
rtx mem = smexpr->pattern;
stack = xmalloc (sizeof (edge_iterator) * n_basic_blocks);
stack = XNEWVEC (edge_iterator, n_basic_blocks);
sp = 0;
ei = ei_start (bb->succs);


@ -1647,7 +1647,7 @@ simplify_cond (rtx exp, int insn_code, int insn_index)
rtx defval = XEXP (exp, 1);
rtx new_defval = XEXP (exp, 1);
int len = XVECLEN (exp, 0);
rtx *tests = xmalloc (len * sizeof (rtx));
rtx *tests = XNEWVEC (rtx, len);
int allsame = 1;
rtx ret;
@ -2773,13 +2773,12 @@ optimize_attrs (void)
return;
/* Make 2 extra elements, for "code" values -2 and -1. */
insn_code_values = xcalloc ((insn_code_number + 2),
sizeof (struct attr_value_list *));
insn_code_values = XCNEWVEC (struct attr_value_list *, insn_code_number + 2);
/* Offset the table address so we can index by -2 or -1. */
insn_code_values += 2;
iv = ivbuf = xmalloc (num_insn_ents * sizeof (struct attr_value_list));
iv = ivbuf = XNEWVEC (struct attr_value_list, num_insn_ents);
for (i = 0; i < MAX_ATTRS_INDEX; i++)
for (attr = attrs[i]; attr; attr = attr->next)


@ -3791,7 +3791,7 @@ initiate_states (void)
int i;
if (description->units_num)
units_array = xmalloc (description->units_num * sizeof (unit_decl_t));
units_array = XNEWVEC (unit_decl_t, description->units_num);
else
units_array = 0;
@ -6162,7 +6162,7 @@ static void
process_state_for_insn_equiv_partition (state_t state)
{
arc_t arc;
arc_t *insn_arcs_array = xcalloc (description->insns_num, sizeof(arc_t));
arc_t *insn_arcs_array = XCNEWVEC (arc_t, description->insns_num);
/* Process insns of the arcs. */
for (arc = first_out_arc (state); arc != NULL; arc = next_out_arc (arc))
@ -6292,7 +6292,7 @@ units_to_automata_heuristic_distr (void)
if (description->units_num == 0)
return;
estimation_bound = estimate_one_automaton_bound ();
unit_decls = xmalloc (description->units_num * sizeof (unit_decl_t));
unit_decls = XNEWVEC (unit_decl_t, description->units_num);
for (i = 0, j = 0; i < description->decls_num; i++)
if (description->decls[i]->mode == dm_unit)


@ -341,7 +341,7 @@ gen_insn (rtx insn, int lineno)
if (i != XVECLEN (insn, 1) - 1)
{
struct clobber_pat *p;
struct clobber_ent *link = xmalloc (sizeof (struct clobber_ent));
struct clobber_ent *link = XNEW (struct clobber_ent);
int j;
link->code_number = insn_code_number;
@ -377,7 +377,7 @@ gen_insn (rtx insn, int lineno)
if (p == 0)
{
p = xmalloc (sizeof (struct clobber_pat));
p = XNEW (struct clobber_pat);
p->insns = 0;
p->pattern = insn;
@ -599,7 +599,7 @@ gen_split (rtx split)
max_operand_vec (split, 2);
operands = MAX (max_opno, MAX (max_dup_opno, max_scratch_opno)) + 1;
unused = (operands == 0 ? " ATTRIBUTE_UNUSED" : "");
used = xcalloc (1, operands);
used = XCNEWVEC (char, operands);
/* Output the prototype, function name and argument declarations. */
if (GET_CODE (split) == DEFINE_PEEPHOLE2)


@ -208,7 +208,7 @@ static char *
VEC_char_to_string (VEC(char,heap) *v)
{
size_t n = VEC_length (char, v);
char *s = xmalloc (n + 1);
char *s = XNEWVEC (char, n + 1);
memcpy (s, VEC_address (char, v), n);
s[n] = '\0';
return s;
@ -419,7 +419,7 @@ main (int argc, char **argv)
else if (GET_CODE (desc) == DEFINE_PEEPHOLE)
{
struct code_ptr *link = xmalloc (sizeof (struct code_ptr));
struct code_ptr *link = XNEW (struct code_ptr);
link->insn_code = insn_code_number;
link->next = peepholes;


@ -798,7 +798,7 @@ validate_insn_operands (struct data *d)
static void
gen_insn (rtx insn, int lineno)
{
struct data *d = xmalloc (sizeof (struct data));
struct data *d = XNEW (struct data);
int i;
d->code_number = next_code_number;
@ -840,7 +840,7 @@ gen_insn (rtx insn, int lineno)
static void
gen_peephole (rtx peep, int lineno)
{
struct data *d = xmalloc (sizeof (struct data));
struct data *d = XNEW (struct data);
int i;
d->code_number = next_code_number;
@ -879,7 +879,7 @@ gen_peephole (rtx peep, int lineno)
static void
gen_expand (rtx insn, int lineno)
{
struct data *d = xmalloc (sizeof (struct data));
struct data *d = XNEW (struct data);
int i;
d->code_number = next_code_number;
@ -923,7 +923,7 @@ gen_expand (rtx insn, int lineno)
static void
gen_split (rtx split, int lineno)
{
struct data *d = xmalloc (sizeof (struct data));
struct data *d = XNEW (struct data);
int i;
d->code_number = next_code_number;
@ -1034,7 +1034,7 @@ strip_whitespace (const char *s)
if (s == 0)
return 0;
p = q = xmalloc (strlen (s) + 1);
p = q = XNEWVEC (char, strlen (s) + 1);
while ((ch = *s++) != '\0')
if (! ISSPACE (ch))
*p++ = ch;


@ -511,7 +511,7 @@ new_decision_test (enum decision_type type, struct decision_test ***pplace)
struct decision_test **place = *pplace;
struct decision_test *test;
test = xmalloc (sizeof (*test));
test = XNEW (struct decision_test);
test->next = *place;
test->type = type;
*place = test;


@ -468,7 +468,7 @@ gt_pch_save (FILE *f)
ggc_pch_this_base (state.d, mmi.preferred_base);
state.ptrs = xmalloc (state.count * sizeof (*state.ptrs));
state.ptrs = XNEWVEC (struct ptr_data *, state.count);
state.ptrs_i = 0;
htab_traverse (saving_htab, call_alloc, &state);
qsort (state.ptrs, state.count, sizeof (*state.ptrs), compare_ptr_data);
@ -865,7 +865,7 @@ ggc_record_overhead (size_t allocated, size_t overhead, void *ptr,
const char *name, int line, const char *function)
{
struct loc_descriptor *loc = loc_descriptor (name, line, function);
struct ptr_hash_entry *p = xmalloc (sizeof (struct ptr_hash_entry));
struct ptr_hash_entry *p = XNEW (struct ptr_hash_entry);
PTR *slot;
p->ptr = ptr;


@ -628,7 +628,7 @@ found:
L2 = LOOKUP_L2 (p);
if (base[L1] == NULL)
base[L1] = xcalloc (PAGE_L2_SIZE, sizeof (page_entry *));
base[L1] = XCNEWVEC (page_entry *, PAGE_L2_SIZE);
base[L1][L2] = entry;
}
@ -1346,7 +1346,7 @@ ggc_free (void *p)
the data, but instead verify that the data is *actually* not
reachable the next time we collect. */
{
struct free_object *fo = xmalloc (sizeof (struct free_object));
struct free_object *fo = XNEW (struct free_object);
fo->object = p;
fo->next = G.free_object_list;
G.free_object_list = fo;
@ -1472,7 +1472,7 @@ init_ggc (void)
}
/* We have a good page, might as well hold onto it... */
e = xcalloc (1, sizeof (struct page_entry));
e = XCNEW (struct page_entry);
e->bytes = G.pagesize;
e->page = p;
e->next = G.free_pages;
@ -1518,12 +1518,12 @@ init_ggc (void)
G.depth_in_use = 0;
G.depth_max = 10;
G.depth = xmalloc (G.depth_max * sizeof (unsigned int));
G.depth = XNEWVEC (unsigned int, G.depth_max);
G.by_depth_in_use = 0;
G.by_depth_max = INITIAL_PTE_COUNT;
G.by_depth = xmalloc (G.by_depth_max * sizeof (page_entry *));
G.save_in_use = xmalloc (G.by_depth_max * sizeof (unsigned long *));
G.by_depth = XNEWVEC (page_entry *, G.by_depth_max);
G.save_in_use = XNEWVEC (unsigned long *, G.by_depth_max);
}
/* Start a new GGC zone. */
@ -2013,7 +2013,7 @@ struct ggc_pch_data
struct ggc_pch_data *
init_ggc_pch (void)
{
return xcalloc (sizeof (struct ggc_pch_data), 1);
return XCNEW (struct ggc_pch_data);
}
void
@ -2161,8 +2161,8 @@ move_ptes_to_front (int count_old_page_tables, int count_new_page_tables)
page_entry **new_by_depth;
unsigned long **new_save_in_use;
new_by_depth = xmalloc (G.by_depth_max * sizeof (page_entry *));
new_save_in_use = xmalloc (G.by_depth_max * sizeof (unsigned long *));
new_by_depth = XNEWVEC (page_entry *, G.by_depth_max);
new_save_in_use = XNEWVEC (unsigned long *, G.by_depth_max);
memcpy (&new_by_depth[0],
&G.by_depth[count_old_page_tables],


@ -442,14 +442,14 @@ global_alloc (FILE *file)
/* Establish mappings from register number to allocation number
and vice versa. In the process, count the allocnos. */
reg_allocno = xmalloc (max_regno * sizeof (int));
reg_allocno = XNEWVEC (int, max_regno);
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
reg_allocno[i] = -1;
/* Initialize the shared-hard-reg mapping
from the list of pairs that may share. */
reg_may_share = xcalloc (max_regno, sizeof (int));
reg_may_share = XCNEWVEC (int, max_regno);
for (x = regs_may_share; x; x = XEXP (XEXP (x, 1), 1))
{
int r1 = REGNO (XEXP (x, 0));
@ -480,7 +480,7 @@ global_alloc (FILE *file)
else
reg_allocno[i] = -1;
allocno = xcalloc (max_allocno, sizeof (struct allocno));
allocno = XCNEWVEC (struct allocno, max_allocno);
for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
if (reg_allocno[i] >= 0)
@ -528,9 +528,9 @@ global_alloc (FILE *file)
/* We used to use alloca here, but the size of what it would try to
allocate would occasionally cause it to exceed the stack limit and
cause unpredictable core dumps. Some examples were > 2Mb in size. */
conflicts = xcalloc (max_allocno * allocno_row_words, sizeof (INT_TYPE));
conflicts = XCNEWVEC (INT_TYPE, max_allocno * allocno_row_words);
allocnos_live = xmalloc (allocno_row_words * sizeof (INT_TYPE));
allocnos_live = XNEWVEC (INT_TYPE, allocno_row_words);
/* If there is work to be done (at least one reg to allocate),
perform global conflict analysis and allocate the regs. */
@ -567,7 +567,7 @@ global_alloc (FILE *file)
/* Determine the order to allocate the remaining pseudo registers. */
allocno_order = xmalloc (max_allocno * sizeof (int));
allocno_order = XNEWVEC (int, max_allocno);
for (i = 0; i < (size_t) max_allocno; i++)
allocno_order[i] = i;
@ -678,9 +678,9 @@ global_conflicts (void)
int *block_start_allocnos;
/* Make a vector that mark_reg_{store,clobber} will store in. */
regs_set = xmalloc (max_parallel * sizeof (rtx) * 2);
regs_set = XNEWVEC (rtx, max_parallel * 2);
block_start_allocnos = xmalloc (max_allocno * sizeof (int));
block_start_allocnos = XNEWVEC (int, max_allocno);
FOR_EACH_BB (b)
{
@ -949,7 +949,7 @@ prune_preferences (void)
{
int i;
int num;
int *allocno_to_order = xmalloc (max_allocno * sizeof (int));
int *allocno_to_order = XNEWVEC (int, max_allocno);
/* Scan least most important to most important.
For each allocno, remove from preferences registers that cannot be used,
@ -2300,7 +2300,7 @@ set_up_bb_rts_numbers (void)
int i;
int *rts_order;
rts_order = xmalloc (sizeof (int) * (n_basic_blocks - NUM_FIXED_BLOCKS));
rts_order = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
post_order_compute (rts_order, false);
for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
BB_INFO_BY_INDEX (rts_order [i])->rts_number = i;


@ -247,9 +247,9 @@ print_rtl_graph_with_bb (const char *base, rtx rtx_first)
{
enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
int max_uid = get_max_uid ();
int *start = xmalloc (max_uid * sizeof (int));
int *end = xmalloc (max_uid * sizeof (int));
enum bb_state *in_bb_p = xmalloc (max_uid * sizeof (enum bb_state));
int *start = XNEWVEC (int, max_uid);
int *end = XNEWVEC (int, max_uid);
enum bb_state *in_bb_p = XNEWVEC (enum bb_state, max_uid);
basic_block bb;
int i;


@ -1875,14 +1875,13 @@ schedule_block (int b, int rgn_n_insns)
/* Allocate the ready list. */
ready.veclen = rgn_n_insns + 1 + issue_rate;
ready.first = ready.veclen - 1;
ready.vec = xmalloc (ready.veclen * sizeof (rtx));
ready.vec = XNEWVEC (rtx, ready.veclen);
ready.n_ready = 0;
/* It is used for first cycle multipass scheduling. */
temp_state = alloca (dfa_state_size);
ready_try = xcalloc ((rgn_n_insns + 1), sizeof (char));
choice_stack = xmalloc ((rgn_n_insns + 1)
* sizeof (struct choice_entry));
ready_try = XCNEWVEC (char, rgn_n_insns + 1);
choice_stack = XNEWVEC (struct choice_entry, rgn_n_insns + 1);
for (i = 0; i <= rgn_n_insns; i++)
choice_stack[i].state = xmalloc (dfa_state_size);
@ -2253,7 +2252,7 @@ sched_init (FILE *dump_file)
pseudos which do not cross calls. */
old_max_uid = get_max_uid () + 1;
h_i_d = xcalloc (old_max_uid, sizeof (*h_i_d));
h_i_d = XCNEWVEC (struct haifa_insn_data, old_max_uid);
for (i = 0; i < old_max_uid; i++)
h_i_d [i].cost = -1;
@ -2295,7 +2294,7 @@ sched_init (FILE *dump_file)
{
rtx line;
line_note_head = xcalloc (last_basic_block, sizeof (rtx));
line_note_head = XCNEWVEC (rtx, last_basic_block);
/* Save-line-note-head:
Determine the line-number at the start of each basic block.


@ -352,7 +352,7 @@ static inline void
ipcp_formal_create (struct cgraph_node *mt)
{
IPA_NODE_REF (mt)->ipcp_cval =
xcalloc (ipa_method_formal_count (mt), sizeof (struct ipcp_formal));
XCNEWVEC (struct ipcp_formal, ipa_method_formal_count (mt));
}
/* Set cval structure of I-th formal of MT to CVAL. */
@ -853,7 +853,7 @@ ipcp_replace_map_create (enum cvalue_type type, tree parm_tree,
struct ipa_replace_map *replace_map;
tree const_val;
replace_map = xcalloc (1, sizeof (struct ipa_replace_map));
replace_map = XCNEW (struct ipa_replace_map);
gcc_assert (ipcp_type_is_const (type));
if (type == CONST_VALUE_REF )
{


@ -869,7 +869,7 @@ cgraph_decide_inlining (void)
struct cgraph_node *node;
int nnodes;
struct cgraph_node **order =
xcalloc (cgraph_n_nodes, sizeof (struct cgraph_node *));
XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
int old_insns = 0;
int i;
@ -1138,7 +1138,7 @@ cgraph_early_inlining (void)
struct cgraph_node *node;
int nnodes;
struct cgraph_node **order =
xcalloc (cgraph_n_nodes, sizeof (struct cgraph_node *));
XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
int i;
if (sorrycount || errorcount)


@ -167,7 +167,7 @@ static inline void
ipa_method_tree_map_create (struct cgraph_node *mt)
{
IPA_NODE_REF (mt)->ipa_param_tree =
xcalloc (ipa_method_formal_count (mt), sizeof (tree));
XCNEWVEC (tree, ipa_method_formal_count (mt));
}
/* Create modify structure for MT. */
@ -175,7 +175,7 @@ static inline void
ipa_method_modify_create (struct cgraph_node *mt)
{
((struct ipa_node *) mt->aux)->ipa_mod =
xcalloc (ipa_method_formal_count (mt), sizeof (bool));
XCNEWVEC (bool, ipa_method_formal_count (mt));
}
/* Set modify of I-th formal of MT to VAL. */
@ -388,7 +388,7 @@ static inline void
ipa_callsite_param_map_create (struct cgraph_edge *cs)
{
IPA_EDGE_REF (cs)->ipa_param_map =
xcalloc (ipa_callsite_param_count (cs), sizeof (struct ipa_jump_func));
XCNEWVEC (struct ipa_jump_func, ipa_callsite_param_count (cs));
}
/* Return the call expr tree related to callsite CS. */


@ -490,7 +490,7 @@ scan_function (tree *tp,
static void
analyze_function (struct cgraph_node *fn)
{
funct_state l = xcalloc (1, sizeof (struct funct_state_d));
funct_state l = XCNEW (struct funct_state_d);
tree decl = fn->decl;
struct ipa_dfs_info * w_info = fn->aux;
@ -578,7 +578,7 @@ static_execute (void)
struct cgraph_node *node;
struct cgraph_node *w;
struct cgraph_node **order =
xcalloc (cgraph_n_nodes, sizeof (struct cgraph_node *));
XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
int order_pos = order_pos = ipa_utils_reduced_inorder (order, true, false);
int i;
struct ipa_dfs_info * w_info;


@ -200,7 +200,7 @@ compare_type_brand (splay_tree_key sk1, splay_tree_key sk2)
static tree
discover_unique_type (tree type)
{
struct type_brand_s * brand = xmalloc (sizeof (struct type_brand_s));
struct type_brand_s * brand = XNEW (struct type_brand_s);
int i = 0;
splay_tree_node result;


@ -161,7 +161,7 @@ ipa_utils_reduced_inorder (struct cgraph_node **order,
struct cgraph_node *node;
struct searchc_env env;
splay_tree_node result;
env.stack = xcalloc (cgraph_n_nodes, sizeof (struct cgraph_node *));
env.stack = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
env.stack_size = 0;
env.result = order;
env.order_pos = 0;


@ -36,7 +36,7 @@ cgraph_postorder (struct cgraph_node **order)
struct cgraph_edge *edge, last;
struct cgraph_node **stack =
xcalloc (cgraph_n_nodes, sizeof (struct cgraph_node *));
XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
/* We have to deal with cycles nicely, so use a depth first traversal
output algorithm. Ignore the fact that some functions won't need


@ -37,7 +37,7 @@ buffer_grow (struct buffer *bufp, int size)
{
if (size < 120)
size = 120;
bufp->data = xmalloc (size);
bufp->data = XNEWVEC (unsigned char, size);
bufp->ptr = bufp->data;
}
else


@ -156,7 +156,7 @@ add_class_flag (class_flag_node **rootp, const char *ident, int value)
if (NULL == root)
{
root = xmalloc (sizeof (class_flag_node));
root = XNEW (class_flag_node);
root->ident = "";
root->value = 0;
root->sibling = NULL;
@ -183,7 +183,7 @@ add_class_flag (class_flag_node **rootp, const char *ident, int value)
else
{
/* Insert new node into the tree. */
node = xmalloc (sizeof (class_flag_node));
node = XNEW (class_flag_node);
node->ident = xstrdup (ident);
node->value = value;


@ -211,7 +211,7 @@ split_range (struct eh_range *range, int pc)
}
/* Create a new range. */
h = xmalloc (sizeof (struct eh_range));
h = XNEW (struct eh_range);
h->start_pc = pc;
h->end_pc = range->end_pc;
@ -289,7 +289,7 @@ add_handler (int start_pc, int end_pc, tree handler, tree type)
}
/* Create the new range. */
h = xmalloc (sizeof (struct eh_range));
h = XNEW (struct eh_range);
first_child = &h->first_child;
h->start_pc = start_pc;


@ -600,7 +600,7 @@ cxx_keyword_subst (const unsigned char *str, int length)
are `$'. */
if (i == length)
{
char *dup = xmalloc (2 + length - min_length + kwl);
char *dup = XNEWVEC (char, 2 + length - min_length + kwl);
strcpy (dup, cxx_keywords[mid]);
for (i = kwl; i < length + 1; ++i)
dup[i] = '$';
@ -719,7 +719,7 @@ get_field_name (JCF *jcf, int name_index, JCF_u2 flags)
return NULL;
}
override = xmalloc (length + 3);
override = XNEWVEC (char, length + 3);
memcpy (override, name, length);
strcpy (override + length, "__");
}
@ -929,13 +929,13 @@ print_method_info (FILE *stream, JCF* jcf, int name_index, int sig_index,
{
struct method_name *nn;
nn = xmalloc (sizeof (struct method_name));
nn->name = xmalloc (length);
nn = XNEW (struct method_name);
nn->name = XNEWVEC (unsigned char, length);
memcpy (nn->name, str, length);
nn->length = length;
nn->next = method_name_list;
nn->sig_length = JPOOL_UTF_LENGTH (jcf, sig_index);
nn->signature = xmalloc (nn->sig_length);
nn->signature = XNEWVEC (unsigned char, nn->sig_length);
nn->is_native = METHOD_IS_NATIVE (flags);
memcpy (nn->signature, JPOOL_UTF_DATA (jcf, sig_index),
nn->sig_length);
@ -1240,7 +1240,7 @@ throwable_p (const unsigned char *clname)
for (length = 0; clname[length] != ';' && clname[length] != '\0'; ++length)
;
current = ALLOC (length + 1);
current = XNEWVEC (unsigned char, length + 1);
for (i = 0; i < length; ++i)
current[i] = clname[i] == '/' ? '.' : clname[i];
current[length] = '\0';
@ -1276,7 +1276,7 @@ throwable_p (const unsigned char *clname)
jcf_parse_class (&jcf);
tmp = (unsigned char *) super_class_name (&jcf, &super_length);
super = ALLOC (super_length + 1);
super = XNEWVEC (unsigned char, super_length + 1);
memcpy (super, tmp, super_length);
super[super_length] = '\0';
@ -1836,8 +1836,8 @@ print_include (FILE *out, const unsigned char *utf8, int len)
return;
}
incl = xmalloc (sizeof (struct include));
incl->name = xmalloc (len + 1);
incl = XNEW (struct include);
incl->name = XNEWVEC (char, len + 1);
strncpy (incl->name, (const char *) utf8, len);
incl->name[len] = '\0';
incl->next = all_includes;
@ -1922,8 +1922,8 @@ add_namelet (const unsigned char *name, const unsigned char *name_limit,
if (n == NULL)
{
n = xmalloc (sizeof (struct namelet));
n->name = xmalloc (p - name + 1);
n = XNEW (struct namelet);
n->name = XNEWVEC (char, p - name + 1);
strncpy (n->name, (const char *) name, p - name);
n->name[p - name] = '\0';
n->is_class = (p == name_limit);
@ -2173,7 +2173,7 @@ process_file (JCF *jcf, FILE *out)
if (len > 6 && ! strcmp (&jcf->classname[len - 6], ".class"))
len -= 6;
/* Turn the class name into a file name. */
name = xmalloc (len + 1);
name = XNEWVEC (char, len + 1);
for (i = 0; i < len; ++i)
name[i] = jcf->classname[i] == '.' ? '/' : jcf->classname[i];
name[i] = '\0';
@ -2484,25 +2484,25 @@ main (int argc, char** argv)
case OPT_PREPEND:
if (prepend_count == 0)
prepend_specs = ALLOC (argc * sizeof (char*));
prepend_specs = XNEWVEC (char *, argc);
prepend_specs[prepend_count++] = optarg;
break;
case OPT_FRIEND:
if (friend_count == 0)
friend_specs = ALLOC (argc * sizeof (char*));
friend_specs = XNEWVEC (char *, argc);
friend_specs[friend_count++] = optarg;
break;
case OPT_ADD:
if (add_count == 0)
add_specs = ALLOC (argc * sizeof (char*));
add_specs = XNEWVEC (char *, argc);
add_specs[add_count++] = optarg;
break;
case OPT_APPEND:
if (append_count == 0)
append_specs = ALLOC (argc * sizeof (char*));
append_specs = XNEWVEC (char *, argc);
append_specs[append_count++] = optarg;
break;
@ -2608,7 +2608,7 @@ main (int argc, char** argv)
{
int dir_len = strlen (output_directory);
int i, classname_length = strlen (classname);
current_output_file = ALLOC (dir_len + classname_length + 5);
current_output_file = XNEWVEC (char, dir_len + classname_length + 5);
strcpy (current_output_file, output_directory);
if (dir_len > 0 && output_directory[dir_len-1] != '/')
current_output_file[dir_len++] = '/';


@ -188,11 +188,11 @@ int
read_zip_member (JCF *jcf, ZipDirectory *zipd, ZipFile *zipf)
{
jcf->filbuf = jcf_unexpected_eof;
jcf->zipd = (void *)zipd;
jcf->zipd = zipd;
if (zipd->compression_method == Z_NO_COMPRESSION)
{
jcf->buffer = ALLOC (zipd->size);
jcf->buffer = XNEWVEC (unsigned char, zipd->size);
jcf->buffer_end = jcf->buffer + zipd->size;
jcf->read_ptr = jcf->buffer;
jcf->read_end = jcf->buffer_end;
@ -208,13 +208,13 @@ read_zip_member (JCF *jcf, ZipDirectory *zipd, ZipFile *zipf)
d_stream.zfree = (free_func) 0;
d_stream.opaque = (voidpf) 0;
jcf->buffer = ALLOC (zipd->uncompressed_size);
jcf->buffer = XNEWVEC (unsigned char, zipd->uncompressed_size);
d_stream.next_out = jcf->buffer;
d_stream.avail_out = zipd->uncompressed_size;
jcf->buffer_end = jcf->buffer + zipd->uncompressed_size;
jcf->read_ptr = jcf->buffer;
jcf->read_end = jcf->buffer_end;
buffer = ALLOC (zipd->size);
buffer = XNEWVEC (char, zipd->size);
d_stream.next_in = (unsigned char *) buffer;
d_stream.avail_in = zipd->size;
if (lseek (zipf->fd, zipd->filestart, 0) < 0
@ -225,7 +225,7 @@ read_zip_member (JCF *jcf, ZipDirectory *zipd, ZipFile *zipf)
inflateInit2 (&d_stream, -MAX_WBITS);
inflate (&d_stream, Z_NO_FLUSH);
inflateEnd (&d_stream);
FREE (buffer);
free (buffer);
}
return 0;
@ -246,7 +246,7 @@ open_class (const char *filename, JCF *jcf, int fd, const char *dep_name)
if (dep_name != NULL)
jcf_dependency_add_file (dep_name, 0);
JCF_ZERO (jcf);
jcf->buffer = ALLOC (stat_buf.st_size);
jcf->buffer = XNEWVEC (unsigned char, stat_buf.st_size);
jcf->buffer_end = jcf->buffer + stat_buf.st_size;
jcf->read_ptr = jcf->buffer;
jcf->read_end = jcf->buffer_end;
@ -383,8 +383,7 @@ caching_stat (char *filename, struct stat *buf)
if (!*slot)
{
/* We have not already scanned this directory; scan it now. */
dent = ((memoized_dirlist_entry *)
ALLOC (sizeof (memoized_dirlist_entry)));
dent = XNEW (memoized_dirlist_entry);
dent->dir = xstrdup (filename);
/* Unfortunately, scandir is not fully standardized. In
particular, the type of the function pointer passed as the


@ -160,7 +160,7 @@ set_source_filename (JCF *jcf, int index)
/* Length of prefix, not counting final dot. */
int i = dot - class_name;
/* Concatenate current package prefix with new sfname. */
char *buf = xmalloc (i + new_len + 2); /* Space for '.' and '\0'. */
char *buf = XNEWVEC (char, i + new_len + 2); /* Space for '.' and '\0'. */
strcpy (buf + i + 1, sfname);
/* Copy package from class_name, replacing '.' by DIR_SEPARATOR.
Note we start at the end with the final package dot. */
@ -1084,7 +1084,7 @@ java_parse_file (int set_yydebug ATTRIBUTE_UNUSED)
finput = fopen (main_input_filename, "r");
if (finput == NULL)
fatal_error ("can't open %s: %m", input_filename);
list = xmalloc(avail);
list = XNEWVEC (char, avail);
next = list;
for (;;)
{
@ -1351,7 +1351,7 @@ compute_class_name (struct ZipDirectory *zdir)
}
filename_length -= strlen (".class");
class_name = ALLOC (filename_length + 1);
class_name = XNEWVEC (char, filename_length + 1);
memcpy (class_name, class_name_in_zip_dir, filename_length);
class_name [filename_length] = '\0';
@ -1455,7 +1455,7 @@ parse_zip_file_entries (void)
if (TYPE_SIZE (current_class) != error_mark_node)
{
parse_class_file ();
FREE (current_jcf->buffer); /* No longer necessary */
free (current_jcf->buffer); /* No longer necessary */
/* Note: there is a way to free this buffer right after a
class seen in a zip file has been parsed. The idea is the
set its jcf in such a way that buffer will be reallocated
@ -1468,11 +1468,11 @@ parse_zip_file_entries (void)
{
char *file_name, *class_name_in_zip_dir, *buffer;
JCF *jcf;
file_name = ALLOC (zdir->filename_length + 1);
file_name = XNEWVEC (char, zdir->filename_length + 1);
class_name_in_zip_dir = ZIPDIR_FILENAME (zdir);
strncpy (file_name, class_name_in_zip_dir, zdir->filename_length);
file_name[zdir->filename_length] = '\0';
jcf = ALLOC (sizeof (JCF));
jcf = XNEW (JCF);
JCF_ZERO (jcf);
jcf->read_state = finput;
jcf->filbuf = jcf_filbuf_from_stdio;
@ -1484,7 +1484,7 @@ parse_zip_file_entries (void)
if (read_zip_member (jcf, zdir, localToFile) < 0)
fatal_error ("error while reading %s from zip file", file_name);
buffer = ALLOC (zdir->filename_length + 1 +
buffer = XNEWVEC (char, zdir->filename_length + 1 +
(jcf->buffer_end - jcf->buffer));
strcpy (buffer, file_name);
/* This is not a typo: we overwrite the trailing \0 of the
@ -1495,8 +1495,8 @@ parse_zip_file_entries (void)
compile_resource_data (file_name, buffer,
jcf->buffer_end - jcf->buffer);
JCF_FINISH (jcf);
FREE (jcf);
FREE (buffer);
free (jcf);
free (buffer);
}
break;
@ -1529,7 +1529,7 @@ process_zip_dir (FILE *finput)
continue;
class_name = compute_class_name (zdir);
file_name = ALLOC (zdir->filename_length+1);
file_name = XNEWVEC (char, zdir->filename_length+1);
jcf = ggc_alloc (sizeof (JCF));
JCF_ZERO (jcf);


@ -132,7 +132,7 @@ add_entry (struct entry **entp, const char *filename, int is_system)
int len;
struct entry *n;
n = ALLOC (sizeof (struct entry));
n = XNEW (struct entry);
n->flags = is_system ? FLAG_SYSTEM : 0;
n->next = NULL;


@ -1729,7 +1729,7 @@ generate_bytecode_insns (tree exp, int target, struct jcf_partial *state)
unsigned HOST_WIDE_INT delta;
/* Copy the chain of relocs into a sorted array. */
struct jcf_relocation **relocs
= xmalloc (sw_state.num_cases * sizeof (struct jcf_relocation *));
= XNEWVEC (struct jcf_relocation *, sw_state.num_cases);
/* The relocs arrays is a buffer with a gap.
The assumption is that cases will normally come in "runs". */
int gap_start = 0;
@ -3473,7 +3473,7 @@ make_class_file_name (tree clas)
slash = dname + strlen (dname);
}
r = xmalloc (slash - dname + strlen (cname) + 2);
r = XNEWVEC (char, slash - dname + strlen (cname) + 2);
strncpy (r, dname, slash - dname);
r[slash - dname] = sep;
strcpy (&r[slash - dname + 1], cname);


@ -165,9 +165,9 @@ typedef struct JCF GTY(()) {
#define JCF_FINISH(JCF) { \
CPOOL_FINISH(&(JCF)->cpool); \
if ((JCF)->buffer) FREE ((JCF)->buffer); \
if ((JCF)->filename) FREE ((char *) (JCF)->filename); \
if ((JCF)->classname) FREE ((char *) (JCF)->classname); \
if ((JCF)->buffer) free ((JCF)->buffer); \
if ((JCF)->filename) free ((char *) (JCF)->filename); \
if ((JCF)->classname) free ((char *) (JCF)->classname); \
(JCF)->finished = 1; }
#define CPOOL_INIT(CPOOL) \


@ -90,7 +90,7 @@ find_spec_file (const char *dir)
int x;
struct stat sb;
spec = xmalloc (strlen (dir) + sizeof (SPEC_FILE)
spec = XNEWVEC (char, strlen (dir) + sizeof (SPEC_FILE)
+ sizeof ("-specs=") + 4);
strcpy (spec, "-specs=");
x = strlen (spec);
@ -247,7 +247,7 @@ lang_specific_driver (int *in_argc, const char *const **in_argv,
argv = *in_argv;
added_libraries = *in_added_libraries;
args = xcalloc (argc, sizeof (int));
args = XCNEWVEC (int, argc);
for (i = 1; i < argc; i++)
{
@ -490,7 +490,7 @@ lang_specific_driver (int *in_argc, const char *const **in_argv,
num_args += shared_libgcc;
arglist = xmalloc ((num_args + 1) * sizeof (char *));
arglist = XNEWVEC (const char *, num_args + 1);
j = 0;
arglist[j++] = argv[0];


@ -401,7 +401,7 @@ put_decl_string (const char *str, int len)
if (decl_buf == NULL)
{
decl_buflen = len + 100;
decl_buf = xmalloc (decl_buflen);
decl_buf = XNEWVEC (char, decl_buflen);
}
else
{
@ -631,7 +631,7 @@ java_post_options (const char **pfilename)
error ("couldn't determine target name for dependency tracking");
else
{
char *buf = xmalloc (dot - filename +
char *buf = XNEWVEC (char, dot - filename +
3 + sizeof (TARGET_OBJECT_SUFFIX));
strncpy (buf, filename, dot - filename);


@ -162,7 +162,7 @@ java_sprint_unicode (int c)
java_lexer *
java_new_lexer (FILE *finput, const char *encoding)
{
java_lexer *lex = xmalloc (sizeof (java_lexer));
java_lexer *lex = XNEW (java_lexer);
int enc_error = 0;
lex->finput = finput;


@ -200,7 +200,7 @@ extern void java_destroy_lexer (java_lexer *);
/* Special ways to report error on numeric literals */
#define JAVA_FLOAT_RANGE_ERROR(m) \
{ \
char *msg = xmalloc (100 + strlen (m)); \
char *msg = XNEWVEC (char, 100 + strlen (m)); \
sprintf (msg, "Floating point literal exceeds range of `%s'", (m)); \
JAVA_RANGE_ERROR(msg); \
free (msg); \


@ -105,7 +105,7 @@ struct method_declarator {
};
#define NEW_METHOD_DECLARATOR(D,N,A) \
{ \
(D) = xmalloc (sizeof (struct method_declarator)); \
(D) = XNEW (struct method_declarator); \
(D)->method_name = (N); \
(D)->args = (A); \
}
@ -498,7 +498,7 @@ formal_parameter:
if (bracket_count)
{
int i;
char *n = xmalloc (bracket_count + 1 + strlen ($$));
char *n = XNEWVEC (char, bracket_count + 1 + strlen ($$));
for (i = 0; i < bracket_count; ++i)
n[i] = '[';
strcpy (n + bracket_count, $$);
@ -512,7 +512,7 @@ formal_parameter:
if (bracket_count)
{
int i;
char *n = xmalloc (bracket_count + 1 + strlen ($2));
char *n = XNEWVEC (char, bracket_count + 1 + strlen ($2));
for (i = 0; i < bracket_count; ++i)
n[i] = '[';
strcpy (n + bracket_count, $2);
@ -1175,10 +1175,10 @@ constant_expression:
void
java_push_parser_context (void)
{
struct parser_ctxt *new = xcalloc (1, sizeof (struct parser_ctxt));
struct parser_ctxt *tmp = XCNEW (struct parser_ctxt);
new->next = ctxp;
ctxp = new;
tmp->next = ctxp;
ctxp = tmp;
}
static void
@ -1186,7 +1186,7 @@ push_class_context (const char *name)
{
struct class_context *ctx;
ctx = xmalloc (sizeof (struct class_context));
ctx = XNEW (struct class_context);
ctx->name = (char *) name;
ctx->next = current_class_context;
current_class_context = ctx;


@ -145,7 +145,7 @@ compile_resource_file (const char *name, const char *filename)
perror ("Could not figure length of resource file");
return;
}
buffer = xmalloc (strlen (name) + stat_buf.st_size);
buffer = XNEWVEC (char, strlen (name) + stat_buf.st_size);
strcpy (buffer, name);
read (fd, buffer + strlen (name), stat_buf.st_size);
close (fd);


@ -308,9 +308,8 @@ read_zip_archive (ZipFile *zipf)
}
zipf->count = makeword((const uch *) &buffer[TOTAL_ENTRIES_CENTRAL_DIR]);
zipf->dir_size = makelong((const uch *) &buffer[SIZE_CENTRAL_DIRECTORY]);
#define ALLOC xmalloc
/* Allocate 1 more to allow appending '\0' to last filename. */
zipf->central_directory = ALLOC (zipf->dir_size+1);
zipf->central_directory = XNEWVEC (char, zipf->dir_size + 1);
if (lseek (zipf->fd, -(zipf->dir_size+ECREC_SIZE+4), SEEK_CUR) < 0)
return -2;
if (read (zipf->fd, zipf->central_directory, zipf->dir_size) < 0)


@ -467,7 +467,7 @@ write_global_declarations (void)
tree globals = lang_hooks.decls.getdecls ();
int len = list_length (globals);
tree *vec = xmalloc (sizeof (tree) * len);
tree *vec = XNEWVEC (tree, len);
int i;
tree decl;


@ -107,7 +107,7 @@ compute_antinout_edge (sbitmap *antloc, sbitmap *transp, sbitmap *antin,
/* Allocate a worklist array/queue. Entries are only added to the
list if they were not already on the list. So the size is
bounded by the number of basic blocks. */
qin = qout = worklist = xmalloc (sizeof (basic_block) * n_basic_blocks);
qin = qout = worklist = XNEWVEC (basic_block, n_basic_blocks);
/* We want a maximal solution, so make an optimistic initialization of
ANTIN. */
@ -260,7 +260,7 @@ compute_laterin (struct edge_list *edge_list, sbitmap *earliest,
list if they were not already on the list. So the size is
bounded by the number of basic blocks. */
qin = qout = worklist
= xmalloc (sizeof (basic_block) * n_basic_blocks);
= XNEWVEC (basic_block, n_basic_blocks);
/* Initialize a mapping from each edge to its index. */
for (i = 0; i < num_edges; i++)
@ -485,7 +485,7 @@ compute_available (sbitmap *avloc, sbitmap *kill, sbitmap *avout,
list if they were not already on the list. So the size is
bounded by the number of basic blocks. */
qin = qout = worklist =
xmalloc (sizeof (basic_block) * (n_basic_blocks - NUM_FIXED_BLOCKS));
XNEWVEC (basic_block, n_basic_blocks - NUM_FIXED_BLOCKS);
/* We want a maximal solution. */
sbitmap_vector_ones (avout, last_basic_block);
@ -614,7 +614,7 @@ compute_nearerout (struct edge_list *edge_list, sbitmap *farthest,
/* Allocate a worklist array/queue. Entries are only added to the
list if they were not already on the list. So the size is
bounded by the number of basic blocks. */
tos = worklist = xmalloc (sizeof (basic_block) * (n_basic_blocks + 1));
tos = worklist = XNEWVEC (basic_block, n_basic_blocks + 1);
/* Initialize NEARER for each edge and build a mapping from an edge to
its index. */


@ -368,15 +368,15 @@ local_alloc (void)
See the declarations of these variables, above,
for what they mean. */
qty = xmalloc (max_qty * sizeof (struct qty));
qty_phys_copy_sugg = xmalloc (max_qty * sizeof (HARD_REG_SET));
qty_phys_num_copy_sugg = xmalloc (max_qty * sizeof (short));
qty_phys_sugg = xmalloc (max_qty * sizeof (HARD_REG_SET));
qty_phys_num_sugg = xmalloc (max_qty * sizeof (short));
qty = XNEWVEC (struct qty, max_qty);
qty_phys_copy_sugg = XNEWVEC (HARD_REG_SET, max_qty);
qty_phys_num_copy_sugg = XNEWVEC (short, max_qty);
qty_phys_sugg = XNEWVEC (HARD_REG_SET, max_qty);
qty_phys_num_sugg = XNEWVEC (short, max_qty);
reg_qty = xmalloc (max_regno * sizeof (int));
reg_offset = xmalloc (max_regno * sizeof (char));
reg_next_in_qty = xmalloc (max_regno * sizeof (int));
reg_qty = XNEWVEC (int, max_regno);
reg_offset = XNEWVEC (char, max_regno);
reg_next_in_qty = XNEWVEC (int, max_regno);
/* Determine which pseudo-registers can be allocated by local-alloc.
In general, these are the registers used only in a single block and
@ -798,7 +798,7 @@ update_equiv_regs (void)
regset_head cleared_regs;
int clear_regnos = 0;
reg_equiv = xcalloc (max_regno, sizeof *reg_equiv);
reg_equiv = XCNEWVEC (struct equivalence, max_regno);
INIT_REG_SET (&cleared_regs);
reg_equiv_init = ggc_alloc_cleared (max_regno * sizeof (rtx));
reg_equiv_init_size = max_regno;
@ -1306,7 +1306,7 @@ block_alloc (int b)
/* +2 to leave room for a post_mark_life at the last insn and for
the birth of a CLOBBER in the first insn. */
regs_live_at = xcalloc ((2 * insn_count + 2), sizeof (HARD_REG_SET));
regs_live_at = XCNEWVEC (HARD_REG_SET, 2 * insn_count + 2);
/* Initialize table of hardware registers currently live. */
@ -1560,7 +1560,7 @@ block_alloc (int b)
number of suggested registers they need so we allocate those with
the most restrictive needs first. */
qty_order = xmalloc (next_qty * sizeof (int));
qty_order = XNEWVEC (int, next_qty);
for (i = 0; i < next_qty; i++)
qty_order[i] = i;


@ -39,7 +39,7 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
struct loops *
loop_optimizer_init (FILE *dumpfile)
{
struct loops *loops = xcalloc (1, sizeof (struct loops));
struct loops *loops = XCNEW (struct loops);
edge e;
edge_iterator ei;
static bool first_time = true;


@ -425,7 +425,7 @@ find_or_insert_inv (htab_t eq, rtx expr, enum machine_mode mode,
if (entry)
return entry->inv;
entry = xmalloc (sizeof (struct invariant_expr_entry));
entry = XNEW (struct invariant_expr_entry);
entry->inv = inv;
entry->expr = expr;
entry->mode = mode;
@ -614,7 +614,7 @@ static struct invariant *
create_new_invariant (struct def *def, rtx insn, bitmap depends_on,
bool always_executed)
{
struct invariant *inv = xmalloc (sizeof (struct invariant));
struct invariant *inv = XNEW (struct invariant);
rtx set = single_set (insn);
inv->def = def;
@ -655,7 +655,7 @@ create_new_invariant (struct def *def, rtx insn, bitmap depends_on,
static void
record_use (struct def *def, rtx *use, rtx insn)
{
struct use *u = xmalloc (sizeof (struct use));
struct use *u = XNEW (struct use);
if (GET_CODE (*use) == SUBREG)
use = &SUBREG_REG (*use);
@ -762,7 +762,7 @@ find_invariant_insn (rtx insn, bool always_reached, bool always_executed)
}
if (simple)
def = xcalloc (1, sizeof (struct def));
def = XCNEW (struct def);
else
def = NULL;


@ -783,7 +783,7 @@ get_biv_step (struct df_ref *last_def, rtx reg, rtx *inner_step,
static void
record_iv (struct df_ref *def, struct rtx_iv *iv)
{
struct rtx_iv *recorded_iv = xmalloc (sizeof (struct rtx_iv));
struct rtx_iv *recorded_iv = XNEW (struct rtx_iv);
*recorded_iv = *iv;
DF_REF_IV_SET (def, recorded_iv);
@ -807,7 +807,7 @@ analyzed_for_bivness_p (rtx def, struct rtx_iv *iv)
static void
record_biv (rtx def, struct rtx_iv *iv)
{
struct biv_entry *biv = xmalloc (sizeof (struct biv_entry));
struct biv_entry *biv = XNEW (struct biv_entry);
void **slot = htab_find_slot_with_hash (bivs, def, REGNO (def), INSERT);
biv->regno = REGNO (def);
@ -2680,7 +2680,7 @@ get_simple_loop_desc (struct loop *loop)
if (desc)
return desc;
desc = xmalloc (sizeof (struct niter_desc));
desc = XNEW (struct niter_desc);
iv_analysis_loop_init (loop);
find_simple_exit (loop, desc);
loop->aux = desc;


@ -507,7 +507,7 @@ peel_loop_completely (struct loops *loops, struct loop *loop)
if (desc->noloop_assumptions)
RESET_BIT (wont_exit, 1);
remove_edges = xcalloc (npeel, sizeof (edge));
remove_edges = XCNEWVEC (edge, npeel);
n_remove_edges = 0;
if (flag_split_ivs_in_unroller)
@ -692,7 +692,7 @@ unroll_loop_constant_iterations (struct loops *loops, struct loop *loop)
wont_exit = sbitmap_alloc (max_unroll + 1);
sbitmap_ones (wont_exit);
remove_edges = xcalloc (max_unroll + exit_mod + 1, sizeof (edge));
remove_edges = XCNEWVEC (edge, max_unroll + exit_mod + 1);
n_remove_edges = 0;
if (flag_split_ivs_in_unroller
|| flag_variable_expansion_in_unroller)
@ -964,7 +964,7 @@ unroll_loop_runtime_iterations (struct loops *loops, struct loop *loop)
opt_info = analyze_insns_in_loop (loop);
/* Remember blocks whose dominators will have to be updated. */
dom_bbs = xcalloc (n_basic_blocks, sizeof (basic_block));
dom_bbs = XCNEWVEC (basic_block, n_basic_blocks);
n_dom_bbs = 0;
body = get_loop_body (loop);
@ -1022,7 +1022,7 @@ unroll_loop_runtime_iterations (struct loops *loops, struct loop *loop)
/* Precondition the loop. */
loop_split_edge_with (loop_preheader_edge (loop), init_code);
remove_edges = xcalloc (max_unroll + n_peel + 1, sizeof (edge));
remove_edges = XCNEWVEC (edge, max_unroll + n_peel + 1);
n_remove_edges = 0;
wont_exit = sbitmap_alloc (max_unroll + 2);
@ -1613,7 +1613,7 @@ analyze_insn_to_expand_var (struct loop *loop, rtx insn)
return NULL;
/* Record the accumulator to expand. */
ves = xmalloc (sizeof (struct var_to_expand));
ves = XNEW (struct var_to_expand);
ves->insn = insn;
ves->var_expansions = VEC_alloc (rtx, heap, 1);
ves->reg = copy_rtx (dest);
@ -1677,7 +1677,7 @@ analyze_iv_to_split_insn (rtx insn)
return NULL;
/* Record the insn to split. */
ivts = xmalloc (sizeof (struct iv_to_split));
ivts = XNEW (struct iv_to_split);
ivts->insn = insn;
ivts->base_var = NULL_RTX;
ivts->step = iv.step;
@ -1697,7 +1697,7 @@ analyze_insns_in_loop (struct loop *loop)
{
basic_block *body, bb;
unsigned i, num_edges = 0;
struct opt_info *opt_info = xcalloc (1, sizeof (struct opt_info));
struct opt_info *opt_info = XCNEW (struct opt_info);
rtx insn;
struct iv_to_split *ivts = NULL;
struct var_to_expand *ves = NULL;


@ -841,18 +841,18 @@ loop_optimize (rtx f, FILE *dumpfile, int flags)
Leave some space for labels allocated by find_and_verify_loops. */
max_uid_for_loop = get_max_uid () + 1 + max_loop_num * 32;
uid_luid = xcalloc (max_uid_for_loop, sizeof (int));
uid_loop = xcalloc (max_uid_for_loop, sizeof (struct loop *));
uid_luid = XCNEWVEC (int, max_uid_for_loop);
uid_loop = XCNEWVEC (struct loop *, max_uid_for_loop);
/* Allocate storage for array of loops. */
loops->array = xcalloc (loops->num, sizeof (struct loop));
loops->array = XCNEWVEC (struct loop, loops->num);
/* Find and process each loop.
First, find them, and record them in order of their beginnings. */
find_and_verify_loops (f, loops);
/* Allocate and initialize auxiliary loop information. */
loops_info = xcalloc (loops->num, sizeof (struct loop_info));
loops_info = XCNEWVEC (struct loop_info, loops->num);
for (i = 0; i < (int) loops->num; i++)
loops->array[i].aux = loops_info + i;
@ -1328,7 +1328,7 @@ scan_loop (struct loop *loop, int flags)
continue;
}
m = xmalloc (sizeof (struct movable));
m = XNEW (struct movable);
m->next = 0;
m->insn = p;
m->set_src = src;
@ -1416,7 +1416,7 @@ scan_loop (struct loop *loop, int flags)
if (regs->array[regno].set_in_loop == 2)
{
struct movable *m;
m = xmalloc (sizeof (struct movable));
m = XNEW (struct movable);
m->next = 0;
m->insn = p;
m->set_dest = SET_DEST (set);
@ -1853,7 +1853,7 @@ static void
combine_movables (struct loop_movables *movables, struct loop_regs *regs)
{
struct movable *m;
char *matched_regs = xmalloc (regs->num);
char *matched_regs = XNEWVEC (char, regs->num);
enum machine_mode mode;
/* Regs that are set more than once are not allowed to match
@ -2188,8 +2188,8 @@ move_movables (struct loop *loop, struct loop_movables *movables,
/* Map of pseudo-register replacements to handle combining
when we move several insns that load the same value
into different pseudo-registers. */
rtx *reg_map = xcalloc (nregs, sizeof (rtx));
char *already_moved = xcalloc (nregs, sizeof (char));
rtx *reg_map = XCNEWVEC (rtx, nregs);
char *already_moved = XCNEWVEC (char, nregs);
for (m = movables->head; m; m = m->next)
{
@ -2537,8 +2537,7 @@ move_movables (struct loop *loop, struct loop_movables *movables,
}
else if (m->insert_temp)
{
rtx *reg_map2 = xcalloc (REGNO (newreg),
sizeof(rtx));
rtx *reg_map2 = XCNEWVEC (rtx, REGNO(newreg));
reg_map2 [m->regno] = newreg;
i1 = loop_insn_hoist (loop, copy_rtx (PATTERN (p)));
@ -6382,7 +6381,7 @@ strength_reduce (struct loop *loop, int flags)
addr_placeholder = gen_reg_rtx (Pmode);
ivs->n_regs = max_reg_before_loop;
ivs->regs = xcalloc (ivs->n_regs, sizeof (struct iv));
ivs->regs = XCNEWVEC (struct iv, ivs->n_regs);
/* Find all BIVs in loop. */
loop_bivs_find (loop);
@ -6431,7 +6430,7 @@ strength_reduce (struct loop *loop, int flags)
Some givs might have been made from biv increments, so look at
ivs->reg_iv_type for a suitable size. */
reg_map_size = ivs->n_regs;
reg_map = xcalloc (reg_map_size, sizeof (rtx));
reg_map = XCNEWVEC (rtx, reg_map_size);
/* Examine each iv class for feasibility of strength reduction/induction
variable elimination. */
@ -6649,7 +6648,7 @@ check_insn_for_bivs (struct loop *loop, rtx p, int not_every_iteration,
/* It is a possible basic induction variable.
Create and initialize an induction structure for it. */
struct induction *v = xmalloc (sizeof (struct induction));
struct induction *v = XNEW (struct induction);
record_biv (loop, v, p, dest_reg, inc_val, mult_val, location,
not_every_iteration, maybe_multiple);
@ -6712,7 +6711,7 @@ check_insn_for_givs (struct loop *loop, rtx p, int not_every_iteration,
&add_val, &mult_val, &ext_val,
&last_consec_insn))))
{
struct induction *v = xmalloc (sizeof (struct induction));
struct induction *v = XNEW (struct induction);
/* If this is a library call, increase benefit. */
if (find_reg_note (p, REG_RETVAL, NULL_RTX))
@ -6828,7 +6827,7 @@ find_mem_givs (const struct loop *loop, rtx x, rtx insn,
GET_MODE (x)))
{
/* Found one; record it. */
struct induction *v = xmalloc (sizeof (struct induction));
struct induction *v = XNEW (struct induction);
record_giv (loop, v, insn, src_reg, addr_placeholder, mult_val,
add_val, ext_val, benefit, DEST_ADDR,
@ -6901,7 +6900,7 @@ record_biv (struct loop *loop, struct induction *v, rtx insn, rtx dest_reg,
{
/* Create and initialize new iv_class. */
bl = xmalloc (sizeof (struct iv_class));
bl = XNEW (struct iv_class);
bl->regno = REGNO (dest_reg);
bl->biv = 0;
@ -9024,8 +9023,8 @@ combine_givs (struct loop_regs *regs, struct iv_class *bl)
if (!g1->ignore)
giv_array[i++] = g1;
stats = xcalloc (giv_count, sizeof (*stats));
can_combine = xcalloc (giv_count, giv_count * sizeof (rtx));
stats = XCNEWVEC (struct combine_givs_stats, giv_count);
can_combine = XCNEWVEC (rtx, giv_count * giv_count);
for (i = 0; i < giv_count; i++)
{
@ -10617,7 +10616,7 @@ loop_regs_scan (const struct loop *loop, int extra_size)
regs->array[i].single_usage = NULL_RTX;
}
last_set = xcalloc (regs->num, sizeof (rtx));
last_set = XCNEWVEC (rtx, regs->num);
/* Scan the loop, recording register usage. */
for (insn = loop->top ? loop->top : loop->start; insn != loop->end;


@ -104,7 +104,7 @@ static struct seginfo *
new_seginfo (int mode, rtx insn, int bb, HARD_REG_SET regs_live)
{
struct seginfo *ptr;
ptr = xmalloc (sizeof (struct seginfo));
ptr = XNEW (struct seginfo);
ptr->mode = mode;
ptr->insn_ptr = insn;
ptr->bbnum = bb;
@ -415,7 +415,7 @@ optimize_mode_switching (FILE *file)
entry_exit_extra = 3;
#endif
bb_info[n_entities]
= xcalloc (last_basic_block + entry_exit_extra, sizeof **bb_info);
= XCNEWVEC (struct bb_info, last_basic_block);
entity_map[n_entities++] = e;
if (num_modes[e] > max_num_modes)
max_num_modes = num_modes[e];


@ -900,7 +900,7 @@ canon_loop (struct loop *loop)
static struct loops *
build_loops_structure (FILE *dumpfile)
{
struct loops *loops = xcalloc (1, sizeof (struct loops));
struct loops *loops = XCNEW (struct loops);
/* Find the loops. */
@ -984,7 +984,7 @@ sms_schedule (FILE *dump_file)
/* Allocate memory to hold the DDG array one entry for each loop.
We use loop->num as index into this array. */
g_arr = xcalloc (loops->num, sizeof (ddg_ptr));
g_arr = XCNEWVEC (ddg_ptr, loops->num);
/* Build DDGs for all the relevant loops and hold them in G_ARR
@ -1172,7 +1172,7 @@ sms_schedule (FILE *dump_file)
fprintf (stats_file, "\n");
}
node_order = (int *) xmalloc (sizeof (int) * g->num_nodes);
node_order = XNEWVEC (int, g->num_nodes);
mii = 1; /* Need to pass some estimate of mii. */
rec_mii = sms_order_nodes (g, mii, node_order);
@ -2037,8 +2037,7 @@ order_nodes_in_scc (ddg_ptr g, sbitmap nodes_ordered, sbitmap scc,
static partial_schedule_ptr
create_partial_schedule (int ii, ddg_ptr g, int history)
{
partial_schedule_ptr ps = (partial_schedule_ptr)
xmalloc (sizeof (struct partial_schedule));
partial_schedule_ptr ps = XNEW (struct partial_schedule);
ps->rows = (ps_insn_ptr *) xcalloc (ii, sizeof (ps_insn_ptr));
ps->ii = ii;
ps->history = history;
@ -2125,7 +2124,7 @@ print_partial_schedule (partial_schedule_ptr ps, FILE *dump)
static ps_insn_ptr
create_ps_insn (ddg_node_ptr node, int rest_count, int cycle)
{
ps_insn_ptr ps_i = xmalloc (sizeof (struct ps_insn));
ps_insn_ptr ps_i = XNEW (struct ps_insn);
ps_i->node = node;
ps_i->next_in_row = NULL;


@ -9048,7 +9048,7 @@ objc_demangle (const char *mangled)
(mangled[1] == 'i' || mangled[1] == 'c') &&
mangled[2] == '_')
{
cp = demangled = xmalloc(strlen(mangled) + 2);
cp = demangled = XNEWVEC (char, strlen(mangled) + 2);
if (mangled[1] == 'i')
*cp++ = '-'; /* for instance method */
else
@ -9106,7 +9106,7 @@ init_objc (void)
gcc_obstack_init (&util_obstack);
util_firstobj = (char *) obstack_finish (&util_obstack);
errbuf = (char *) xmalloc (1024 * 10);
errbuf = XNEWVEC (char, 1024 * 10);
hash_init ();
synth_module_prologue ();
}


@ -232,7 +232,7 @@ write_langs (unsigned int mask)
if (mask & (1U << n))
len += strlen (lang_name) + 1;
result = xmalloc (len);
result = XNEWVEC (char, len);
len = 0;
for (n = 0; (lang_name = lang_names[n]) != 0; n++)
if (mask & (1U << n))
@ -288,7 +288,7 @@ handle_option (const char **argv, unsigned int lang_mask)
/* Drop the "no-" from negative switches. */
size_t len = strlen (opt) - 3;
dup = xmalloc (len + 1);
dup = XNEWVEC (char, len + 1);
dup[0] = '-';
dup[1] = opt[1];
memcpy (dup + 2, opt + 5, len - 2 + 1);
@ -765,7 +765,7 @@ common_handle_option (size_t scode, const char *arg, int value,
{
char *new_option;
int option_index;
new_option = (char *) xmalloc (strlen (arg) + 2);
new_option = XNEWVEC (char, strlen (arg) + 2);
new_option[0] = 'W';
strcpy (new_option+1, arg);
option_index = find_opt (new_option, lang_mask);


@ -224,7 +224,7 @@ alloc_mem (void)
rtx insn;
/* Find the largest UID and create a mapping from UIDs to CUIDs. */
uid_cuid = xcalloc (get_max_uid () + 1, sizeof (int));
uid_cuid = XCNEWVEC (int, get_max_uid () + 1);
i = 1;
FOR_EACH_BB (bb)
FOR_BB_INSNS (bb, insn)


@ -731,7 +731,7 @@ reload_combine (void)
destination. */
min_labelno = get_first_label_num ();
n_labels = max_label_num () - min_labelno;
label_live = xmalloc (n_labels * sizeof (HARD_REG_SET));
label_live = XNEWVEC (HARD_REG_SET, n_labels);
CLEAR_HARD_REG_SET (ever_live_at_start);
FOR_EACH_BB_REVERSE (bb)


@ -1270,7 +1270,7 @@ tree_bb_level_predictions (void)
basic_block bb;
int *heads;
heads = xmalloc (sizeof (int) * last_basic_block);
heads = XNEWVEC (int, last_basic_block);
memset (heads, ENTRY_BLOCK, sizeof (int) * last_basic_block);
heads[ENTRY_BLOCK_PTR->next_bb->index] = last_basic_block;


@ -677,7 +677,7 @@ void
pp_construct (pretty_printer *pp, const char *prefix, int maximum_length)
{
memset (pp, 0, sizeof (pretty_printer));
pp->buffer = xcalloc (1, sizeof (output_buffer));
pp->buffer = XCNEW (output_buffer);
obstack_init (&pp->buffer->chunk_obstack);
obstack_init (&pp->buffer->formatted_obstack);
pp->buffer->obstack = &pp->buffer->formatted_obstack;


@ -50,7 +50,7 @@ static struct bucket **table;
void
debug_tree (tree node)
{
table = xcalloc (HASH_SIZE, sizeof (struct bucket *));
table = XCNEWVEC (struct bucket *, HASH_SIZE);
print_node (stderr, "", node, 0);
free (table);
table = 0;
@ -209,7 +209,7 @@ print_node (FILE *file, const char *prefix, tree node, int indent)
}
/* Add this node to the table. */
b = xmalloc (sizeof (struct bucket));
b = XNEW (struct bucket);
b->node = node;
b->next = table[hash];
table[hash] = b;


@ -693,8 +693,7 @@ compute_value_histograms (histogram_values values)
hist->hvalue.next = ann->histograms;
ann->histograms = hist;
hist->hvalue.counters =
xmalloc (sizeof (gcov_type) * hist->n_counters);
hist->hvalue.counters = XNEWVEC (gcov_type, hist->n_counters);
for (j = 0; j < hist->n_counters; j++)
hist->hvalue.counters[j] = aact_count[j];
}


@ -2926,7 +2926,7 @@ convert_regs_2 (FILE *file, basic_block block)
is only processed after all its predecessors. The number of predecessors
of every block has already been computed. */
stack = xmalloc (sizeof (*stack) * n_basic_blocks);
stack = XNEWVEC (basic_block, n_basic_blocks);
sp = stack;
*sp++ = block;


@ -1190,11 +1190,11 @@ regclass (rtx f, int nregs, FILE *dump)
init_recog ();
costs = xmalloc (nregs * sizeof (struct costs));
costs = XNEWVEC (struct costs, nregs);
#ifdef FORBIDDEN_INC_DEC_CLASSES
in_inc_dec = xmalloc (nregs);
in_inc_dec = XNEWVEC (char, nregs);
#endif /* FORBIDDEN_INC_DEC_CLASSES */
@ -2143,8 +2143,7 @@ allocate_reg_info (size_t num_regs, int new_p, int renumber_p)
{
VARRAY_REG_INIT (reg_n_info, regno_allocated, "reg_n_info");
renumber = xmalloc (size_renumber);
reg_pref_buffer = xmalloc (regno_allocated
* sizeof (struct reg_pref));
reg_pref_buffer = XNEWVEC (struct reg_pref, regno_allocated);
}
else
{
@ -2155,14 +2154,13 @@ allocate_reg_info (size_t num_regs, int new_p, int renumber_p)
free ((char *) renumber);
free ((char *) reg_pref);
renumber = xmalloc (size_renumber);
reg_pref_buffer = xmalloc (regno_allocated
* sizeof (struct reg_pref));
reg_pref_buffer = XNEWVEC (struct reg_pref, regno_allocated);
}
else
{
renumber = xrealloc (renumber, size_renumber);
reg_pref_buffer = xrealloc (reg_pref_buffer,
reg_pref_buffer = (struct reg_pref *) xrealloc (reg_pref_buffer,
regno_allocated
* sizeof (struct reg_pref));
}
@ -2601,7 +2599,7 @@ record_subregs_of_mode (rtx subreg)
node = *slot;
if (node == NULL)
{
node = xcalloc (1, sizeof (*node));
node = XCNEW (struct subregs_of_mode_node);
node->block = regno & -8;
*slot = node;
}


@ -1057,10 +1057,10 @@ regmove_optimize (rtx f, int nregs, FILE *regmove_dump_file)
can suppress some optimizations in those zones. */
mark_flags_life_zones (discover_flags_reg ());
regno_src_regno = xmalloc (sizeof *regno_src_regno * nregs);
regno_src_regno = XNEWVEC (int, nregs);
for (i = nregs; --i >= 0; ) regno_src_regno[i] = -1;
regmove_bb_head = xmalloc (sizeof (int) * (old_max_uid + 1));
regmove_bb_head = XNEWVEC (int, old_max_uid + 1);
for (i = old_max_uid; i >= 0; i--) regmove_bb_head[i] = -1;
FOR_EACH_BB (bb)
regmove_bb_head[INSN_UID (BB_HEAD (bb))] = bb->index;
@ -2197,7 +2197,7 @@ record_one_stack_memref (rtx insn, rtx *mem, struct csa_memlist *next_memlist)
{
struct csa_memlist *ml;
ml = xmalloc (sizeof (*ml));
ml = XNEW (struct csa_memlist);
if (XEXP (*mem, 0) == stack_pointer_rtx)
ml->sp_offset = 0;


@ -1797,7 +1797,7 @@ copyprop_hardreg_forward (void)
need_refresh = false;
all_vd = xmalloc (sizeof (struct value_data) * last_basic_block);
all_vd = XNEWVEC (struct value_data, last_basic_block);
visited = sbitmap_alloc (last_basic_block);
sbitmap_zero (visited);
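
One property the conversion preserves: xmalloc and xcalloc never return NULL -- on allocation failure they print a fatal out-of-memory message and exit -- so none of the converted call sites need a null check. A rough stand-alone sketch of that behaviour (not the actual libiberty source):

    #include <stdio.h>
    #include <stdlib.h>

    /* Approximation of libiberty's failure handling: allocation either
       succeeds or terminates the process.  */
    static void *
    xmalloc_sketch (size_t size)
    {
      void *p = malloc (size ? size : 1);
      if (p == NULL)
        {
          fprintf (stderr, "out of memory allocating %lu bytes\n",
                   (unsigned long) size);
          exit (EXIT_FAILURE);
        }
      return p;
    }

    #define XNEWVEC(T, N) ((T *) xmalloc_sketch (sizeof (T) * (N)))

    int
    main (void)
    {
      /* No null check: XNEWVEC either returns usable storage or exits.  */
      int *v = XNEWVEC (int, 128);
      v[0] = 0;
      free (v);
      return 0;
    }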


@ -699,15 +699,15 @@ reload (rtx first, int global)
Record memory equivalents in reg_mem_equiv so they can
be substituted eventually by altering the REG-rtx's. */
reg_equiv_constant = xcalloc (max_regno, sizeof (rtx));
reg_equiv_invariant = xcalloc (max_regno, sizeof (rtx));
reg_equiv_mem = xcalloc (max_regno, sizeof (rtx));
reg_equiv_address = xcalloc (max_regno, sizeof (rtx));
reg_max_ref_width = xcalloc (max_regno, sizeof (int));
reg_old_renumber = xcalloc (max_regno, sizeof (short));
reg_equiv_constant = XCNEWVEC (rtx, max_regno);
reg_equiv_invariant = XCNEWVEC (rtx, max_regno);
reg_equiv_mem = XCNEWVEC (rtx, max_regno);
reg_equiv_address = XCNEWVEC (rtx, max_regno);
reg_max_ref_width = XCNEWVEC (unsigned int, max_regno);
reg_old_renumber = XCNEWVEC (short, max_regno);
memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short));
pseudo_forbidden_regs = xmalloc (max_regno * sizeof (HARD_REG_SET));
pseudo_previous_regs = xcalloc (max_regno, sizeof (HARD_REG_SET));
pseudo_forbidden_regs = XNEWVEC (HARD_REG_SET, max_regno);
pseudo_previous_regs = XCNEWVEC (HARD_REG_SET, max_regno);
CLEAR_HARD_REG_SET (bad_spill_regs_global);
@ -817,8 +817,8 @@ reload (rtx first, int global)
/* We used to use alloca here, but the size of what it would try to
allocate would occasionally cause it to exceed the stack limit and
cause a core dump. */
offsets_known_at = xmalloc (num_labels);
offsets_at = xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (HOST_WIDE_INT));
offsets_known_at = XNEWVEC (char, num_labels);
offsets_at = (HOST_WIDE_INT (*)[NUM_ELIMINABLE_REGS]) xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (HOST_WIDE_INT));
/* Alter each pseudo-reg rtx to contain its hard reg number.
Assign stack slots to the pseudos that lack hard regs or equivalents.
@ -3878,8 +3878,8 @@ reload_as_needed (int live_known)
memset (spill_reg_rtx, 0, sizeof spill_reg_rtx);
memset (spill_reg_store, 0, sizeof spill_reg_store);
reg_last_reload_reg = xcalloc (max_regno, sizeof (rtx));
reg_has_output_reload = xmalloc (max_regno);
reg_last_reload_reg = XCNEWVEC (rtx, max_regno);
reg_has_output_reload = XNEWVEC (char, max_regno);
CLEAR_HARD_REG_SET (reg_reloaded_valid);
CLEAR_HARD_REG_SET (reg_reloaded_call_part_clobbered);
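
Note that offsets_at above keeps a bare xmalloc with a hand-written cast. Presumably that is because its element type is itself an array, HOST_WIDE_INT[NUM_ELIMINABLE_REGS]: a macro that forms its cast by appending "*" to the type name would produce invalid C, since a pointer to an array has to be spelled with the (*) declarator. A tiny illustration with hypothetical names:

    #include <stdlib.h>

    #define NUM_ELIM 4  /* hypothetical stand-in for NUM_ELIMINABLE_REGS */

    int
    main (void)
    {
      size_t num_labels = 10;

      /* "long[NUM_ELIM] *" would not parse; the pointer-to-array type must
         be written long (*)[NUM_ELIM], which a simple (T *) cast macro
         cannot produce.  */
      long (*offsets)[NUM_ELIM] =
        (long (*)[NUM_ELIM]) malloc (num_labels * NUM_ELIM * sizeof (long));

      offsets[3][1] = 42;  /* indexed as [label][eliminable register] */

      free (offsets);
      return 0;
    }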


@ -949,7 +949,7 @@ mark_target_live_regs (rtx insns, rtx target, struct resources *res)
{
/* Allocate a place to put our results and chain it into the
hash table. */
tinfo = xmalloc (sizeof (struct target_info));
tinfo = XNEW (struct target_info);
tinfo->uid = INSN_UID (target);
tinfo->block = b;
tinfo->next
@ -1226,8 +1226,8 @@ init_resource_info (rtx epilogue_insn)
}
/* Allocate and initialize the tables used by mark_target_live_regs. */
target_hash_table = xcalloc (TARGET_HASH_PRIME, sizeof (struct target_info *));
bb_ticks = xcalloc (last_basic_block, sizeof (int));
target_hash_table = XCNEWVEC (struct target_info *, TARGET_HASH_PRIME);
bb_ticks = XCNEWVEC (int, last_basic_block);
}
/* Free up the resources allocated to mark_target_live_regs (). This


@ -1492,7 +1492,7 @@ init_deps (struct deps *deps)
int max_reg = (reload_completed ? FIRST_PSEUDO_REGISTER : max_reg_num ());
deps->max_reg = max_reg;
deps->reg_last = xcalloc (max_reg, sizeof (struct deps_reg));
deps->reg_last = XCNEWVEC (struct deps_reg, max_reg);
INIT_REG_SET (&deps->reg_last_in_use);
INIT_REG_SET (&deps->reg_conditional_sets);
@ -1558,11 +1558,11 @@ init_dependency_caches (int luid)
if (luid / n_basic_blocks > 100 * 5)
{
int i;
true_dependency_cache = xmalloc (luid * sizeof (bitmap_head));
anti_dependency_cache = xmalloc (luid * sizeof (bitmap_head));
output_dependency_cache = xmalloc (luid * sizeof (bitmap_head));
true_dependency_cache = XNEWVEC (bitmap_head, luid);
anti_dependency_cache = XNEWVEC (bitmap_head, luid);
output_dependency_cache = XNEWVEC (bitmap_head, luid);
#ifdef ENABLE_CHECKING
forward_dependency_cache = xmalloc (luid * sizeof (bitmap_head));
forward_dependency_cache = XNEWVEC (bitmap_head, luid);
#endif
for (i = 0; i < luid; i++)
{

Some files were not shown because too many files have changed in this diff Show More