bitmap.h (BITMAP_XMALLOC): New macro.
* bitmap.h (BITMAP_XMALLOC): New macro.
* flow.c (CLEAN_ALLOCA): Remove.
(delete_unreachable_blocks): Use xmalloc/xcalloc instead of alloca.
(life_analysis): Likewise.
(update_life_info): Don't use CLEAN_ALLOCA.
(life_analysis_1): Use xmalloc/xcalloc instead of alloca.
(calculate_global_regs_live): Likewise.
(print_rtl_with_bb): Likewise.
(verify_flow_info): Likewise.
* global.c (global_alloc): Likewise.
(global_conflicts): Likewise.
* integrate.c (save_for_inline_nocopy): Likewise.
(expand_inline_function): Likewise.
* jump.c (jump_optimize_1): Likewise.
(duplicate_loop_exit_test): Likewise.
(thread_jumps): Likewise.
* loop.c (loop_optimize): Likewise.
(combine_givs): Likewise.
(recombine_givs): Likewise.
* reorg.c (dbr_schedule): Likewise.
* unroll.c (unroll_loop): Likewise.

From-SVN: r30333
commit 67289ea639
parent 20bbd3cd53
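Every change below follows the same mechanical pattern: a stack allocation made with alloca (often followed by a bzero or memset to clear it) becomes a heap allocation made with xmalloc or xcalloc, and an explicit free is added on every path that leaves the function. A minimal before/after sketch of that pattern, using hypothetical names (tbl, n) rather than anything taken from the patch:

    /* Before: stack allocation, released implicitly on return.  */
    int *tbl = (int *) alloca (n * sizeof (int));
    memset (tbl, 0, n * sizeof (int));
    /* ... use tbl ... */

    /* After: heap allocation; xcalloc also zeroes the block, and the
       buffer must now be freed explicitly before every return.  */
    int *tbl = (int *) xcalloc (n, sizeof (int));
    /* ... use tbl ... */
    free (tbl);

The motivation is the one already recorded in a comment in integrate.c: these buffers are sized by quantities like max_regno and max_uid, and the old alloca calls would occasionally exceed the stack limit on large functions.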
gcc/ChangeLog
@@ -1,5 +1,26 @@
 Mon Nov  1 15:41:01 1999  Mark P. Mitchell  <mark@codesourcery.com>
 
+	* bitmap.h (BITMAP_XMALLOC): New macro.
+	* flow.c (CLEAN_ALLOCA): Remove.
+	(delete_unreachable_blocks): Use xmalloc/xcalloc instead of alloca.
+	(life_analysis): Likewise.
+	(update_life_info): Don't use CLEAN_ALLOCA.
+	(life_analysis_1): Use xmalloc/xcalloc instead of alloca.
+	(calculate_global_regs_live): Likewise.
+	(print_rtl_with_bb): Likewise.
+	(verify_flow_info): Likewise.
+	* global.c (global_alloc): Likewise.
+	(global_conflicts): Likewise.
+	* integrate.c (save_for_inline_nocopy): Likewise.
+	(expand_inline_function): Likewise.
+	* jump.c (jump_optimize_1): Likewise.
+	(duplicate_loop_exit_test): Likewise.
+	(thread_jumps): Likewise.
+	* loop.c (loop_optimize): Likewise.
+	(combine_givs): Likewise.
+	(recombine_givs): Likewise.
+	* reorg.c (dbr_schedule): Likewise.
+	* unroll.c (unroll_loop): Likewise.
 
 	* combine.c (combine_instructions): Use xmalloc instead of alloca.
 
 Mon Nov  1 13:22:30 1999  Richard Henderson  <rth@cygnus.com>
 
gcc/bitmap.h
@@ -114,6 +114,10 @@ extern void debug_bitmap PROTO((bitmap));
 #define BITMAP_ALLOCA() \
   bitmap_initialize ((bitmap) alloca (sizeof (bitmap_head)))
 
+/* Allocate a bitmap with xmalloc.  */
+#define BITMAP_XMALLOC() \
+  bitmap_initialize ((bitmap) xmalloc (sizeof (bitmap_head)))
+
 /* Do any cleanup needed on a bitmap when it is no longer used.  */
 #define BITMAP_FREE(BITMAP) \
   do { \
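A sketch of how the new macro pairs with the existing BITMAP_FREE, based only on the definitions visible above (the usage lines are illustrative, not taken from the patch):

    bitmap live = BITMAP_XMALLOC ();   /* bitmap_head comes from xmalloc */
    bitmap_set_bit (live, 42);
    /* ... */
    BITMAP_FREE (live);                /* cleans up the bitmap's contents */
    free (live);                       /* releases the xmalloc'd head itself */

The extra free of the head is exactly what the life_analysis hunk in flow.c adds after its existing BITMAP_FREE call.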
gcc/flow.c
@@ -157,13 +157,6 @@ Boston, MA 02111-1307, USA.  */
 #define HAVE_prologue 0
 #endif
 
-#ifdef USE_C_ALLOCA
-#define CLEAN_ALLOCA alloca (0)
-#else
-#define CLEAN_ALLOCA
-#endif
-
-
 /* The contents of the current function definition are allocated
    in this obstack, and all are freed at the end of the function.
    For top-level functions, this is temporary_obstack.
@@ -1668,7 +1661,7 @@ delete_unreachable_blocks ()
   int i, n;
 
   n = n_basic_blocks;
-  tos = worklist = (basic_block *) alloca (sizeof (basic_block) * n);
+  tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
 
   /* Use basic_block->aux as a marker.  Clear them all.  */
 
@@ -1752,6 +1745,8 @@ delete_unreachable_blocks ()
      blocks to remove as well.  */
   if (deleted_handler)
     delete_eh_regions ();
+
+  free (worklist);
 }
 
 /* Find EH regions for which there is no longer a handler, and delete them.  */
@@ -2453,7 +2448,7 @@ life_analysis (f, nregs, file, remove_dead_code)
 #endif
 
   /* Allocate a bitmap to be filled in by record_volatile_insns.  */
-  uid_volatile = BITMAP_ALLOCA ();
+  uid_volatile = BITMAP_XMALLOC ();
 
   /* We want alias analysis information for local dead store elimination.  */
   init_alias_analysis ();
@@ -2472,6 +2467,7 @@ life_analysis (f, nregs, file, remove_dead_code)
     dump_flow_info (file);
 
   BITMAP_FREE (uid_volatile);
+  free (uid_volatile);
   free_basic_block_vars (1);
 }
 
@@ -2601,8 +2597,6 @@ update_life_info (blocks, extent, prop_flags)
 
       if (extent == UPDATE_LIFE_LOCAL)
	verify_local_live_at_start (tmp, bb);
-
-      CLEAN_ALLOCA;
     });
 
   FREE_REG_SET (tmp);
@@ -2916,8 +2910,7 @@ life_analysis_1 (f, nregs, flags)
   allocate_reg_life_data ();
   allocate_bb_life_data ();
 
-  reg_next_use = (rtx *) alloca (nregs * sizeof (rtx));
-  memset (reg_next_use, 0, nregs * sizeof (rtx));
+  reg_next_use = (rtx *) xcalloc (nregs, sizeof (rtx));
 
   /* Assume that the stack pointer is unchanging if alloca hasn't been used.
      This will be cleared by record_volatile_insns if it encounters an insn
@@ -2971,8 +2964,6 @@ life_analysis_1 (f, nregs, flags)
 
       COPY_REG_SET (tmp, bb->global_live_at_end);
       propagate_block (tmp, bb->head, bb->end, (regset) NULL, i, flags);
-
-      CLEAN_ALLOCA;
     }
 
   FREE_REG_SET (tmp);
@@ -3000,6 +2991,8 @@ life_analysis_1 (f, nregs, flags)
   if (reload_completed)
     memcpy (regs_ever_live, save_regs_ever_live, sizeof (regs_ever_live));
 
+  /* Clean up.  */
+  free (reg_next_use);
   reg_next_use = NULL;
 }
 
@@ -3022,7 +3015,7 @@ calculate_global_regs_live (blocks_in, blocks_out, flags)
   /* Create a worklist.  Allocate an extra slot for ENTRY_BLOCK, and one
      because the `head == tail' style test for an empty queue doesn't
     work with a full queue.  */
-  queue = (basic_block *) alloca ((n_basic_blocks + 2) * sizeof (*queue));
+  queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
   qtail = queue;
   qhead = qend = queue + n_basic_blocks + 2;
 
@@ -3158,6 +3151,8 @@ calculate_global_regs_live (blocks_in, blocks_out, flags)
       basic_block bb = BASIC_BLOCK (i);
       FREE_REG_SET (bb->local_set);
     });
+
+  free (queue);
 }
 
 /* Subroutines of life analysis.  */
@@ -5069,15 +5064,11 @@ print_rtl_with_bb (outf, rtx_first)
       enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
       int max_uid = get_max_uid ();
       basic_block *start = (basic_block *)
-	alloca (max_uid * sizeof (basic_block));
+	xcalloc (max_uid, sizeof (basic_block));
       basic_block *end = (basic_block *)
-	alloca (max_uid * sizeof (basic_block));
+	xcalloc (max_uid, sizeof (basic_block));
       enum bb_state *in_bb_p = (enum bb_state *)
-	alloca (max_uid * sizeof (enum bb_state));
-
-      memset (start, 0, max_uid * sizeof (basic_block));
-      memset (end, 0, max_uid * sizeof (basic_block));
-      memset (in_bb_p, 0, max_uid * sizeof (enum bb_state));
+	xcalloc (max_uid, sizeof (enum bb_state));
 
       for (i = n_basic_blocks - 1; i >= 0; i--)
	{
@@ -5134,6 +5125,10 @@ print_rtl_with_bb (outf, rtx_first)
	  if (did_output)
	    putc ('\n', outf);
	}
+
+      free (start);
+      free (end);
+      free (in_bb_p);
     }
 
   if (current_function_epilogue_delay_list != 0)
@@ -5878,8 +5873,7 @@ verify_flow_info ()
   rtx x;
   int i, err = 0;
 
-  bb_info = (basic_block *) alloca (max_uid * sizeof (basic_block));
-  memset (bb_info, 0, max_uid * sizeof (basic_block));
+  bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
 
   /* First pass check head/end pointers and set bb_info array used by
      later passes.  */
@@ -6074,6 +6068,9 @@ verify_flow_info ()
 
   if (err)
     abort ();
+
+  /* Clean up.  */
+  free (bb_info);
 }
 
 /* Functions to access an edge list with a vector representation.
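The CLEAN_ALLOCA macro removed at the top of flow.c only mattered when GCC was built with the C emulation of alloca: in that configuration alloca is malloc-based, and calling alloca (0) inside long-running loops reclaims blocks belonging to frames that have already exited. Roughly what the removed hook amounted to (a sketch of the old arrangement, not new code):

    #ifdef USE_C_ALLOCA
    #define CLEAN_ALLOCA alloca (0)   /* let the emulation garbage-collect dead frames */
    #else
    #define CLEAN_ALLOCA              /* real alloca needs no help */
    #endif

Once the per-function buffers are xmalloc'd and freed explicitly, there is nothing for the emulation to collect, so the calls in update_life_info and life_analysis_1 go away along with the macro itself.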
gcc/global.c
@@ -372,15 +372,14 @@ global_alloc (file)
   /* Establish mappings from register number to allocation number
      and vice versa.  In the process, count the allocnos.  */
 
-  reg_allocno = (int *) alloca (max_regno * sizeof (int));
+  reg_allocno = (int *) xmalloc (max_regno * sizeof (int));
 
   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
     reg_allocno[i] = -1;
 
   /* Initialize the shared-hard-reg mapping
      from the list of pairs that may share.  */
-  reg_may_share = (int *) alloca (max_regno * sizeof (int));
-  bzero ((char *) reg_may_share, max_regno * sizeof (int));
+  reg_may_share = (int *) xcalloc (max_regno, sizeof (int));
   for (x = regs_may_share; x; x = XEXP (XEXP (x, 1), 1))
     {
       int r1 = REGNO (XEXP (x, 0));
@@ -411,15 +410,11 @@ global_alloc (file)
     else
       reg_allocno[i] = -1;
 
-  allocno_reg = (int *) alloca (max_allocno * sizeof (int));
-  allocno_size = (int *) alloca (max_allocno * sizeof (int));
-  allocno_calls_crossed = (int *) alloca (max_allocno * sizeof (int));
-  allocno_n_refs = (int *) alloca (max_allocno * sizeof (int));
-  allocno_live_length = (int *) alloca (max_allocno * sizeof (int));
-  bzero ((char *) allocno_size, max_allocno * sizeof (int));
-  bzero ((char *) allocno_calls_crossed, max_allocno * sizeof (int));
-  bzero ((char *) allocno_n_refs, max_allocno * sizeof (int));
-  bzero ((char *) allocno_live_length, max_allocno * sizeof (int));
+  allocno_reg = (int *) xmalloc (max_allocno * sizeof (int));
+  allocno_size = (int *) xcalloc (max_allocno, sizeof (int));
+  allocno_calls_crossed = (int *) xcalloc (max_allocno, sizeof (int));
+  allocno_n_refs = (int *) xcalloc (max_allocno, sizeof (int));
+  allocno_live_length = (int *) xcalloc (max_allocno, sizeof (int));
 
   for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
     if (reg_allocno[i] >= 0)
@@ -461,26 +456,15 @@ global_alloc (file)
      initialize them.  */
 
   hard_reg_conflicts
-    = (HARD_REG_SET *) alloca (max_allocno * sizeof (HARD_REG_SET));
-  bzero ((char *) hard_reg_conflicts, max_allocno * sizeof (HARD_REG_SET));
-
+    = (HARD_REG_SET *) xcalloc (max_allocno, sizeof (HARD_REG_SET));
   hard_reg_preferences
-    = (HARD_REG_SET *) alloca (max_allocno * sizeof (HARD_REG_SET));
-  bzero ((char *) hard_reg_preferences, max_allocno * sizeof (HARD_REG_SET));
-
+    = (HARD_REG_SET *) xcalloc (max_allocno, sizeof (HARD_REG_SET));
   hard_reg_copy_preferences
-    = (HARD_REG_SET *) alloca (max_allocno * sizeof (HARD_REG_SET));
-  bzero ((char *) hard_reg_copy_preferences,
-	 max_allocno * sizeof (HARD_REG_SET));
-
+    = (HARD_REG_SET *) xcalloc (max_allocno, sizeof (HARD_REG_SET));
   hard_reg_full_preferences
-    = (HARD_REG_SET *) alloca (max_allocno * sizeof (HARD_REG_SET));
-  bzero ((char *) hard_reg_full_preferences,
-	 max_allocno * sizeof (HARD_REG_SET));
-
+    = (HARD_REG_SET *) xcalloc (max_allocno, sizeof (HARD_REG_SET));
   regs_someone_prefers
-    = (HARD_REG_SET *) alloca (max_allocno * sizeof (HARD_REG_SET));
-  bzero ((char *) regs_someone_prefers, max_allocno * sizeof (HARD_REG_SET));
+    = (HARD_REG_SET *) xcalloc (max_allocno, sizeof (HARD_REG_SET));
 
   allocno_row_words = (max_allocno + INT_BITS - 1) / INT_BITS;
 
@@ -490,7 +474,7 @@ global_alloc (file)
   conflicts = (INT_TYPE *) xcalloc (max_allocno * allocno_row_words,
				     sizeof (INT_TYPE));
 
-  allocnos_live = (INT_TYPE *) alloca (allocno_row_words * sizeof (INT_TYPE));
+  allocnos_live = (INT_TYPE *) xmalloc (allocno_row_words * sizeof (INT_TYPE));
 
   /* If there is work to be done (at least one reg to allocate),
      perform global conflict analysis and allocate the regs.  */
@@ -523,7 +507,7 @@ global_alloc (file)
 
       /* Determine the order to allocate the remaining pseudo registers.  */
 
-      allocno_order = (int *) alloca (max_allocno * sizeof (int));
+      allocno_order = (int *) xmalloc (max_allocno * sizeof (int));
       for (i = 0; i < (size_t) max_allocno; i++)
	allocno_order[i] = i;
 
@@ -568,6 +552,8 @@ global_alloc (file)
	  if (reg_alternate_class (allocno_reg[allocno_order[i]]) != NO_REGS)
	    find_reg (allocno_order[i], 0, 1, 0, 0);
	}
+
+      free (allocno_order);
     }
 
   /* Do the reloads now while the allocno data still exist, so that we can
@@ -582,7 +568,22 @@ global_alloc (file)
	retval = reload (get_insns (), 1, file);
     }
 
+  /* Clean up.  */
+  free (reg_allocno);
+  free (reg_may_share);
+  free (allocno_reg);
+  free (allocno_size);
+  free (allocno_calls_crossed);
+  free (allocno_n_refs);
+  free (allocno_live_length);
+  free (hard_reg_conflicts);
+  free (hard_reg_preferences);
+  free (hard_reg_copy_preferences);
+  free (hard_reg_full_preferences);
+  free (regs_someone_prefers);
+  free (conflicts);
+  free (allocnos_live);
+
   return retval;
 }
 
@@ -626,9 +627,9 @@ global_conflicts ()
   int *block_start_allocnos;
 
   /* Make a vector that mark_reg_{store,clobber} will store in.  */
-  regs_set = (rtx *) alloca (max_parallel * sizeof (rtx) * 2);
+  regs_set = (rtx *) xmalloc (max_parallel * sizeof (rtx) * 2);
 
-  block_start_allocnos = (int *) alloca (max_allocno * sizeof (int));
+  block_start_allocnos = (int *) xmalloc (max_allocno * sizeof (int));
 
   for (b = 0; b < n_basic_blocks; b++)
     {
@@ -788,6 +789,10 @@ global_conflicts ()
	  insn = NEXT_INSN (insn);
	}
     }
+
+  /* Clean up.  */
+  free (block_start_allocnos);
+  free (regs_set);
 }
 /* Expand the preference information by looking for cases where one allocno
    dies in an insn that sets an allocno.  If those two allocnos don't conflict,
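None of the converted call sites in global_alloc check the result of xmalloc or xcalloc, and they do not need to: unlike malloc, these wrappers treat allocation failure as a fatal error rather than returning NULL. A rough sketch of that contract (not the actual wrapper source):

    void *
    xmalloc_like (size_t size)
    {
      void *p = malloc (size ? size : 1);
      if (p == 0)
	abort ();   /* the real wrapper reports "virtual memory exhausted" and exits */
      return p;
    }

That keeps the converted code identical in shape to the alloca version apart from the added free calls at the end of the function.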
gcc/integrate.c
@@ -350,7 +350,7 @@ save_for_inline_nocopy (fndecl)
      for the parms, prior to elimination of virtual registers.
      These values are needed for substituting parms properly.  */
 
-  parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
+  parmdecl_map = (tree *) xmalloc (max_parm_reg * sizeof (tree));
 
   /* Make and emit a return-label if we have not already done so.  */
 
@@ -401,6 +401,9 @@ save_for_inline_nocopy (fndecl)
   current_function->original_arg_vector = argvec;
   current_function->original_decl_initial = DECL_INITIAL (fndecl);
   DECL_SAVED_INSNS (fndecl) = current_function;
+
+  /* Clean up.  */
+  free (parmdecl_map);
 }
 
 /* Note whether a parameter is modified or not.  */
@@ -566,8 +569,8 @@ expand_inline_function (fndecl, parms, target, ignore, type,
   /* Expand the function arguments.  Do this first so that any
      new registers get created before we allocate the maps.  */
 
-  arg_vals = (rtx *) alloca (nargs * sizeof (rtx));
-  arg_trees = (tree *) alloca (nargs * sizeof (tree));
+  arg_vals = (rtx *) xmalloc (nargs * sizeof (rtx));
+  arg_trees = (tree *) xmalloc (nargs * sizeof (tree));
 
   for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
        formal;
@@ -649,11 +652,10 @@ expand_inline_function (fndecl, parms, target, ignore, type,
 
   /* Allocate the structures we use to remap things.  */
 
-  map = (struct inline_remap *) alloca (sizeof (struct inline_remap));
+  map = (struct inline_remap *) xmalloc (sizeof (struct inline_remap));
   map->fndecl = fndecl;
 
-  map->reg_map = (rtx *) alloca (max_regno * sizeof (rtx));
-  bzero ((char *) map->reg_map, max_regno * sizeof (rtx));
+  map->reg_map = (rtx *) xcalloc (max_regno, sizeof (rtx));
 
   /* We used to use alloca here, but the size of what it would try to
      allocate would occasionally cause it to exceed the stack limit and
@@ -663,8 +665,7 @@ expand_inline_function (fndecl, parms, target, ignore, type,
   map->label_map = real_label_map;
 
   inl_max_uid = (inl_f->emit->x_cur_insn_uid + 1);
-  map->insn_map = (rtx *) alloca (inl_max_uid * sizeof (rtx));
-  bzero ((char *) map->insn_map, inl_max_uid * sizeof (rtx));
+  map->insn_map = (rtx *) xcalloc (inl_max_uid, sizeof (rtx));
   map->min_insnno = 0;
   map->max_insnno = inl_max_uid;
 
@@ -1356,8 +1357,13 @@ expand_inline_function (fndecl, parms, target, ignore, type,
   /* Make sure we free the things we explicitly allocated with xmalloc.  */
   if (real_label_map)
     free (real_label_map);
-  if (map)
-    VARRAY_FREE (map->const_equiv_varray);
+  VARRAY_FREE (map->const_equiv_varray);
+  free (map->reg_map);
+  free (map->insn_map);
+  free (map);
+  free (arg_vals);
+  free (arg_trees);
 
   inlining = inlining_previous;
 
   return target;
gcc/jump.c
@@ -205,8 +205,7 @@ jump_optimize_1 (f, cross_jump, noop_moves, after_regscan, mark_labels_only)
   /* Leave some extra room for labels and duplicate exit test insns
      we make.  */
   max_jump_chain = max_uid * 14 / 10;
-  jump_chain = (rtx *) alloca (max_jump_chain * sizeof (rtx));
-  bzero ((char *) jump_chain, max_jump_chain * sizeof (rtx));
+  jump_chain = (rtx *) xcalloc (max_jump_chain, sizeof (rtx));
 
   mark_all_labels (f, cross_jump);
 
@@ -227,7 +226,7 @@ jump_optimize_1 (f, cross_jump, noop_moves, after_regscan, mark_labels_only)
   /* Quit now if we just wanted to rebuild the JUMP_LABEL and REG_LABEL
      notes and recompute LABEL_NUSES.  */
   if (mark_labels_only)
-    return;
+    goto end;
 
   exception_optimize ();
 
@@ -245,10 +244,8 @@ jump_optimize_1 (f, cross_jump, noop_moves, after_regscan, mark_labels_only)
       /* Zero the "deleted" flag of all the "deleted" insns.  */
       for (insn = f; insn; insn = NEXT_INSN (insn))
	INSN_DELETED_P (insn) = 0;
 
-      /* Show that the jump chain is not valid.  */
-      jump_chain = 0;
-      return;
+      goto end;
     }
 
 #ifdef HAVE_return
@@ -2301,7 +2298,9 @@ jump_optimize_1 (f, cross_jump, noop_moves, after_regscan, mark_labels_only)
   if (calculate_can_reach_end (last_insn, 0, 1))
     can_reach_end = 1;
 
-  /* Show JUMP_CHAIN no longer valid.  */
+ end:
+  /* Clean up.  */
+  free (jump_chain);
   jump_chain = 0;
 }
 
@@ -2872,10 +2871,7 @@ duplicate_loop_exit_test (loop_start)
	    /* We can do the replacement.  Allocate reg_map if this is the
	       first replacement we found.  */
	    if (reg_map == 0)
-	      {
-		reg_map = (rtx *) alloca (max_reg * sizeof (rtx));
-		bzero ((char *) reg_map, max_reg * sizeof (rtx));
-	      }
+	      reg_map = (rtx *) xcalloc (max_reg, sizeof (rtx));
 
	    REG_LOOP_TEST_P (reg) = 1;
 
@@ -2986,6 +2982,10 @@ duplicate_loop_exit_test (loop_start)
     emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
 
   delete_insn (next_nonnote_insn (loop_start));
+
+  /* Clean up.  */
+  if (reg_map)
+    free (reg_map);
 
   return 1;
 }
@@ -5182,9 +5182,9 @@ thread_jumps (f, max_reg, flag_before_loop)
   int *all_reset;
 
   /* Allocate register tables and quick-reset table.  */
-  modified_regs = (char *) alloca (max_reg * sizeof (char));
-  same_regs = (int *) alloca (max_reg * sizeof (int));
-  all_reset = (int *) alloca (max_reg * sizeof (int));
+  modified_regs = (char *) xmalloc (max_reg * sizeof (char));
+  same_regs = (int *) xmalloc (max_reg * sizeof (int));
+  all_reset = (int *) xmalloc (max_reg * sizeof (int));
   for (i = 0; i < max_reg; i++)
     all_reset[i] = -1;
 
@@ -5342,6 +5342,11 @@ thread_jumps (f, max_reg, flag_before_loop)
	    }
	}
     }
+
+  /* Clean up.  */
+  free (modified_regs);
+  free (same_regs);
+  free (all_reset);
 }
 
 /* This is like RTX_EQUAL_P except that it knows about our handling of
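jump_optimize_1 returns early in several places (two of them appear in the hunks above), so simply freeing jump_chain before the closing brace would leak on those paths; the patch instead turns the early returns into goto end and does the free at the shared label. A minimal sketch of that shape, with hypothetical names (optimize_pass, chain, nothing_to_do):

    #include <stdlib.h>

    extern void *xcalloc (size_t, size_t);   /* never returns NULL */

    static void
    optimize_pass (int max, int nothing_to_do)
    {
      int **chain = (int **) xcalloc (max, sizeof (int *));

      if (nothing_to_do)
	goto end;              /* was a bare `return;', which would now leak chain */

      /* ... main work, possibly bailing out via `goto end' elsewhere ... */

     end:
      free (chain);
    }

duplicate_loop_exit_test takes the simpler route of freeing reg_map, when it was actually allocated, just before its final return, and thread_jumps frees its three tables at the end of the function.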
gcc/loop.c
@@ -464,9 +464,6 @@ loop_optimize (f, dumpfile, unroll_p, bct_p)
 
   max_reg_before_loop = max_reg_num ();
 
-  moved_once = (char *) alloca (max_reg_before_loop);
-  bzero (moved_once, max_reg_before_loop);
-
   regs_may_share = 0;
 
   /* Count the number of loops.  */
@@ -483,31 +480,29 @@ loop_optimize (f, dumpfile, unroll_p, bct_p)
   if (max_loop_num == 0)
     return;
 
+  moved_once = (char *) xcalloc (max_reg_before_loop, sizeof (char));
+
   /* Get size to use for tables indexed by uids.
      Leave some space for labels allocated by find_and_verify_loops.  */
   max_uid_for_loop = get_max_uid () + 1 + max_loop_num * 32;
 
-  uid_luid = (int *) alloca (max_uid_for_loop * sizeof (int));
-  uid_loop_num = (int *) alloca (max_uid_for_loop * sizeof (int));
-
-  bzero ((char *) uid_luid, max_uid_for_loop * sizeof (int));
-  bzero ((char *) uid_loop_num, max_uid_for_loop * sizeof (int));
+  uid_luid = (int *) xcalloc (max_uid_for_loop, sizeof (int));
+  uid_loop_num = (int *) xcalloc (max_uid_for_loop, sizeof (int));
 
   /* Allocate tables for recording each loop.  We set each entry, so they need
      not be zeroed.  */
-  loop_number_loop_starts = (rtx *) alloca (max_loop_num * sizeof (rtx));
-  loop_number_loop_ends = (rtx *) alloca (max_loop_num * sizeof (rtx));
-  loop_number_loop_cont = (rtx *) alloca (max_loop_num * sizeof (rtx));
-  loop_number_cont_dominator = (rtx *) alloca (max_loop_num * sizeof (rtx));
-  loop_outer_loop = (int *) alloca (max_loop_num * sizeof (int));
-  loop_invalid = (char *) alloca (max_loop_num * sizeof (char));
-  loop_number_exit_labels = (rtx *) alloca (max_loop_num * sizeof (rtx));
-  loop_number_exit_count = (int *) alloca (max_loop_num * sizeof (int));
+  loop_number_loop_starts = (rtx *) xmalloc (max_loop_num * sizeof (rtx));
+  loop_number_loop_ends = (rtx *) xmalloc (max_loop_num * sizeof (rtx));
+  loop_number_loop_cont = (rtx *) xmalloc (max_loop_num * sizeof (rtx));
+  loop_number_cont_dominator = (rtx *) xmalloc (max_loop_num * sizeof (rtx));
+  loop_outer_loop = (int *) xmalloc (max_loop_num * sizeof (int));
+  loop_invalid = (char *) xmalloc (max_loop_num * sizeof (char));
+  loop_number_exit_labels = (rtx *) xmalloc (max_loop_num * sizeof (rtx));
+  loop_number_exit_count = (int *) xmalloc (max_loop_num * sizeof (int));
 
 #ifdef HAVE_decrement_and_branch_on_count
   /* Allocate for BCT optimization */
-  loop_used_count_register = (int *) alloca (max_loop_num * sizeof (int));
-  bzero ((char *) loop_used_count_register, max_loop_num * sizeof (int));
+  loop_used_count_register = (int *) xcalloc (max_loop_num, sizeof (int));
 #endif  /* HAVE_decrement_and_branch_on_count */
 
   /* Find and process each loop.
@@ -574,6 +569,23 @@ loop_optimize (f, dumpfile, unroll_p, bct_p)
     unroll_block_trees ();
 
   end_alias_analysis ();
+
+  /* Clean up.  */
+  free (moved_once);
+  free (uid_luid);
+  free (uid_loop_num);
+  free (loop_number_loop_starts);
+  free (loop_number_loop_ends);
+  free (loop_number_loop_cont);
+  free (loop_number_cont_dominator);
+  free (loop_outer_loop);
+  free (loop_invalid);
+  free (loop_number_exit_labels);
+  free (loop_number_exit_count);
+#ifdef HAVE_decrement_and_branch_on_count
+  free (loop_used_count_register);
+#endif  /* HAVE_decrement_and_branch_on_count */
+
 }
 
 /* Returns the next insn, in execution order, after INSN.  START and
@@ -7116,11 +7128,8 @@ combine_givs (bl)
     if (!g1->ignore)
       giv_array[i++] = g1;
 
-  stats = (struct combine_givs_stats *) alloca (giv_count * sizeof (*stats));
-  bzero ((char *) stats, giv_count * sizeof (*stats));
-
-  can_combine = (rtx *) alloca (giv_count * giv_count * sizeof(rtx));
-  bzero ((char *) can_combine, giv_count * giv_count * sizeof(rtx));
+  stats = (struct combine_givs_stats *) xcalloc (giv_count, sizeof (*stats));
+  can_combine = (rtx *) xcalloc (giv_count, giv_count * sizeof(rtx));
 
   for (i = 0; i < giv_count; i++)
     {
@@ -7250,6 +7259,10 @@ restart:
	  goto restart;
	}
     }
+
+  /* Clean up.  */
+  free (stats);
+  free (can_combine);
 }
 
 struct recombine_givs_stats
@@ -7387,8 +7400,8 @@ recombine_givs (bl, loop_start, loop_end, unroll_p)
	giv_count++;
     }
   giv_array
-    = (struct induction **) alloca (giv_count * sizeof (struct induction *));
-  stats = (struct recombine_givs_stats *) alloca (giv_count * sizeof *stats);
+    = (struct induction **) xmalloc (giv_count * sizeof (struct induction *));
+  stats = (struct recombine_givs_stats *) xmalloc (giv_count * sizeof *stats);
 
   /* Initialize stats and set up the ix field for each giv in stats to name
      the corresponding index into stats.  */
@@ -7679,6 +7692,10 @@ recombine_givs (bl, loop_start, loop_end, unroll_p)
	  rescan = i;
	}
     }
+
+  /* Clean up.  */
+  free (giv_array);
+  free (stats);
 }
 
 /* EMIT code before INSERT_BEFORE to set REG = B * M + A.  */
gcc/reorg.c
@@ -3531,7 +3531,7 @@ dbr_schedule (first, file)
	epilogue_insn = insn;
     }
 
-  uid_to_ruid = (int *) alloca ((max_uid + 1) * sizeof (int));
+  uid_to_ruid = (int *) xmalloc ((max_uid + 1) * sizeof (int));
   for (i = 0, insn = first; insn; i++, insn = NEXT_INSN (insn))
     uid_to_ruid[INSN_UID (insn)] = i;
 
@@ -3676,5 +3676,6 @@ dbr_schedule (first, file)
				      REG_NOTES (insn));
     }
   free_resource_info ();
+  free (uid_to_ruid);
 }
 #endif /* DELAY_SLOTS */
gcc/unroll.c
@@ -682,7 +682,7 @@ unroll_loop (loop_end, insn_count, loop_start, end_insert_before,
   max_labelno = max_label_num ();
   max_insnno = get_max_uid ();
 
-  map = (struct inline_remap *) alloca (sizeof (struct inline_remap));
+  map = (struct inline_remap *) xmalloc (sizeof (struct inline_remap));
 
   map->integrating = 0;
   map->const_equiv_varray = 0;
@@ -691,10 +691,9 @@ unroll_loop (loop_end, insn_count, loop_start, end_insert_before,
 
   if (max_labelno > 0)
     {
-      map->label_map = (rtx *) alloca (max_labelno * sizeof (rtx));
+      map->label_map = (rtx *) xmalloc (max_labelno * sizeof (rtx));
 
-      local_label = (char *) alloca (max_labelno);
-      bzero (local_label, max_labelno);
+      local_label = (char *) xcalloc (max_labelno, sizeof (char));
     }
   else
     map->label_map = 0;
@@ -742,7 +741,7 @@ unroll_loop (loop_end, insn_count, loop_start, end_insert_before,
 
   /* Allocate space for the insn map.  */
 
-  map->insn_map = (rtx *) alloca (max_insnno * sizeof (rtx));
+  map->insn_map = (rtx *) xmalloc (max_insnno * sizeof (rtx));
 
   /* Set this to zero, to indicate that we are doing loop unrolling,
      not function inlining.  */
@@ -768,17 +767,12 @@ unroll_loop (loop_end, insn_count, loop_start, end_insert_before,
     preconditioning code and find_splittable_regs will never be used
     to access the splittable_regs[] and addr_combined_regs[] arrays.  */
 
-  splittable_regs = (rtx *) alloca (maxregnum * sizeof (rtx));
-  bzero ((char *) splittable_regs, maxregnum * sizeof (rtx));
-  derived_regs = (char *) alloca (maxregnum);
-  bzero (derived_regs, maxregnum);
-  splittable_regs_updates = (int *) alloca (maxregnum * sizeof (int));
-  bzero ((char *) splittable_regs_updates, maxregnum * sizeof (int));
+  splittable_regs = (rtx *) xcalloc (maxregnum, sizeof (rtx));
+  derived_regs = (char *) xcalloc (maxregnum, sizeof (char));
+  splittable_regs_updates = (int *) xcalloc (maxregnum, sizeof (int));
   addr_combined_regs
-    = (struct induction **) alloca (maxregnum * sizeof (struct induction *));
-  bzero ((char *) addr_combined_regs, maxregnum * sizeof (struct induction *));
-  local_regno = (char *) alloca (maxregnum);
-  bzero (local_regno, maxregnum);
+    = (struct induction **) xcalloc (maxregnum, sizeof (struct induction *));
+  local_regno = (char *) xcalloc (maxregnum, sizeof (char));
 
   /* Mark all local registers, i.e. the ones which are referenced only
      inside the loop.  */
@@ -884,7 +878,7 @@ unroll_loop (loop_end, insn_count, loop_start, end_insert_before,
       rtx *labels;
       int abs_inc, neg_inc;
 
-      map->reg_map = (rtx *) alloca (maxregnum * sizeof (rtx));
+      map->reg_map = (rtx *) xmalloc (maxregnum * sizeof (rtx));
 
       VARRAY_CONST_EQUIV_INIT (map->const_equiv_varray, maxregnum,
			       "unroll_loop");
@@ -930,7 +924,7 @@ unroll_loop (loop_end, insn_count, loop_start, end_insert_before,
	  /* Now emit a sequence of branches to jump to the proper precond
	     loop entry point.  */
 
-	  labels = (rtx *) alloca (sizeof (rtx) * unroll_number);
+	  labels = (rtx *) xmalloc (sizeof (rtx) * unroll_number);
	  for (i = 0; i < unroll_number; i++)
	    labels[i] = gen_label_rtx ();
 
@@ -1109,6 +1103,9 @@ unroll_loop (loop_end, insn_count, loop_start, end_insert_before,
	  /* Set unroll type to MODULO now.  */
	  unroll_type = UNROLL_MODULO;
	  loop_preconditioned = 1;
+
+	  /* Clean up.  */
+	  free (labels);
	}
     }
 
@@ -1146,7 +1143,7 @@ unroll_loop (loop_end, insn_count, loop_start, end_insert_before,
     the constant maps also.  */
 
   maxregnum = max_reg_num ();
-  map->reg_map = (rtx *) alloca (maxregnum * sizeof (rtx));
+  map->reg_map = (rtx *) xmalloc (maxregnum * sizeof (rtx));
 
   init_reg_map (map, maxregnum);
 
@@ -1286,8 +1283,22 @@ unroll_loop (loop_end, insn_count, loop_start, end_insert_before,
   emit_label_after (exit_label, loop_end);
 
 egress:
-  if (map && map->const_equiv_varray)
+  if (map->const_equiv_varray)
     VARRAY_FREE (map->const_equiv_varray);
+  if (map->label_map)
+    {
+      free (map->label_map);
+      free (local_label);
+    }
+  free (map->insn_map);
+  free (splittable_regs);
+  free (derived_regs);
+  free (splittable_regs_updates);
+  free (addr_combined_regs);
+  free (local_regno);
+  if (map->reg_map)
+    free (map->reg_map);
+  free (map);
 }
 
 /* Return true if the loop can be safely, and profitably, preconditioned