re PR middle-end/30907 (Propagation of addresses within loops pessimizes code)
2007-03-19  Paolo Bonzini  <bonzini@gnu.org>

	PR rtl-optimization/30907
	* fwprop.c (forward_propagate_into): Never propagate inside a loop.
	(fwprop_init): Always call loop_optimizer_initialize.
	(fwprop_done): Always call loop_optimizer_finalize.
	(fwprop): We always have loop info now.
	(gate_fwprop_addr): Remove.
	(pass_fwprop_addr): Use gate_fwprop as gate.

	PR rtl-optimization/30841
	* df-problems.c (df_ru_local_compute, df_rd_local_compute,
	df_chain_alloc): Call df_reorganize_refs unconditionally.
	* df-scan.c (df_rescan_blocks, df_reorganize_refs): Change
	refs_organized to refs_organized_size.
	(df_ref_create_structure): Use refs_organized_size instead of
	bitmap_size if refs had been organized, and keep refs_organized_size
	up-to-date.
	* df.h (struct df_ref_info): Change refs_organized to
	refs_organized_size.
	(DF_DEFS_SIZE, DF_USES_SIZE): Use refs_organized_size instead of
	bitmap_size.

From-SVN: r123084
commit fb4061627b
parent 0a85ec2e8c
gcc/ChangeLog
@@ -1,3 +1,26 @@
+2007-03-19  Paolo Bonzini  <bonzini@gnu.org>
+
+	PR rtl-optimization/30907
+	* fwprop.c (forward_propagate_into): Never propagate inside a loop.
+	(fwprop_init): Always call loop_optimizer_initialize.
+	(fwprop_done): Always call loop_optimizer_finalize.
+	(fwprop): We always have loop info now.
+	(gate_fwprop_addr): Remove.
+	(pass_fwprop_addr): Use gate_fwprop as gate.
+
+	PR rtl-optimization/30841
+	* df-problems.c (df_ru_local_compute, df_rd_local_compute,
+	df_chain_alloc): Call df_reorganize_refs unconditionally.
+	* df-scan.c (df_rescan_blocks, df_reorganize_refs): Change
+	refs_organized to refs_organized_size.
+	(df_ref_create_structure): Use refs_organized_size instead of
+	bitmap_size if refs had been organized, and keep refs_organized_size
+	up-to-date.
+	* df.h (struct df_ref_info): Change refs_organized to
+	refs_organized_size.
+	(DF_DEFS_SIZE, DF_USES_SIZE): Use refs_organized_size instead of
+	bitmap_size.
+
 2007-03-19  Mark Mitchell  <mark@codesourcery.com>
 
 	* except.c (output_function_exception_table): Do not reference the
gcc/df-problems.c
@@ -586,9 +586,7 @@ df_ru_local_compute (struct dataflow *dflow,
   bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
 
   df_set_seen ();
-
-  if (!df->use_info.refs_organized)
-    df_reorganize_refs (&df->use_info);
+  df_reorganize_refs (&df->use_info);
 
   EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
     {
@@ -1109,9 +1107,7 @@ df_rd_local_compute (struct dataflow *dflow,
   bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
 
   df_set_seen ();
-
-  if (!df->def_info.refs_organized)
-    df_reorganize_refs (&df->def_info);
+  df_reorganize_refs (&df->def_info);
 
   EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
     {
@@ -2771,8 +2767,7 @@ df_chain_alloc (struct dataflow *dflow,
 
   if (dflow->flags & DF_DU_CHAIN)
     {
-      if (!df->def_info.refs_organized)
-	df_reorganize_refs (&df->def_info);
+      df_reorganize_refs (&df->def_info);
 
       /* Clear out the pointers from the refs.  */
       for (i = 0; i < DF_DEFS_SIZE (df); i++)
@@ -2784,8 +2779,7 @@ df_chain_alloc (struct dataflow *dflow,
 
   if (dflow->flags & DF_UD_CHAIN)
     {
-      if (!df->use_info.refs_organized)
-	df_reorganize_refs (&df->use_info);
+      df_reorganize_refs (&df->use_info);
       for (i = 0; i < DF_USES_SIZE (df); i++)
 	{
 	  struct df_ref *ref = df->use_info.refs[i];
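These callers can drop their refs_organized checks because df_reorganize_refs itself bails out early once the table is already organized (see the df-scan.c hunk further down). As a rough illustration of that pattern, here is a minimal standalone C sketch; the names (organized_size, reorganize) are invented for the example and are not GCC's.

/* Standalone sketch, not GCC code: the "already done" guard lives in the
   worker, so callers may invoke it unconditionally.  */

#include <stdio.h>

static unsigned int organized_size;  /* 0 until the table has been compacted */

static void
reorganize (unsigned int nrefs)
{
  if (organized_size)      /* already organized: nothing to do */
    return;
  /* ...the actual compaction work would happen here...  */
  organized_size = nrefs;
}

int
main (void)
{
  reorganize (10);         /* does the work */
  reorganize (10);         /* redundant call is a cheap no-op */
  printf ("organized size: %u\n", organized_size);
  return 0;
}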
gcc/df-scan.c
@@ -435,8 +435,8 @@ df_rescan_blocks (struct df *df, bitmap blocks)
   struct dataflow *dflow = df->problems_by_index[DF_SCAN];
   basic_block bb;
 
-  df->def_info.refs_organized = false;
-  df->use_info.refs_organized = false;
+  df->def_info.refs_organized_size = 0;
+  df->use_info.refs_organized_size = 0;
 
   if (blocks)
     {
@@ -882,7 +882,7 @@ df_reorganize_refs (struct df_ref_info *ref_info)
   unsigned int offset = 0;
   unsigned int size = 0;
 
-  if (ref_info->refs_organized)
+  if (ref_info->refs_organized_size)
     return;
 
   if (ref_info->refs_size < ref_info->bitmap_size)
@@ -915,7 +915,7 @@ df_reorganize_refs (struct df_ref_info *ref_info)
      reset it now that we have squished out all of the empty
      slots.  */
   ref_info->bitmap_size = size;
-  ref_info->refs_organized = true;
+  ref_info->refs_organized_size = size;
   ref_info->add_refs_inline = true;
 }
 
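The function now records how many slots survived compaction instead of setting a plain boolean. A much-simplified standalone sketch of that idea follows; it only squeezes out empty slots, whereas the real df_reorganize_refs also groups the refs of each pseudo contiguously, and all names here are illustrative.

/* Standalone sketch, not GCC code: keep the non-empty slots of REFS
   contiguous and return how many remain; the return value plays the
   role of refs_organized_size.  */

#include <stdio.h>

static unsigned int
compact (void **refs, unsigned int n)
{
  unsigned int size = 0;
  for (unsigned int i = 0; i < n; i++)
    if (refs[i])
      refs[size++] = refs[i];
  return size;
}

int
main (void)
{
  int a, b;
  void *refs[4] = { &a, NULL, &b, NULL };
  printf ("organized size: %u\n", compact (refs, 4));  /* prints 2 */
  return 0;
}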
@@ -957,22 +957,25 @@ df_ref_create_structure (struct dataflow *dflow, rtx reg, rtx *loc,
     case DF_REF_REG_DEF:
       {
 	struct df_reg_info *reg_info = DF_REG_DEF_GET (df, regno);
-	reg_info->n_refs++;
+	unsigned int size = df->def_info.refs_organized_size
+			    ? df->def_info.refs_organized_size
+			    : df->def_info.bitmap_size;
 
 	/* Add the ref to the reg_def chain.  */
+	reg_info->n_refs++;
 	df_reg_chain_create (reg_info, this_ref);
-	DF_REF_ID (this_ref) = df->def_info.bitmap_size;
+	DF_REF_ID (this_ref) = size;
 	if (df->def_info.add_refs_inline)
 	  {
-	    if (DF_DEFS_SIZE (df) >= df->def_info.refs_size)
+	    if (size >= df->def_info.refs_size)
 	      {
-		int new_size = df->def_info.bitmap_size
-		  + df->def_info.bitmap_size / 4;
+		int new_size = size + size / 4;
 		df_grow_ref_info (&df->def_info, new_size);
 	      }
 	    /* Add the ref to the big array of defs.  */
-	    DF_DEFS_SET (df, df->def_info.bitmap_size, this_ref);
-	    df->def_info.refs_organized = false;
+	    DF_DEFS_SET (df, size, this_ref);
+	    if (df->def_info.refs_organized_size)
+	      df->def_info.refs_organized_size++;
 	  }
 
 	df->def_info.bitmap_size++;
@@ -997,22 +1000,25 @@ df_ref_create_structure (struct dataflow *dflow, rtx reg, rtx *loc,
     case DF_REF_REG_USE:
       {
 	struct df_reg_info *reg_info = DF_REG_USE_GET (df, regno);
-	reg_info->n_refs++;
+	unsigned int size = df->use_info.refs_organized_size
+			    ? df->use_info.refs_organized_size
+			    : df->use_info.bitmap_size;
 
 	/* Add the ref to the reg_use chain.  */
+	reg_info->n_refs++;
 	df_reg_chain_create (reg_info, this_ref);
-	DF_REF_ID (this_ref) = df->use_info.bitmap_size;
+	DF_REF_ID (this_ref) = size;
 	if (df->use_info.add_refs_inline)
 	  {
-	    if (DF_USES_SIZE (df) >= df->use_info.refs_size)
+	    if (size >= df->use_info.refs_size)
 	      {
-		int new_size = df->use_info.bitmap_size
-		  + df->use_info.bitmap_size / 4;
+		int new_size = size + size / 4;
 		df_grow_ref_info (&df->use_info, new_size);
 	      }
 	    /* Add the ref to the big array of defs.  */
-	    DF_USES_SET (df, df->use_info.bitmap_size, this_ref);
-	    df->use_info.refs_organized = false;
+	    DF_USES_SET (df, size, this_ref);
+	    if (df->def_info.refs_organized_size)
+	      df->def_info.refs_organized_size++;
 	  }
 
 	df->use_info.bitmap_size++;
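Both cases pick the insertion index from refs_organized_size when the table has already been compacted, and bump that counter only in that case so it stays a valid count of filled slots. The standalone sketch below mimics that bookkeeping with invented names (ref_table, add_ref); it is an illustration, not GCC code, and error checking is omitted.

/* Standalone sketch, not GCC code: a ref table that, like the patched
   df_ref_info, tracks both the raw number of refs seen (bitmap_size)
   and the number of valid slots after compaction (refs_organized_size),
   and picks the insertion index accordingly.  */

#include <stdio.h>
#include <stdlib.h>

struct ref_table
{
  void **refs;
  unsigned int refs_size;             /* allocated slots */
  unsigned int bitmap_size;           /* refs seen so far */
  unsigned int refs_organized_size;   /* >0 once the table is compacted */
};

static void
add_ref (struct ref_table *t, void *ref)
{
  unsigned int size = t->refs_organized_size
		      ? t->refs_organized_size
		      : t->bitmap_size;

  if (size >= t->refs_size)
    {
      unsigned int new_size = size + size / 4 + 1;
      t->refs = realloc (t->refs, new_size * sizeof (void *));
      t->refs_size = new_size;
    }
  t->refs[size] = ref;
  if (t->refs_organized_size)
    t->refs_organized_size++;   /* keep the compacted count in step */
  t->bitmap_size++;
}

int
main (void)
{
  struct ref_table t = { NULL, 0, 0, 0 };
  int a, b, c;
  add_ref (&t, &a);
  add_ref (&t, &b);
  t.refs_organized_size = t.bitmap_size;   /* pretend compaction ran */
  add_ref (&t, &c);
  printf ("seen %u, organized %u\n", t.bitmap_size, t.refs_organized_size);
  free (t.refs);
  return 0;
}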
gcc/df.h (8 changed lines)
@@ -323,9 +323,9 @@ struct df_ref_info
   unsigned int refs_size;       /* Size of currently allocated refs table.  */
   unsigned int bitmap_size;     /* Number of refs seen.  */
 
-  /* True if refs table is organized so that every reference for a
+  /* >0 if refs table is organized so that every reference for a
      pseudo is contiguous.  */
-  bool refs_organized;
+  unsigned int refs_organized_size;
   /* True if the next refs should be added immediately or false to
      defer to later to reorganize the table.  */
   bool add_refs_inline;
@@ -433,10 +433,10 @@ struct df
 	|| DF_REF_REG_MEM_LOAD_P (REF))
 
 /* Macros to get the refs out of def_info or use_info refs table.  */
-#define DF_DEFS_SIZE(DF) ((DF)->def_info.bitmap_size)
+#define DF_DEFS_SIZE(DF) ((DF)->def_info.refs_organized_size)
 #define DF_DEFS_GET(DF,ID) ((DF)->def_info.refs[(ID)])
 #define DF_DEFS_SET(DF,ID,VAL) ((DF)->def_info.refs[(ID)]=(VAL))
-#define DF_USES_SIZE(DF) ((DF)->use_info.bitmap_size)
+#define DF_USES_SIZE(DF) ((DF)->use_info.refs_organized_size)
 #define DF_USES_GET(DF,ID) ((DF)->use_info.refs[(ID)])
 #define DF_USES_SET(DF,ID,VAL) ((DF)->use_info.refs[(ID)]=(VAL))
 
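With DF_DEFS_SIZE and DF_USES_SIZE now expanding to refs_organized_size, loops bounded by these macros, such as the one in fwprop below, only visit the compacted portion of the table. A tiny standalone sketch of that access pattern, with invented names:

/* Standalone sketch, not GCC code: iterate only over the organized part
   of a ref table, the way callers loop up to DF_USES_SIZE.  */

#include <stdio.h>

struct table
{
  void **refs;
  unsigned int refs_organized_size;
};

#define TABLE_SIZE(T) ((T)->refs_organized_size)   /* stand-in for DF_USES_SIZE */

int
main (void)
{
  int a, b;
  void *slots[2] = { &a, &b };
  struct table t = { slots, 2 };

  for (unsigned int i = 0; i < TABLE_SIZE (&t); i++)
    if (t.refs[i])
      printf ("slot %u is live\n", i);
  return 0;
}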
gcc/fwprop.c (26 changed lines)
@@ -862,10 +862,8 @@ forward_propagate_into (struct df_ref *use)
   if (DF_REF_FLAGS (def) & DF_REF_ARTIFICIAL)
     return;
 
-  /* Do not propagate loop invariant definitions inside the loop if
-     we are going to unroll.  */
-  if (current_loops
-      && DF_REF_BB (def)->loop_father != DF_REF_BB (use)->loop_father)
+  /* Do not propagate loop invariant definitions inside the loop.  */
+  if (DF_REF_BB (def)->loop_father != DF_REF_BB (use)->loop_father)
     return;
 
   /* Check if the use is still present in the insn!  */
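The simplified test keeps a definition from being propagated into a use that sits in a different loop, which is what preserves hoisted address computations. A standalone sketch of the same rule, using hypothetical stand-ins (struct block, struct loop) for GCC's basic_block and loop structures:

/* Standalone sketch, not GCC code: refuse to propagate when the def and
   the use live in different loops, mirroring the loop_father comparison
   added above.  */

#include <stdbool.h>
#include <stdio.h>

struct loop { int num; };
struct block { struct loop *loop_father; };

static bool
can_propagate_p (const struct block *def_bb, const struct block *use_bb)
{
  return def_bb->loop_father == use_bb->loop_father;
}

int
main (void)
{
  struct loop outer = { 0 }, inner = { 1 };
  struct block def_bb = { &outer };   /* def computed before the loop */
  struct block use_bb = { &inner };   /* use sits inside the loop */

  printf ("propagate across loop boundary: %s\n",
	  can_propagate_p (&def_bb, &use_bb) ? "yes" : "no");
  return 0;
}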
@@ -900,8 +898,7 @@ fwprop_init (void)
      loops and be careful about them.  But we have to call flow_loops_find
      before df_analyze, because flow_loops_find may introduce new jump
      insns (sadly) if we are not working in cfglayout mode.  */
-  if (flag_rerun_cse_after_loop && (flag_unroll_loops || flag_peel_loops))
-    loop_optimizer_init (0);
+  loop_optimizer_init (0);
 
   /* Now set up the dataflow problem (we only want use-def chains) and
      put the dataflow solver to work.  */
@@ -915,10 +912,7 @@ static void
 fwprop_done (void)
 {
   df_finish (df);
-
-  if (flag_rerun_cse_after_loop && (flag_unroll_loops || flag_peel_loops))
-    loop_optimizer_finalize ();
-
+  loop_optimizer_finalize ();
   free_dominance_info (CDI_DOMINATORS);
   cleanup_cfg (0);
   delete_trivially_dead_insns (get_insns (), max_reg_num ());
@@ -957,8 +951,7 @@ fwprop (void)
     {
       struct df_ref *use = DF_USES_GET (df, i);
       if (use)
-	if (!current_loops
-	    || DF_REF_TYPE (use) == DF_REF_REG_USE
+	if (DF_REF_TYPE (use) == DF_REF_REG_USE
 	    || DF_REF_BB (use)->loop_father == NULL)
 	  forward_propagate_into (use);
     }
@@ -985,13 +978,6 @@ struct tree_opt_pass pass_rtl_fwprop =
   0                                     /* letter */
 };
 
-static bool
-gate_fwprop_addr (void)
-{
-  return optimize > 0 && flag_forward_propagate && flag_rerun_cse_after_loop
-    && (flag_unroll_loops || flag_peel_loops);
-}
-
 static unsigned int
 fwprop_addr (void)
 {
@@ -1018,7 +1004,7 @@ fwprop_addr (void)
 struct tree_opt_pass pass_rtl_fwprop_addr =
 {
   "fwprop2",                            /* name */
-  gate_fwprop_addr,                     /* gate */
+  gate_fwprop,                          /* gate */
   fwprop_addr,                          /* execute */
   NULL,                                 /* sub */
   NULL,                                 /* next */