revert: re PR rtl-optimization/11320 (Scheduler bug)

Revert
	2003-07-10  Eric Botcazou  <ebotcazou@libertysurf.fr>
	PR rtl-optimization/11320
	* sched-int.h (struct deps) [reg_conditional_sets]: New field.
	(struct sched_info) [compute_jump_reg_dependencies]: New prototype.
	* sched-deps.c (sched_analyze_insn) [JUMP_INSN]: Update call to
	current_sched_info->compute_jump_reg_dependencies. Record which
	registers are used and which registers are set by the jump.
	Clear deps->reg_conditional_sets after a barrier.
	Set deps->reg_conditional_sets if the insn is a COND_EXEC.
	Clear deps->reg_conditional_sets if the insn is not a COND_EXEC.
	(init_deps): Initialize reg_conditional_sets.
	(free_deps): Clear reg_conditional_sets.
	* sched-ebb.c (compute_jump_reg_dependencies): New prototype.
	Mark registers live on entry of the fallthrough block and conditionally
	set as set by the jump. Mark registers live on entry of non-fallthrough
	blocks as used by the jump.
	* sched-rgn.c (compute_jump_reg_dependencies): New prototype.
	Mark new parameters as unused.

From-SVN: r176315
Author: Bernd Schmidt, 2011-07-15 13:20:10 +00:00 (committed by Bernd Schmidt)
parent ce6e60b2a4
commit aef0e7a8c2
6 changed files with 40 additions and 51 deletions
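
For orientation, the interface change this revert makes is easiest to see in the compute_jump_reg_dependencies hook, whose old and new declarations appear in the sched-int.h hunk below. Here is a minimal, declaration-only sketch of the two shapes; GCC's rtx and regset types are replaced by opaque placeholders, and the _before/_after struct names are invented for the illustration.

/* Placeholders standing in for GCC's rtx and regset; only the hook
   signatures are taken from the diff below.  */
typedef void *rtx;
typedef void *regset;

struct deps_hooks_before
{
  /* Pre-revert (the 2003 PR 11320 change): the hook received the registers
     conditionally set before the jump and reported both the registers used
     and the registers set by the jump.  */
  void (*compute_jump_reg_dependencies) (rtx insn, regset cond_set,
                                         regset used, regset set);
};

struct deps_hooks_after
{
  /* Post-revert: the hook only reports the registers used by the jump.  */
  void (*compute_jump_reg_dependencies) (rtx insn, regset used);
};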

gcc/ChangeLog

@@ -49,6 +49,26 @@
* config/c6x/eqf.c: New file.
* config/c6x/libgcc-c6xeabi.ver: New file.
Revert
2003-07-10 Eric Botcazou <ebotcazou@libertysurf.fr>
PR rtl-optimization/11320
* sched-int.h (struct deps) [reg_conditional_sets]: New field.
(struct sched_info) [compute_jump_reg_dependencies]: New prototype.
* sched-deps.c (sched_analyze_insn) [JUMP_INSN]: Update call to
current_sched_info->compute_jump_reg_dependencies. Record which
registers are used and which registers are set by the jump.
Clear deps->reg_conditional_sets after a barrier.
Set deps->reg_conditional_sets if the insn is a COND_EXEC.
Clear deps->reg_conditional_sets if the insn is not a COND_EXEC.
(init_deps): Initialize reg_conditional_sets.
(free_deps): Clear reg_conditional_sets.
* sched-ebb.c (compute_jump_reg_dependencies): New prototype.
Mark registers live on entry of the fallthrough block and conditionally
set as set by the jump. Mark registers live on entry of non-fallthrough
blocks as used by the jump.
* sched-rgn.c (compute_jump_reg_dependencies): New prototype.
Mark new parameters as unused.
2011-07-14 Andrew Pinski <pinskia@gmail.com>
PR tree-opt/49309

gcc/modulo-sched.c

@@ -252,9 +252,7 @@ sms_print_insn (const_rtx insn, int aligned ATTRIBUTE_UNUSED)
static void
compute_jump_reg_dependencies (rtx insn ATTRIBUTE_UNUSED,
regset cond_exec ATTRIBUTE_UNUSED,
regset used ATTRIBUTE_UNUSED,
regset set ATTRIBUTE_UNUSED)
regset used ATTRIBUTE_UNUSED)
{
}

gcc/sched-deps.c

@@ -579,7 +579,7 @@ conditions_mutex_p (const_rtx cond1, const_rtx cond2, bool rev1, bool rev2)
(rev1==rev2
? reversed_comparison_code (cond2, NULL)
: GET_CODE (cond2))
&& XEXP (cond1, 0) == XEXP (cond2, 0)
&& rtx_equal_p (XEXP (cond1, 0), XEXP (cond2, 0))
&& XEXP (cond1, 1) == XEXP (cond2, 1))
return 1;
return 0;
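
The conditions_mutex_p hunk above replaces a pointer comparison of the conditions' first operands with a structural comparison via rtx_equal_p. The stand-alone toy below (its struct and function names are invented and do not use GCC's rtx) shows why the two tests can disagree: two distinct objects can describe the same register.

#include <stdbool.h>
#include <stdio.h>
#include <string.h>

/* Toy stand-in for an RTL operand: a register reference.  */
struct toy_operand
{
  char kind[4];   /* e.g. "reg" */
  int regno;
};

/* Structural equality, in the spirit of rtx_equal_p: compare contents,
   not addresses.  */
static bool
toy_equal_p (const struct toy_operand *a, const struct toy_operand *b)
{
  return strcmp (a->kind, b->kind) == 0 && a->regno == b->regno;
}

int
main (void)
{
  /* Two distinct objects describing the same register.  */
  struct toy_operand x = { "reg", 3 };
  struct toy_operand y = { "reg", 3 };
  const struct toy_operand *p = &x, *q = &y;

  /* Pointer comparison (what `XEXP (cond1, 0) == XEXP (cond2, 0)' amounts
     to) says they differ; structural comparison says they match.  */
  printf ("pointer equality:    %d\n", p == q);               /* prints 0 */
  printf ("structural equality: %d\n", toy_equal_p (p, q));   /* prints 1 */
  return 0;
}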
@@ -2751,14 +2751,13 @@ sched_analyze_insn (struct deps_desc *deps, rtx x, rtx insn)
if (sched_deps_info->compute_jump_reg_dependencies)
{
regset_head tmp_uses, tmp_sets;
INIT_REG_SET (&tmp_uses);
INIT_REG_SET (&tmp_sets);
regset_head tmp;
INIT_REG_SET (&tmp);
(*sched_deps_info->compute_jump_reg_dependencies) (insn, &tmp);
(*sched_deps_info->compute_jump_reg_dependencies)
(insn, &deps->reg_conditional_sets, &tmp_uses, &tmp_sets);
/* Make latency of jump equal to 0 by using anti-dependence. */
EXECUTE_IF_SET_IN_REG_SET (&tmp_uses, 0, i, rsi)
EXECUTE_IF_SET_IN_REG_SET (&tmp, 0, i, rsi)
{
struct deps_reg *reg_last = &deps->reg_last[i];
add_dependence_list (insn, reg_last->sets, 0, REG_DEP_ANTI);
@@ -2773,10 +2772,8 @@ sched_analyze_insn (struct deps_desc *deps, rtx x, rtx insn)
reg_last->uses = alloc_INSN_LIST (insn, reg_last->uses);
}
}
IOR_REG_SET (reg_pending_sets, &tmp_sets);
CLEAR_REG_SET (&tmp_uses);
CLEAR_REG_SET (&tmp_sets);
CLEAR_REG_SET (&tmp);
}
/* All memory writes and volatile reads must happen before the
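
The jump-handling hunk above records each register the jump uses as an anti-dependence (REG_DEP_ANTI) on its last setter; as the retained comment says, this is what makes the latency of the jump zero. A toy model of that costing rule follows; the enum, helper, and latency number are invented for illustration only.

#include <stdio.h>

/* Toy dependence kinds, loosely mirroring the scheduler's distinction
   between true and anti dependences.  */
enum toy_dep_kind { TOY_DEP_TRUE, TOY_DEP_ANTI };

/* Model of the costing rule the comment in the hunk relies on: an
   anti-dependence contributes no latency, a true dependence carries the
   producer's latency.  */
static int
toy_dep_cost (enum toy_dep_kind kind, int producer_latency)
{
  return kind == TOY_DEP_ANTI ? 0 : producer_latency;
}

int
main (void)
{
  int set_latency = 3;  /* pretend the register's setter takes 3 cycles */

  /* Recording the jump's register uses as true dependences would delay it
     by the producer's latency ...  */
  printf ("true dependence cost: %d\n", toy_dep_cost (TOY_DEP_TRUE, set_latency));

  /* ... whereas anti-dependences keep the jump's latency at 0.  */
  printf ("anti dependence cost: %d\n", toy_dep_cost (TOY_DEP_ANTI, set_latency));
  return 0;
}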
@@ -2949,10 +2946,7 @@ sched_analyze_insn (struct deps_desc *deps, rtx x, rtx insn)
add_dependence_list (insn, reg_last->uses, 0, REG_DEP_ANTI);
if (!deps->readonly)
{
reg_last->sets = alloc_INSN_LIST (insn, reg_last->sets);
SET_REGNO_REG_SET (&deps->reg_conditional_sets, i);
}
reg_last->sets = alloc_INSN_LIST (insn, reg_last->sets);
}
}
else
@@ -3014,7 +3008,6 @@ sched_analyze_insn (struct deps_desc *deps, rtx x, rtx insn)
reg_last->sets = alloc_INSN_LIST (insn, reg_last->sets);
reg_last->uses_length = 0;
reg_last->clobbers_length = 0;
CLEAR_REGNO_REG_SET (&deps->reg_conditional_sets, i);
}
}
}
@@ -3112,8 +3105,6 @@ sched_analyze_insn (struct deps_desc *deps, rtx x, rtx insn)
&& sel_insn_is_speculation_check (insn)))
flush_pending_lists (deps, insn, true, true);
if (!deps->readonly)
CLEAR_REG_SET (&deps->reg_conditional_sets);
reg_pending_barrier = NOT_A_BARRIER;
}
@@ -3555,7 +3546,6 @@ init_deps (struct deps_desc *deps, bool lazy_reg_last)
else
deps->reg_last = XCNEWVEC (struct deps_reg, max_reg);
INIT_REG_SET (&deps->reg_last_in_use);
INIT_REG_SET (&deps->reg_conditional_sets);
deps->pending_read_insns = 0;
deps->pending_read_mems = 0;
@@ -3624,7 +3614,6 @@ free_deps (struct deps_desc *deps)
free_INSN_LIST_list (&reg_last->clobbers);
}
CLEAR_REG_SET (&deps->reg_last_in_use);
CLEAR_REG_SET (&deps->reg_conditional_sets);
/* As we initialize reg_last lazily, it is possible that we didn't allocate
it at all. */
@@ -3634,8 +3623,7 @@ free_deps (struct deps_desc *deps)
deps = NULL;
}
/* Remove INSN from dependence contexts DEPS. Caution: reg_conditional_sets
is not handled. */
/* Remove INSN from dependence contexts DEPS. */
void
remove_from_deps (struct deps_desc *deps, rtx insn)
{

gcc/sched-ebb.c

@@ -257,28 +257,18 @@ ebb_contributes_to_priority (rtx next ATTRIBUTE_UNUSED,
return 1;
}
/* INSN is a JUMP_INSN, COND_SET is the set of registers that are
conditionally set before INSN. Store the set of registers that
must be considered as used by this jump in USED and that of
registers that must be considered as set in SET. */
/* INSN is a JUMP_INSN. Store the set of registers that
must be considered as used by this jump in USED. */
void
ebb_compute_jump_reg_dependencies (rtx insn, regset cond_set, regset used,
regset set)
ebb_compute_jump_reg_dependencies (rtx insn, regset used)
{
basic_block b = BLOCK_FOR_INSN (insn);
edge e;
edge_iterator ei;
FOR_EACH_EDGE (e, ei, b->succs)
if (e->flags & EDGE_FALLTHRU)
/* The jump may be a by-product of a branch that has been merged
in the main codepath after being conditionalized. Therefore
it may guard the fallthrough block from using a value that has
conditionally overwritten that of the main codepath. So we
consider that it restores the value of the main codepath. */
bitmap_and (set, df_get_live_in (e->dest), cond_set);
else
if ((e->flags & EDGE_FALLTHRU) == 0)
bitmap_ior_into (used, df_get_live_in (e->dest));
}
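
After the revert, ebb_compute_jump_reg_dependencies simply unions the live-in registers of every non-fallthrough successor into USED. Below is a self-contained sketch of that behaviour with plain bitmasks standing in for GCC's edge, regset and df_get_live_in machinery; all names prefixed toy_ are invented for the example.

#include <stdio.h>

/* A toy CFG successor edge: a bitmask of registers live on entry of the
   destination block, plus a flag for the fallthrough edge.  */
struct toy_edge
{
  unsigned live_in;      /* bit N set => register N live on entry of dest */
  int is_fallthru;
};

/* Post-revert behaviour reduced to bitmasks: registers live at the target
   of every non-fallthrough edge are considered used by the jump.  */
static unsigned
toy_jump_used_regs (const struct toy_edge *succs, int n_succs)
{
  unsigned used = 0;
  for (int i = 0; i < n_succs; i++)
    if (!succs[i].is_fallthru)
      used |= succs[i].live_in;  /* bitmap_ior_into (used, df_get_live_in (e->dest)) */
  return used;
}

int
main (void)
{
  /* A conditional jump with a fallthrough successor and one branch target.  */
  struct toy_edge succs[] = {
    { 0x3, 1 },   /* fallthrough: ignored */
    { 0xc, 0 },   /* branch target: contributes its live-in set */
  };
  printf ("used by jump: 0x%x\n", toy_jump_used_regs (succs, 2));  /* 0xc */
  return 0;
}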

gcc/sched-int.h

@@ -173,7 +173,7 @@ extern struct ready_list ready;
extern int max_issue (struct ready_list *, int, state_t, bool, int *);
extern void ebb_compute_jump_reg_dependencies (rtx, regset, regset, regset);
extern void ebb_compute_jump_reg_dependencies (rtx, regset);
extern edge find_fallthru_edge_from (basic_block);
@@ -517,9 +517,6 @@ struct deps_desc
in reg_last[N].{uses,sets,clobbers}. */
regset_head reg_last_in_use;
/* Element N is set for each register that is conditionally set. */
regset_head reg_conditional_sets;
/* Shows the last value of reg_pending_barrier associated with the insn. */
enum reg_pending_barrier_mode last_reg_pending_barrier;
@@ -1147,7 +1144,7 @@ struct sched_deps_info_def
/* Called when computing dependencies for a JUMP_INSN. This function
should store the set of registers that must be considered as set by
the jump in the regset. */
void (*compute_jump_reg_dependencies) (rtx, regset, regset, regset);
void (*compute_jump_reg_dependencies) (rtx, regset);
/* Start analyzing insn. */
void (*start_insn) (rtx);

gcc/sched-rgn.c

@@ -2062,7 +2062,7 @@ static ds_t new_ready (rtx, ds_t);
static int schedule_more_p (void);
static const char *rgn_print_insn (const_rtx, int);
static int rgn_rank (rtx, rtx);
static void compute_jump_reg_dependencies (rtx, regset, regset, regset);
static void compute_jump_reg_dependencies (rtx, regset);
/* Functions for speculative scheduling. */
static void rgn_add_remove_insn (rtx, int);
@@ -2295,16 +2295,12 @@ contributes_to_priority (rtx next, rtx insn)
return BLOCK_TO_BB (BLOCK_NUM (next)) == BLOCK_TO_BB (BLOCK_NUM (insn));
}
/* INSN is a JUMP_INSN, COND_SET is the set of registers that are
conditionally set before INSN. Store the set of registers that
must be considered as used by this jump in USED and that of
registers that must be considered as set in SET. */
/* INSN is a JUMP_INSN. Store the set of registers that must be
considered as used by this jump in USED. */
static void
compute_jump_reg_dependencies (rtx insn ATTRIBUTE_UNUSED,
regset cond_exec ATTRIBUTE_UNUSED,
regset used ATTRIBUTE_UNUSED,
regset set ATTRIBUTE_UNUSED)
regset used ATTRIBUTE_UNUSED)
{
/* Nothing to do here, since we postprocess jumps in
add_branch_dependences. */