re PR debug/43092 (Wrong debuginfo with VTA and -fomit-frame-pointer/-mno-accumulate-outgoing-args)

PR debug/43092
PR rtl-optimization/43494
* rtl.h (for_each_inc_dec_fn): New type.
(for_each_inc_dec): Declare.
* rtlanal.c (struct for_each_inc_dec_ops): New type.
(for_each_inc_dec_find_inc_dec): New fn.
(for_each_inc_dec_find_mem): New fn.
(for_each_inc_dec): New fn.
* dse.c (struct insn_size): Remove.
(replace_inc_dec, replace_inc_dec_mem): Remove.
(emit_inc_dec_insn_before): New fn.
(check_for_inc_dec): Use it, along with for_each_inc_dec.
(canon_address): Pass mem modes to cselib_lookup.
* cselib.h (cselib_lookup): Add memmode argument.  Adjust callers.
(cselib_lookup_from_insn): Likewise.
(cselib_subst_to_values): Likewise.
* cselib.c (find_slot_memmode): New var.
(cselib_find_slot): New fn.  Use it instead of
htab_find_slot_with_hash everywhere.
(entry_and_rtx_equal_p): Use find_slot_memmode.
(autoinc_split): New fn.
(rtx_equal_for_cselib_p): Rename and implement in terms of...
(rtx_equal_for_cselib_1): ... this.  Take memmode, pass it on.
Deal with autoinc.  Special-case recursion into MEMs.
(cselib_hash_rtx): Likewise.
(cselib_lookup_mem): Infer pmode from address mode.  Distinguish
address and MEM modes.
(cselib_subst_to_values): Add memmode, pass it on.
Deal with autoinc.
(cselib_lookup): Add memmode argument, pass it on.
(cselib_lookup_from_insn): Add memmode.
(cselib_invalidate_rtx): Discard obsolete push_operand handling.
(struct cselib_record_autoinc_data): New.
(cselib_record_autoinc_cb): New fn.
(cselib_record_sets): Use it, along with for_each_inc_dec.  Pass MEM
mode to cselib_lookup.  Reset autoinced REGs here instead of...
(cselib_process_insn): ... here.
* var-tracking.c (replace_expr_with_values, use_type): Pass MEM mode
to cselib_lookup.
(add_uses): Likewise, also to cselib_subst_to_values.
(add_stores): Likewise.
* sched-deps.c (add_insn_mem_dependence): Pass mode to
cselib_subst_to_values.
(sched_analyze_1, sched_analyze_2): Likewise.  Adjusted.
* gcse.c (do_local_cprop): Adjusted.
* postreload.c (reload_cse_simplify_set): Adjusted.
(reload_cse_simplify_operands): Adjusted.
* sel-sched-dump.c (debug_mem_addr_value): Pass mode.

From-SVN: r169782
Alexandre Oliva 2011-02-03 06:04:04 +00:00 committed by Alexandre Oliva
parent 1551d44aba
commit 4deef538ec
11 changed files with 472 additions and 170 deletions

gcc/ChangeLog

@ -1,3 +1,54 @@
2011-02-03 Alexandre Oliva <aoliva@redhat.com>
PR debug/43092
PR rtl-optimization/43494
* rtl.h (for_each_inc_dec_fn): New type.
(for_each_inc_dec): Declare.
* rtlanal.c (struct for_each_inc_dec_ops): New type.
(for_each_inc_dec_find_inc_dec): New fn.
(for_each_inc_dec_find_mem): New fn.
(for_each_inc_dec): New fn.
* dse.c (struct insn_size): Remove.
(replace_inc_dec, replace_inc_dec_mem): Remove.
(emit_inc_dec_insn_before): New fn.
(check_for_inc_dec): Use it, along with for_each_inc_dec.
(canon_address): Pass mem modes to cselib_lookup.
* cselib.h (cselib_lookup): Add memmode argument. Adjust callers.
(cselib_lookup_from_insn): Likewise.
(cselib_subst_to_values): Likewise.
* cselib.c (find_slot_memmode): New var.
(cselib_find_slot): New fn. Use it instead of
htab_find_slot_with_hash everywhere.
(entry_and_rtx_equal_p): Use find_slot_memmode.
(autoinc_split): New fn.
(rtx_equal_for_cselib_p): Rename and implement in terms of...
(rtx_equal_for_cselib_1): ... this. Take memmode, pass it on.
Deal with autoinc. Special-case recursion into MEMs.
(cselib_hash_rtx): Likewise.
(cselib_lookup_mem): Infer pmode from address mode. Distinguish
address and MEM modes.
(cselib_subst_to_values): Add memmode, pass it on.
Deal with autoinc.
(cselib_lookup): Add memmode argument, pass it on.
(cselib_lookup_from_insn): Add memmode.
(cselib_invalidate_rtx): Discard obsolete push_operand handling.
(struct cselib_record_autoinc_data): New.
(cselib_record_autoinc_cb): New fn.
(cselib_record_sets): Use it, along with for_each_inc_dec. Pass MEM
mode to cselib_lookup. Reset autoinced REGs here instead of...
(cselib_process_insn): ... here.
* var-tracking.c (replace_expr_with_values, use_type): Pass MEM mode
to cselib_lookup.
(add_uses): Likewise, also to cselib_subst_to_values.
(add_stores): Likewise.
* sched-deps.c (add_insn_mem_dependence): Pass mode to
cselib_subst_to_values.
(sched_analyze_1, sched_analyze_2): Likewise. Adjusted.
* gcse.c (do_local_cprop): Adjusted.
* postreload.c (reload_cse_simplify_set): Adjusted.
(reload_cse_simplify_operands): Adjusted.
* sel-sched-dump.c (debug_mem_addr_value): Pass mode.
2011-02-03 Alexandre Oliva <aoliva@redhat.com>
PR tree-optimization/45122

gcc/cselib.c

@ -62,7 +62,8 @@ static void unchain_one_elt_loc_list (struct elt_loc_list **);
static int discard_useless_locs (void **, void *);
static int discard_useless_values (void **, void *);
static void remove_useless_values (void);
static unsigned int cselib_hash_rtx (rtx, int);
static int rtx_equal_for_cselib_1 (rtx, rtx, enum machine_mode);
static unsigned int cselib_hash_rtx (rtx, int, enum machine_mode);
static cselib_val *new_cselib_val (unsigned int, enum machine_mode, rtx);
static void add_mem_for_addr (cselib_val *, cselib_val *, rtx);
static cselib_val *cselib_lookup_mem (rtx, int);
@ -390,6 +391,26 @@ cselib_get_next_uid (void)
return next_uid;
}
/* See the documentation of cselib_find_slot below. */
static enum machine_mode find_slot_memmode;
/* Search for X, whose hashcode is HASH, in CSELIB_HASH_TABLE,
INSERTing if requested. When X is part of the address of a MEM,
MEMMODE should specify the mode of the MEM. While searching the
table, MEMMODE is held in FIND_SLOT_MEMMODE, so that autoinc RTXs
in X can be resolved. */
static void **
cselib_find_slot (rtx x, hashval_t hash, enum insert_option insert,
enum machine_mode memmode)
{
void **slot;
find_slot_memmode = memmode;
slot = htab_find_slot_with_hash (cselib_hash_table, x, hash, insert);
find_slot_memmode = VOIDmode;
return slot;
}
/* The equality test for our hash table. The first argument ENTRY is a table
element (i.e. a cselib_val), while the second arg X is an rtx. We know
that all callers of htab_find_slot_with_hash will wrap CONST_INTs into a
@ -419,7 +440,7 @@ entry_and_rtx_equal_p (const void *entry, const void *x_arg)
/* We don't guarantee that distinct rtx's have different hash values,
so we need to do a comparison. */
for (l = v->locs; l; l = l->next)
if (rtx_equal_for_cselib_p (l->loc, x))
if (rtx_equal_for_cselib_1 (l->loc, x, find_slot_memmode))
{
promote_debug_loc (l);
return 1;
@ -630,6 +651,58 @@ cselib_reg_set_mode (const_rtx x)
int
rtx_equal_for_cselib_p (rtx x, rtx y)
{
return rtx_equal_for_cselib_1 (x, y, VOIDmode);
}
/* If x is a PLUS or an autoinc operation, expand the operation,
storing the offset, if any, in *OFF. */
static rtx
autoinc_split (rtx x, rtx *off, enum machine_mode memmode)
{
switch (GET_CODE (x))
{
case PLUS:
*off = XEXP (x, 1);
return XEXP (x, 0);
case PRE_DEC:
if (memmode == VOIDmode)
return x;
*off = GEN_INT (-GET_MODE_SIZE (memmode));
return XEXP (x, 0);
break;
case PRE_INC:
if (memmode == VOIDmode)
return x;
*off = GEN_INT (GET_MODE_SIZE (memmode));
return XEXP (x, 0);
case PRE_MODIFY:
return XEXP (x, 1);
case POST_DEC:
case POST_INC:
case POST_MODIFY:
return XEXP (x, 0);
default:
return x;
}
}
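
For illustration, a hedged sketch of what the new helper computes (the register rtx and the SImode access are invented; only the decomposition shown is implied by the code above):

  /* Sketch only: REG is some address register rtx inside an SImode MEM.  */
  rtx off = NULL_RTX;
  rtx base = autoinc_split (gen_rtx_PRE_DEC (Pmode, reg), &off, SImode);
  /* Now base == reg and off == (const_int -4), so the address compares
     and hashes like (plus (reg) (const_int -4)).  Had MEMMODE been
     VOIDmode, the PRE_DEC would have been returned unchanged.  */
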
/* Return nonzero if we can prove that X and Y contain the same value,
taking our gathered information into account. MEMMODE holds the
mode of the enclosing MEM, if any, as required to deal with autoinc
addressing modes. If X and Y are not (known to be) part of
addresses, MEMMODE should be VOIDmode. */
static int
rtx_equal_for_cselib_1 (rtx x, rtx y, enum machine_mode memmode)
{
enum rtx_code code;
const char *fmt;
@ -637,7 +710,7 @@ rtx_equal_for_cselib_p (rtx x, rtx y)
if (REG_P (x) || MEM_P (x))
{
cselib_val *e = cselib_lookup (x, GET_MODE (x), 0);
cselib_val *e = cselib_lookup (x, GET_MODE (x), 0, memmode);
if (e)
x = e->val_rtx;
@ -645,7 +718,7 @@ rtx_equal_for_cselib_p (rtx x, rtx y)
if (REG_P (y) || MEM_P (y))
{
cselib_val *e = cselib_lookup (y, GET_MODE (y), 0);
cselib_val *e = cselib_lookup (y, GET_MODE (y), 0, memmode);
if (e)
y = e->val_rtx;
@ -669,7 +742,7 @@ rtx_equal_for_cselib_p (rtx x, rtx y)
/* Avoid infinite recursion. */
if (REG_P (t) || MEM_P (t))
continue;
else if (rtx_equal_for_cselib_p (t, y))
else if (rtx_equal_for_cselib_1 (t, y, memmode))
return 1;
}
@ -687,16 +760,37 @@ rtx_equal_for_cselib_p (rtx x, rtx y)
if (REG_P (t) || MEM_P (t))
continue;
else if (rtx_equal_for_cselib_p (x, t))
else if (rtx_equal_for_cselib_1 (x, t, memmode))
return 1;
}
return 0;
}
if (GET_CODE (x) != GET_CODE (y) || GET_MODE (x) != GET_MODE (y))
if (GET_MODE (x) != GET_MODE (y))
return 0;
if (GET_CODE (x) != GET_CODE (y))
{
rtx xorig = x, yorig = y;
rtx xoff = NULL, yoff = NULL;
x = autoinc_split (x, &xoff, memmode);
y = autoinc_split (y, &yoff, memmode);
if (!xoff != !yoff)
return 0;
if (xoff && !rtx_equal_for_cselib_1 (xoff, yoff, memmode))
return 0;
/* Don't recurse if nothing changed. */
if (x != xorig || y != yorig)
return rtx_equal_for_cselib_1 (x, y, memmode);
return 0;
}
/* These won't be handled correctly by the code below. */
switch (GET_CODE (x))
{
@ -712,6 +806,11 @@ rtx_equal_for_cselib_p (rtx x, rtx y)
case LABEL_REF:
return XEXP (x, 0) == XEXP (y, 0);
case MEM:
/* We have to compare any autoinc operations in the addresses
using this MEM's mode. */
return rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 0), GET_MODE (x));
default:
break;
}
@ -744,18 +843,18 @@ rtx_equal_for_cselib_p (rtx x, rtx y)
/* And the corresponding elements must match. */
for (j = 0; j < XVECLEN (x, i); j++)
if (! rtx_equal_for_cselib_p (XVECEXP (x, i, j),
XVECEXP (y, i, j)))
if (! rtx_equal_for_cselib_1 (XVECEXP (x, i, j),
XVECEXP (y, i, j), memmode))
return 0;
break;
case 'e':
if (i == 1
&& targetm.commutative_p (x, UNKNOWN)
&& rtx_equal_for_cselib_p (XEXP (x, 1), XEXP (y, 0))
&& rtx_equal_for_cselib_p (XEXP (x, 0), XEXP (y, 1)))
&& rtx_equal_for_cselib_1 (XEXP (x, 1), XEXP (y, 0), memmode)
&& rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 1), memmode))
return 1;
if (! rtx_equal_for_cselib_p (XEXP (x, i), XEXP (y, i)))
if (! rtx_equal_for_cselib_1 (XEXP (x, i), XEXP (y, i), memmode))
return 0;
break;
@ -807,6 +906,8 @@ wrap_constant (enum machine_mode mode, rtx x)
that take commutativity into account.
If we wanted to also support associative rules, we'd have to use a different
strategy to avoid returning spurious 0, e.g. return ~(~0U >> 1) .
MEMMODE indicates the mode of an enclosing MEM, and it's only
used to compute autoinc values.
We used to have a MODE argument for hashing for CONST_INTs, but that
didn't make sense, since it caused spurious hash differences between
(set (reg:SI 1) (const_int))
@ -817,7 +918,7 @@ wrap_constant (enum machine_mode mode, rtx x)
in a comparison anyway, since relying on hash differences is unsafe. */
static unsigned int
cselib_hash_rtx (rtx x, int create)
cselib_hash_rtx (rtx x, int create, enum machine_mode memmode)
{
cselib_val *e;
int i, j;
@ -832,7 +933,7 @@ cselib_hash_rtx (rtx x, int create)
{
case MEM:
case REG:
e = cselib_lookup (x, GET_MODE (x), create);
e = cselib_lookup (x, GET_MODE (x), create, memmode);
if (! e)
return 0;
@ -878,7 +979,7 @@ cselib_hash_rtx (rtx x, int create)
for (i = 0; i < units; ++i)
{
elt = CONST_VECTOR_ELT (x, i);
hash += cselib_hash_rtx (elt, 0);
hash += cselib_hash_rtx (elt, 0, memmode);
}
return hash;
@ -911,10 +1012,28 @@ cselib_hash_rtx (rtx x, int create)
case PRE_DEC:
case PRE_INC:
/* We can't compute these without knowing the MEM mode. */
gcc_assert (memmode != VOIDmode);
i = GET_MODE_SIZE (memmode);
if (code == PRE_DEC)
i = -i;
/* Adjust the hash so that (mem:MEMMODE (pre_* (reg))) hashes
like (mem:MEMMODE (plus (reg) (const_int I))). */
hash += (unsigned) PLUS - (unsigned)code
+ cselib_hash_rtx (XEXP (x, 0), create, memmode)
+ cselib_hash_rtx (GEN_INT (i), create, memmode);
return hash ? hash : 1 + (unsigned) PLUS;
case PRE_MODIFY:
gcc_assert (memmode != VOIDmode);
return cselib_hash_rtx (XEXP (x, 1), create, memmode);
case POST_DEC:
case POST_INC:
case POST_MODIFY:
case PRE_MODIFY:
gcc_assert (memmode != VOIDmode);
return cselib_hash_rtx (XEXP (x, 0), create, memmode);
case PC:
case CC0:
case CALL:
@ -940,7 +1059,7 @@ cselib_hash_rtx (rtx x, int create)
case 'e':
{
rtx tem = XEXP (x, i);
unsigned int tem_hash = cselib_hash_rtx (tem, create);
unsigned int tem_hash = cselib_hash_rtx (tem, create, memmode);
if (tem_hash == 0)
return 0;
@ -952,7 +1071,7 @@ cselib_hash_rtx (rtx x, int create)
for (j = 0; j < XVECLEN (x, i); j++)
{
unsigned int tem_hash
= cselib_hash_rtx (XVECEXP (x, i, j), create);
= cselib_hash_rtx (XVECEXP (x, i, j), create, memmode);
if (tem_hash == 0)
return 0;
@ -1065,6 +1184,7 @@ static cselib_val *
cselib_lookup_mem (rtx x, int create)
{
enum machine_mode mode = GET_MODE (x);
enum machine_mode addr_mode;
void **slot;
cselib_val *addr;
cselib_val *mem_elt;
@ -1075,8 +1195,12 @@ cselib_lookup_mem (rtx x, int create)
|| (FLOAT_MODE_P (mode) && flag_float_store))
return 0;
addr_mode = GET_MODE (XEXP (x, 0));
if (addr_mode == VOIDmode)
addr_mode = Pmode;
/* Look up the value for the address. */
addr = cselib_lookup (XEXP (x, 0), mode, create);
addr = cselib_lookup (XEXP (x, 0), addr_mode, create, mode);
if (! addr)
return 0;
@ -1093,8 +1217,8 @@ cselib_lookup_mem (rtx x, int create)
mem_elt = new_cselib_val (next_uid, mode, x);
add_mem_for_addr (addr, mem_elt, x);
slot = htab_find_slot_with_hash (cselib_hash_table, wrap_constant (mode, x),
mem_elt->hash, INSERT);
slot = cselib_find_slot (wrap_constant (mode, x), mem_elt->hash,
INSERT, mode);
*slot = mem_elt;
return mem_elt;
}
@ -1526,10 +1650,11 @@ cselib_expand_value_rtx_1 (rtx orig, struct expand_value_data *evd,
with VALUE expressions. This way, it becomes independent of changes
to registers and memory.
X isn't actually modified; if modifications are needed, new rtl is
allocated. However, the return value can share rtl with X. */
allocated. However, the return value can share rtl with X.
If X is within a MEM, MEMMODE must be the mode of the MEM. */
rtx
cselib_subst_to_values (rtx x)
cselib_subst_to_values (rtx x, enum machine_mode memmode)
{
enum rtx_code code = GET_CODE (x);
const char *fmt = GET_RTX_FORMAT (code);
@ -1552,10 +1677,11 @@ cselib_subst_to_values (rtx x)
case MEM:
e = cselib_lookup_mem (x, 0);
/* This used to happen for autoincrements, but we deal with them
properly now. Remove the if stmt for the next release. */
if (! e)
{
/* This happens for autoincrements. Assign a value that doesn't
match any other. */
/* Assign a value that doesn't match any other. */
e = new_cselib_val (next_uid, GET_MODE (x), x);
}
return e->val_rtx;
@ -1566,14 +1692,24 @@ cselib_subst_to_values (rtx x)
case CONST_FIXED:
return x;
case POST_INC:
case PRE_INC:
case POST_DEC:
case PRE_DEC:
case POST_MODIFY:
case PRE_INC:
gcc_assert (memmode != VOIDmode);
i = GET_MODE_SIZE (memmode);
if (code == PRE_DEC)
i = -i;
return cselib_subst_to_values (plus_constant (XEXP (x, 0), i),
memmode);
case PRE_MODIFY:
e = new_cselib_val (next_uid, GET_MODE (x), x);
return e->val_rtx;
gcc_assert (memmode != VOIDmode);
return cselib_subst_to_values (XEXP (x, 1), memmode);
case POST_DEC:
case POST_INC:
case POST_MODIFY:
gcc_assert (memmode != VOIDmode);
return cselib_subst_to_values (XEXP (x, 0), memmode);
default:
break;
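
As a hedged aside (register invented, SImode MEM assumed), the autoinc cases above reduce to plain lookups of the address actually used by the access:

  /* Sketch only: REG is the address register of an SImode MEM.  */
  rtx pre  = cselib_subst_to_values (gen_rtx_PRE_DEC (Pmode, reg), SImode);
  /* Same value as cselib_subst_to_values (plus_constant (reg, -4), SImode).  */
  rtx post = cselib_subst_to_values (gen_rtx_POST_INC (Pmode, reg), SImode);
  /* Same value as cselib_subst_to_values (reg, SImode): the increment
     takes effect only after the memory access.  */
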
@ -1583,7 +1719,7 @@ cselib_subst_to_values (rtx x)
{
if (fmt[i] == 'e')
{
rtx t = cselib_subst_to_values (XEXP (x, i));
rtx t = cselib_subst_to_values (XEXP (x, i), memmode);
if (t != XEXP (x, i))
{
@ -1598,7 +1734,7 @@ cselib_subst_to_values (rtx x)
for (j = 0; j < XVECLEN (x, i); j++)
{
rtx t = cselib_subst_to_values (XVECEXP (x, i, j));
rtx t = cselib_subst_to_values (XVECEXP (x, i, j), memmode);
if (t != XVECEXP (x, i, j))
{
@ -1617,13 +1753,16 @@ cselib_subst_to_values (rtx x)
return copy;
}
/* Look up the rtl expression X in our tables and return the value it has.
If CREATE is zero, we return NULL if we don't know the value. Otherwise,
we create a new one if possible, using mode MODE if X doesn't have a mode
(i.e. because it's a constant). */
/* Look up the rtl expression X in our tables and return the value it
has. If CREATE is zero, we return NULL if we don't know the value.
Otherwise, we create a new one if possible, using mode MODE if X
doesn't have a mode (i.e. because it's a constant). When X is part
of an address, MEMMODE should be the mode of the enclosing MEM if
we're tracking autoinc expressions. */
static cselib_val *
cselib_lookup_1 (rtx x, enum machine_mode mode, int create)
cselib_lookup_1 (rtx x, enum machine_mode mode,
int create, enum machine_mode memmode)
{
void **slot;
cselib_val *e;
@ -1672,7 +1811,7 @@ cselib_lookup_1 (rtx x, enum machine_mode mode, int create)
REG_VALUES (i) = new_elt_list (REG_VALUES (i), NULL);
}
REG_VALUES (i)->next = new_elt_list (REG_VALUES (i)->next, e);
slot = htab_find_slot_with_hash (cselib_hash_table, x, e->hash, INSERT);
slot = cselib_find_slot (x, e->hash, INSERT, memmode);
*slot = e;
return e;
}
@ -1680,13 +1819,13 @@ cselib_lookup_1 (rtx x, enum machine_mode mode, int create)
if (MEM_P (x))
return cselib_lookup_mem (x, create);
hashval = cselib_hash_rtx (x, create);
hashval = cselib_hash_rtx (x, create, memmode);
/* Can't even create if hashing is not possible. */
if (! hashval)
return 0;
slot = htab_find_slot_with_hash (cselib_hash_table, wrap_constant (mode, x),
hashval, create ? INSERT : NO_INSERT);
slot = cselib_find_slot (wrap_constant (mode, x), hashval,
create ? INSERT : NO_INSERT, memmode);
if (slot == 0)
return 0;
@ -1700,7 +1839,8 @@ cselib_lookup_1 (rtx x, enum machine_mode mode, int create)
the hash table is inconsistent until we do so, and
cselib_subst_to_values will need to do lookups. */
*slot = (void *) e;
e->locs = new_elt_loc_list (e->locs, cselib_subst_to_values (x));
e->locs = new_elt_loc_list (e->locs,
cselib_subst_to_values (x, memmode));
return e;
}
@ -1708,14 +1848,14 @@ cselib_lookup_1 (rtx x, enum machine_mode mode, int create)
cselib_val *
cselib_lookup_from_insn (rtx x, enum machine_mode mode,
int create, rtx insn)
int create, enum machine_mode memmode, rtx insn)
{
cselib_val *ret;
gcc_assert (!cselib_current_insn);
cselib_current_insn = insn;
ret = cselib_lookup (x, mode, create);
ret = cselib_lookup (x, mode, create, memmode);
cselib_current_insn = NULL;
@ -1726,9 +1866,10 @@ cselib_lookup_from_insn (rtx x, enum machine_mode mode,
maintains invariants related with debug insns. */
cselib_val *
cselib_lookup (rtx x, enum machine_mode mode, int create)
cselib_lookup (rtx x, enum machine_mode mode,
int create, enum machine_mode memmode)
{
cselib_val *ret = cselib_lookup_1 (x, mode, create);
cselib_val *ret = cselib_lookup_1 (x, mode, create, memmode);
/* ??? Should we return NULL if we're not to create an entry, the
found loc is a debug loc and cselib_current_insn is not DEBUG?
@ -1913,7 +2054,7 @@ cselib_invalidate_mem (rtx mem_rtx)
/* This one overlaps. */
/* We must have a mapping from this MEM's address to the
value (E). Remove that, too. */
addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0);
addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0, GET_MODE (x));
mem_chain = &addr->addr_list;
for (;;)
{
@ -1963,13 +2104,6 @@ cselib_invalidate_rtx (rtx dest)
cselib_invalidate_regno (REGNO (dest), GET_MODE (dest));
else if (MEM_P (dest))
cselib_invalidate_mem (dest);
/* Some machines don't define AUTO_INC_DEC, but they still use push
instructions. We need to catch that case here in order to
invalidate the stack pointer correctly. Note that invalidating
the stack pointer is different from invalidating DEST. */
if (push_operand (dest, GET_MODE (dest)))
cselib_invalidate_rtx (stack_pointer_rtx);
}
/* A wrapper for cselib_invalidate_rtx to be called via note_stores. */
@ -2032,7 +2166,35 @@ cselib_record_set (rtx dest, cselib_val *src_elt, cselib_val *dest_addr_elt)
in a PARALLEL. Since it's fairly cheap, use a really large number. */
#define MAX_SETS (FIRST_PSEUDO_REGISTER * 2)
/* Record the effects of any sets in INSN. */
struct cselib_record_autoinc_data
{
struct cselib_set *sets;
int n_sets;
};
/* Callback for for_each_inc_dec. Records in ARG the SETs implied by
autoinc RTXs: SRC plus SRCOFF if non-NULL is stored in DEST. */
static int
cselib_record_autoinc_cb (rtx mem ATTRIBUTE_UNUSED, rtx op ATTRIBUTE_UNUSED,
rtx dest, rtx src, rtx srcoff, void *arg)
{
struct cselib_record_autoinc_data *data;
data = (struct cselib_record_autoinc_data *)arg;
data->sets[data->n_sets].dest = dest;
if (srcoff)
data->sets[data->n_sets].src = gen_rtx_PLUS (GET_MODE (src), src, srcoff);
else
data->sets[data->n_sets].src = src;
data->n_sets++;
return -1;
}
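
For example (a sketch; the push and the stack-pointer rtx SP are hypothetical), when an insn stores through (mem:SI (pre_dec (reg:SI sp))), for_each_inc_dec invokes this callback with dest == src == sp and srcoff == (const_int -4), so the net effect on the caller's SETs array is:

  /* Sketch of the extra entry appended for the implied sp update.  */
  sets[n_sets].dest = sp;
  sets[n_sets].src = gen_rtx_PLUS (SImode, sp, GEN_INT (-4));
  n_sets++;
  /* cselib_record_sets below handles this like an ordinary SET and
     invalidates its destination once the real sets are recorded.  */
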
/* Record the effects of any sets and autoincs in INSN. */
static void
cselib_record_sets (rtx insn)
{
@ -2041,6 +2203,8 @@ cselib_record_sets (rtx insn)
struct cselib_set sets[MAX_SETS];
rtx body = PATTERN (insn);
rtx cond = 0;
int n_sets_before_autoinc;
struct cselib_record_autoinc_data data;
body = PATTERN (insn);
if (GET_CODE (body) == COND_EXEC)
@ -2084,6 +2248,11 @@ cselib_record_sets (rtx insn)
sets[0].src = XEXP (note, 0);
}
data.sets = sets;
data.n_sets = n_sets_before_autoinc = n_sets;
for_each_inc_dec (&insn, cselib_record_autoinc_cb, &data);
n_sets = data.n_sets;
/* Look up the values that are read. Do this before invalidating the
locations that are written. */
for (i = 0; i < n_sets; i++)
@ -2102,14 +2271,15 @@ cselib_record_sets (rtx insn)
rtx src = sets[i].src;
if (cond)
src = gen_rtx_IF_THEN_ELSE (GET_MODE (dest), cond, src, dest);
sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1);
sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1, VOIDmode);
if (MEM_P (dest))
{
enum machine_mode address_mode
= targetm.addr_space.address_mode (MEM_ADDR_SPACE (dest));
sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0),
address_mode, 1);
address_mode, 1,
GET_MODE (dest));
}
else
sets[i].dest_addr_elt = 0;
@ -2124,6 +2294,9 @@ cselib_record_sets (rtx insn)
locations may go away. */
note_stores (body, cselib_invalidate_rtx_note_stores, NULL);
for (i = n_sets_before_autoinc; i < n_sets; i++)
cselib_invalidate_rtx (sets[i].dest);
/* If this is an asm, look for duplicate sets. This can happen when the
user uses the same value as an output multiple times. This is valid
if the outputs are not actually used thereafter. Treat this case as
@ -2208,15 +2381,6 @@ cselib_process_insn (rtx insn)
cselib_record_sets (insn);
#ifdef AUTO_INC_DEC
/* Clobber any registers which appear in REG_INC notes. We
could keep track of the changes to their values, but it is
unlikely to help. */
for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
if (REG_NOTE_KIND (x) == REG_INC)
cselib_invalidate_rtx (XEXP (x, 0));
#endif
/* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
after we have processed the insn. */
if (CALL_P (insn))

gcc/cselib.h

@ -70,8 +70,10 @@ extern void (*cselib_discard_hook) (cselib_val *);
extern void (*cselib_record_sets_hook) (rtx insn, struct cselib_set *sets,
int n_sets);
extern cselib_val *cselib_lookup (rtx, enum machine_mode, int);
extern cselib_val *cselib_lookup_from_insn (rtx, enum machine_mode, int, rtx);
extern cselib_val *cselib_lookup (rtx, enum machine_mode,
int, enum machine_mode);
extern cselib_val *cselib_lookup_from_insn (rtx, enum machine_mode,
int, enum machine_mode, rtx);
extern void cselib_init (int);
extern void cselib_clear_table (void);
extern void cselib_finish (void);
@ -85,7 +87,7 @@ extern rtx cselib_expand_value_rtx_cb (rtx, bitmap, int,
cselib_expand_callback, void *);
extern bool cselib_dummy_expand_value_rtx_cb (rtx, bitmap, int,
cselib_expand_callback, void *);
extern rtx cselib_subst_to_values (rtx);
extern rtx cselib_subst_to_values (rtx, enum machine_mode);
extern void cselib_invalidate_rtx (rtx);
extern void cselib_reset_table (unsigned int);
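
A minimal usage sketch of the widened signatures (REG and MEM are placeholder rtxes; Pmode stands in for the proper address-space mode): pass VOIDmode when the looked-up rtx is not part of an address, and the enclosing MEM's mode when it is.

  /* Not inside an address: no MEM mode to supply.  */
  cselib_val *v = cselib_lookup (reg, GET_MODE (reg), 1, VOIDmode);

  /* The address of a MEM: supply the MEM's mode so any autoinc rtx in
     the address can be resolved to a concrete offset.  */
  cselib_val *a = cselib_lookup (XEXP (mem, 0), Pmode, 1, GET_MODE (mem));
  rtx addr = cselib_subst_to_values (XEXP (mem, 0), GET_MODE (mem));
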

gcc/dse.c

@ -806,82 +806,25 @@ free_store_info (insn_info_t insn_info)
insn_info->store_rec = NULL;
}
struct insn_size {
int size;
rtx insn;
};
/* Add an insn to do the add inside a x if it is a
PRE/POST-INC/DEC/MODIFY. D is an structure containing the insn and
the size of the mode of the MEM that this is inside of. */
/* Callback for for_each_inc_dec that emits an INSN that sets DEST to
SRC + SRCOFF before insn ARG. */
static int
replace_inc_dec (rtx *r, void *d)
emit_inc_dec_insn_before (rtx mem ATTRIBUTE_UNUSED,
rtx op ATTRIBUTE_UNUSED,
rtx dest, rtx src, rtx srcoff, void *arg)
{
rtx x = *r;
struct insn_size *data = (struct insn_size *)d;
switch (GET_CODE (x))
{
case PRE_INC:
case POST_INC:
{
rtx r1 = XEXP (x, 0);
rtx c = gen_int_mode (data->size, GET_MODE (r1));
emit_insn_before (gen_rtx_SET (VOIDmode, r1,
gen_rtx_PLUS (GET_MODE (r1), r1, c)),
data->insn);
return -1;
}
rtx insn = (rtx)arg;
case PRE_DEC:
case POST_DEC:
{
rtx r1 = XEXP (x, 0);
rtx c = gen_int_mode (-data->size, GET_MODE (r1));
emit_insn_before (gen_rtx_SET (VOIDmode, r1,
gen_rtx_PLUS (GET_MODE (r1), r1, c)),
data->insn);
return -1;
}
if (srcoff)
src = gen_rtx_PLUS (GET_MODE (src), src, srcoff);
case PRE_MODIFY:
case POST_MODIFY:
{
/* We can reuse the add because we are about to delete the
insn that contained it. */
rtx add = XEXP (x, 0);
rtx r1 = XEXP (add, 0);
emit_insn_before (gen_rtx_SET (VOIDmode, r1, add), data->insn);
return -1;
}
/* We can reuse all operands without copying, because we are about
to delete the insn that contained it. */
default:
return 0;
}
}
emit_insn_before (gen_rtx_SET (VOIDmode, dest, src), insn);
/* If X is a MEM, check the address to see if it is PRE/POST-INC/DEC/MODIFY
and generate an add to replace that. */
static int
replace_inc_dec_mem (rtx *r, void *d)
{
rtx x = *r;
if (x != NULL_RTX && MEM_P (x))
{
struct insn_size data;
data.size = GET_MODE_SIZE (GET_MODE (x));
data.insn = (rtx) d;
for_each_rtx (&XEXP (x, 0), replace_inc_dec, &data);
return -1;
}
return 0;
return -1;
}
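
Hedged illustration (R1 and the SImode access are invented): for a dead store through (mem:SI (post_inc (reg:SI r1))), the callback receives dest == src == r1 and srcoff == (const_int 4), so before deleting the store it emits the equivalent of:

  emit_insn_before (gen_rtx_SET (VOIDmode, r1,
				 gen_rtx_PLUS (SImode, r1, GEN_INT (4))),
		    insn);
  /* The register update implied by the auto-increment thus survives
     even though the memory access itself is removed.  */
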
/* Before we delete INSN, make sure that the auto inc/dec, if it is
@ -892,7 +835,7 @@ check_for_inc_dec (rtx insn)
{
rtx note = find_reg_note (insn, REG_INC, NULL_RTX);
if (note)
for_each_rtx (&insn, replace_inc_dec_mem, insn);
for_each_inc_dec (&insn, emit_inc_dec_insn_before, insn);
}
@ -1107,7 +1050,7 @@ canon_address (rtx mem,
*alias_set_out = 0;
cselib_lookup (mem_address, address_mode, 1);
cselib_lookup (mem_address, address_mode, 1, GET_MODE (mem));
if (dump_file)
{
@ -1187,7 +1130,7 @@ canon_address (rtx mem,
}
}
*base = cselib_lookup (address, address_mode, true);
*base = cselib_lookup (address, address_mode, true, GET_MODE (mem));
*group_id = -1;
if (*base == NULL)

gcc/gcse.c

@ -2738,7 +2738,7 @@ do_local_cprop (rtx x, rtx insn)
|| (GET_CODE (PATTERN (insn)) != USE
&& asm_noperands (PATTERN (insn)) < 0)))
{
cselib_val *val = cselib_lookup (x, GET_MODE (x), 0);
cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
struct elt_loc_list *l;
if (!val)

gcc/postreload.c

@ -262,7 +262,7 @@ reload_cse_simplify_set (rtx set, rtx insn)
return 0;
#endif
val = cselib_lookup (src, GET_MODE (SET_DEST (set)), 0);
val = cselib_lookup (src, GET_MODE (SET_DEST (set)), 0, VOIDmode);
if (! val)
return 0;
@ -476,7 +476,7 @@ reload_cse_simplify_operands (rtx insn, rtx testreg)
continue;
}
#endif /* LOAD_EXTEND_OP */
v = cselib_lookup (op, recog_data.operand_mode[i], 0);
v = cselib_lookup (op, recog_data.operand_mode[i], 0, VOIDmode);
if (! v)
continue;

gcc/rtl.h

@ -1916,6 +1916,17 @@ extern int computed_jump_p (const_rtx);
typedef int (*rtx_function) (rtx *, void *);
extern int for_each_rtx (rtx *, rtx_function, void *);
/* Callback for for_each_inc_dec, to process the autoinc operation OP
within MEM that sets DEST to SRC + SRCOFF, or SRC if SRCOFF is
NULL. The callback is passed the same opaque ARG passed to
for_each_inc_dec. Return zero to continue looking for other
autoinc operations, -1 to skip OP's operands, and any other value
to interrupt the traversal and return that value to the caller of
for_each_inc_dec. */
typedef int (*for_each_inc_dec_fn) (rtx mem, rtx op, rtx dest, rtx src,
rtx srcoff, void *arg);
extern int for_each_inc_dec (rtx *, for_each_inc_dec_fn, void *arg);
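
A hedged usage sketch (the counting callback is invented for illustration): count the autoinc operations in INSN without descending into their operands.

  static int
  count_autoinc_cb (rtx mem ATTRIBUTE_UNUSED, rtx op ATTRIBUTE_UNUSED,
		    rtx dest ATTRIBUTE_UNUSED, rtx src ATTRIBUTE_UNUSED,
		    rtx srcoff ATTRIBUTE_UNUSED, void *arg)
  {
    ++*(int *) arg;
    return -1;	/* Skip OP's operands but keep scanning the insn.  */
  }

  /* ... */
  int count = 0;
  for_each_inc_dec (&insn, count_autoinc_cb, &count);
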
typedef int (*rtx_equal_p_callback_function) (const_rtx *, const_rtx *,
rtx *, rtx *);
extern int rtx_equal_p_cb (const_rtx, const_rtx,

gcc/rtlanal.c

@ -2886,7 +2886,124 @@ for_each_rtx (rtx *x, rtx_function f, void *data)
return for_each_rtx_1 (*x, i, f, data);
}
/* Data structure that holds the internal state communicated between
for_each_inc_dec, for_each_inc_dec_find_mem and
for_each_inc_dec_find_inc_dec. */
struct for_each_inc_dec_ops {
/* The function to be called for each autoinc operation found. */
for_each_inc_dec_fn fn;
/* The opaque argument to be passed to it. */
void *arg;
/* The MEM we're visiting, if any. */
rtx mem;
};
static int for_each_inc_dec_find_mem (rtx *r, void *d);
/* Find PRE/POST-INC/DEC/MODIFY operations within *R, extract the
operands of the equivalent add insn and pass the result to the
operator specified by *D. */
static int
for_each_inc_dec_find_inc_dec (rtx *r, void *d)
{
rtx x = *r;
struct for_each_inc_dec_ops *data = (struct for_each_inc_dec_ops *)d;
switch (GET_CODE (x))
{
case PRE_INC:
case POST_INC:
{
int size = GET_MODE_SIZE (GET_MODE (data->mem));
rtx r1 = XEXP (x, 0);
rtx c = gen_int_mode (size, GET_MODE (r1));
return data->fn (data->mem, x, r1, r1, c, data->arg);
}
case PRE_DEC:
case POST_DEC:
{
int size = GET_MODE_SIZE (GET_MODE (data->mem));
rtx r1 = XEXP (x, 0);
rtx c = gen_int_mode (-size, GET_MODE (r1));
return data->fn (data->mem, x, r1, r1, c, data->arg);
}
case PRE_MODIFY:
case POST_MODIFY:
{
rtx r1 = XEXP (x, 0);
rtx add = XEXP (x, 1);
return data->fn (data->mem, x, r1, add, NULL, data->arg);
}
case MEM:
{
rtx save = data->mem;
int ret = for_each_inc_dec_find_mem (r, d);
data->mem = save;
return ret;
}
default:
return 0;
}
}
/* If *R is a MEM, find PRE/POST-INC/DEC/MODIFY operations within its
address, extract the operands of the equivalent add insn and pass
the result to the operator specified by *D. */
static int
for_each_inc_dec_find_mem (rtx *r, void *d)
{
rtx x = *r;
if (x != NULL_RTX && MEM_P (x))
{
struct for_each_inc_dec_ops *data = (struct for_each_inc_dec_ops *) d;
int result;
data->mem = x;
result = for_each_rtx (&XEXP (x, 0), for_each_inc_dec_find_inc_dec,
data);
if (result)
return result;
return -1;
}
return 0;
}
/* Traverse *X looking for MEMs, and for autoinc operations within
them. For each such autoinc operation found, call FN, passing it
the innermost enclosing MEM, the operation itself, the RTX modified
by the operation, two RTXs (the second may be NULL) that, once
added, represent the value to be held by the modified RTX
afterwards, and ARG. FN is to return -1 to skip looking for other
autoinc operations within the visited operation, 0 to continue the
traversal, or any other value to have it returned to the caller of
for_each_inc_dec. */
int
for_each_inc_dec (rtx *x,
for_each_inc_dec_fn fn,
void *arg)
{
struct for_each_inc_dec_ops data;
data.fn = fn;
data.arg = arg;
data.mem = NULL;
return for_each_rtx (x, for_each_inc_dec_find_mem, &data);
}
/* Searches X for any reference to REGNO, returning the rtx of the
reference found if any. Otherwise, returns NULL_RTX. */

gcc/sched-deps.c

@ -1566,7 +1566,7 @@ add_insn_mem_dependence (struct deps_desc *deps, bool read_p,
if (sched_deps_info->use_cselib)
{
mem = shallow_copy_rtx (mem);
XEXP (mem, 0) = cselib_subst_to_values (XEXP (mem, 0));
XEXP (mem, 0) = cselib_subst_to_values (XEXP (mem, 0), GET_MODE (mem));
}
link = alloc_EXPR_LIST (VOIDmode, canon_rtx (mem), *mem_list);
*mem_list = link;
@ -2283,8 +2283,9 @@ sched_analyze_1 (struct deps_desc *deps, rtx x, rtx insn)
= targetm.addr_space.address_mode (MEM_ADDR_SPACE (dest));
t = shallow_copy_rtx (dest);
cselib_lookup_from_insn (XEXP (t, 0), address_mode, 1, insn);
XEXP (t, 0) = cselib_subst_to_values (XEXP (t, 0));
cselib_lookup_from_insn (XEXP (t, 0), address_mode, 1,
GET_MODE (t), insn);
XEXP (t, 0) = cselib_subst_to_values (XEXP (t, 0), GET_MODE (t));
}
t = canon_rtx (t);
@ -2440,8 +2441,9 @@ sched_analyze_2 (struct deps_desc *deps, rtx x, rtx insn)
= targetm.addr_space.address_mode (MEM_ADDR_SPACE (t));
t = shallow_copy_rtx (t);
cselib_lookup_from_insn (XEXP (t, 0), address_mode, 1, insn);
XEXP (t, 0) = cselib_subst_to_values (XEXP (t, 0));
cselib_lookup_from_insn (XEXP (t, 0), address_mode, 1,
GET_MODE (t), insn);
XEXP (t, 0) = cselib_subst_to_values (XEXP (t, 0), GET_MODE (t));
}
if (!DEBUG_INSN_P (insn))

gcc/sel-sched-dump.c

@ -960,8 +960,8 @@ debug_mem_addr_value (rtx x)
address_mode = targetm.addr_space.address_mode (MEM_ADDR_SPACE (x));
t = shallow_copy_rtx (x);
if (cselib_lookup (XEXP (t, 0), address_mode, 0))
XEXP (t, 0) = cselib_subst_to_values (XEXP (t, 0));
if (cselib_lookup (XEXP (t, 0), address_mode, 0, GET_MODE (t)))
XEXP (t, 0) = cselib_subst_to_values (XEXP (t, 0), GET_MODE (t));
t = canon_rtx (t);
addr = get_addr (XEXP (t, 0));

gcc/var-tracking.c

@ -737,7 +737,7 @@ use_narrower_mode_test (rtx *loc, void *data)
switch (GET_CODE (*loc))
{
case REG:
if (cselib_lookup (*loc, GET_MODE (SUBREG_REG (subreg)), 0))
if (cselib_lookup (*loc, GET_MODE (SUBREG_REG (subreg)), 0, VOIDmode))
return 1;
return -1;
case PLUS:
@ -3954,8 +3954,10 @@ variable_post_merge_new_vals (void **slot, void *info)
subsequent rounds. */
cselib_val *v;
gcc_assert (!cselib_lookup (node->loc,
GET_MODE (node->loc), 0));
v = cselib_lookup (node->loc, GET_MODE (node->loc), 1);
GET_MODE (node->loc), 0,
VOIDmode));
v = cselib_lookup (node->loc, GET_MODE (node->loc), 1,
VOIDmode);
cselib_preserve_value (v);
cselib_invalidate_rtx (node->loc);
cval = v->val_rtx;
@ -4793,7 +4795,7 @@ find_use_val (rtx x, enum machine_mode mode, struct count_use_info *cui)
return cui->sets[i].src_elt;
}
else
return cselib_lookup (x, mode, 0);
return cselib_lookup (x, mode, 0, VOIDmode);
}
return NULL;
@ -4822,14 +4824,15 @@ replace_expr_with_values (rtx loc)
else if (MEM_P (loc))
{
cselib_val *addr = cselib_lookup (XEXP (loc, 0),
get_address_mode (loc), 0);
get_address_mode (loc), 0,
GET_MODE (loc));
if (addr)
return replace_equiv_address_nv (loc, addr->val_rtx);
else
return NULL;
}
else
return cselib_subst_to_values (loc);
return cselib_subst_to_values (loc, VOIDmode);
}
/* Determine what kind of micro operation to choose for a USE. Return
@ -4849,7 +4852,8 @@ use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep)
rtx ploc = PAT_VAR_LOCATION_LOC (loc);
if (! VAR_LOC_UNKNOWN_P (ploc))
{
cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1);
cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1,
VOIDmode);
/* ??? flag_float_store and volatile mems are never
given values, but we could in theory use them for
@ -4871,7 +4875,8 @@ use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep)
if (REG_P (loc)
|| (find_use_val (loc, GET_MODE (loc), cui)
&& cselib_lookup (XEXP (loc, 0),
get_address_mode (loc), 0)))
get_address_mode (loc), 0,
GET_MODE (loc))))
return MO_VAL_SET;
}
else
@ -5033,13 +5038,15 @@ add_uses (rtx *ploc, void *data)
rtx mloc = vloc;
enum machine_mode address_mode = get_address_mode (mloc);
cselib_val *val
= cselib_lookup (XEXP (mloc, 0), address_mode, 0);
= cselib_lookup (XEXP (mloc, 0), address_mode, 0,
GET_MODE (mloc));
if (val && !cselib_preserved_value_p (val))
{
micro_operation moa;
preserve_value (val);
mloc = cselib_subst_to_values (XEXP (mloc, 0));
mloc = cselib_subst_to_values (XEXP (mloc, 0),
GET_MODE (mloc));
moa.type = MO_VAL_USE;
moa.insn = cui->insn;
moa.u.loc = gen_rtx_CONCAT (address_mode,
@ -5109,13 +5116,15 @@ add_uses (rtx *ploc, void *data)
rtx mloc = oloc;
enum machine_mode address_mode = get_address_mode (mloc);
cselib_val *val
= cselib_lookup (XEXP (mloc, 0), address_mode, 0);
= cselib_lookup (XEXP (mloc, 0), address_mode, 0,
GET_MODE (mloc));
if (val && !cselib_preserved_value_p (val))
{
micro_operation moa;
preserve_value (val);
mloc = cselib_subst_to_values (XEXP (mloc, 0));
mloc = cselib_subst_to_values (XEXP (mloc, 0),
GET_MODE (mloc));
moa.type = MO_VAL_USE;
moa.insn = cui->insn;
moa.u.loc = gen_rtx_CONCAT (address_mode,
@ -5225,7 +5234,7 @@ reverse_op (rtx val, const_rtx expr)
if (!SCALAR_INT_MODE_P (GET_MODE (src)) || XEXP (src, 0) == cfa_base_rtx)
return NULL_RTX;
v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0);
v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0, VOIDmode);
if (!v || !cselib_preserved_value_p (v))
return NULL_RTX;
@ -5346,13 +5355,15 @@ add_stores (rtx loc, const_rtx expr, void *cuip)
rtx mloc = loc;
enum machine_mode address_mode = get_address_mode (mloc);
cselib_val *val = cselib_lookup (XEXP (mloc, 0),
address_mode, 0);
address_mode, 0,
GET_MODE (mloc));
if (val && !cselib_preserved_value_p (val))
{
preserve_value (val);
mo.type = MO_VAL_USE;
mloc = cselib_subst_to_values (XEXP (mloc, 0));
mloc = cselib_subst_to_values (XEXP (mloc, 0),
GET_MODE (mloc));
mo.u.loc = gen_rtx_CONCAT (address_mode, val->val_rtx, mloc);
mo.insn = cui->insn;
if (dump_file && (dump_flags & TDF_DETAILS))
@ -5411,7 +5422,7 @@ add_stores (rtx loc, const_rtx expr, void *cuip)
if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
{
cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0);
cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0, VOIDmode);
gcc_assert (oval != v);
gcc_assert (REG_P (oloc) || MEM_P (oloc));
@ -8077,7 +8088,8 @@ vt_add_function_parameter (tree parm)
if (offset)
return;
val = cselib_lookup (var_lowpart (mode, incoming), mode, true);
val = cselib_lookup (var_lowpart (mode, incoming), mode, true,
VOIDmode);
/* ??? Float-typed values in memory are not handled by
cselib. */
@ -8197,7 +8209,7 @@ vt_init_cfa_base (void)
frame_pointer_needed
? hard_frame_pointer_rtx : stack_pointer_rtx);
val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
get_insns ());
VOIDmode, get_insns ());
preserve_value (val);
cselib_preserve_cfa_base_value (val, REGNO (cfa_base_rtx));
var_reg_decl_set (&VTI (ENTRY_BLOCK_PTR)->out, cfa_base_rtx,