alpha.c (aligned_memory_operand): Recognize the output of LEGITIMIZE_RELOAD_ADDRESS.

* alpha.c (aligned_memory_operand): Recognize the output of
        LEGITIMIZE_RELOAD_ADDRESS.  Examine reg_equiv_memory_loc in
        the event of a pseudo.
        (unaligned_memory_operand): Likewise.  Don't otherwise accept
        completely illegal addresses.
        (normal_memory_operand): Likewise.  Handle subregs of pseudos.
        (get_aligned_mem): Revert previous change.  Abort if we don't have a
        mem.  During reload, call find_replacement on all illegal memories.
        (get_unaligned_address): Likewise.
        * alpha.h (SECONDARY_INPUT_RELOAD_CLASS): Use !aligned_memory_operand
        instead of unaligned_memory_operand.
        * alpha.md: Revert extra argument to get_aligned_mem.
        (reload_inqi): Use any_memory_operand in constraints.  Abort if
        we're not given some sort of mem.
        (reload_inhi): Likewise.
        (reload_outqi, reload_outhi): Likewise.

From-SVN: r26445
This commit is contained in:
Richard Henderson 1999-04-14 03:13:19 -07:00 committed by Richard Henderson
parent 4eea167258
commit 4e46365be1
4 changed files with 133 additions and 140 deletions

View File

@@ -3,6 +3,23 @@ Wed Apr 14 09:59:38 1999 Richard Henderson <rth@cygnus.com>
* reload1.c (emit_reload_insns): Also find equivalent mems
for subregs of pseudos.
* alpha.c (aligned_memory_operand): Recognize the output of
LEGITIMIZE_RELOAD_ADDRESS. Examine reg_equiv_memory_loc in
the event of a pseudo.
(unaligned_memory_operand): Likewise. Don't otherwise accept
completely illegal addresses.
(normal_memory_operand): Likewise. Handle subregs of pseudos.
(get_aligned_mem): Revert previous change. Abort if we don't have a
mem. During reload, call find_replacement on all illegal memories.
(get_unaligned_address): Likewise.
* alpha.h (SECONDARY_INPUT_RELOAD_CLASS): Use !aligned_memory_operand
instead of unaligned_memory_operand.
* alpha.md: Revert extra argument to get_aligned_mem.
(reload_inqi): Use any_memory_operand in constraints. Abort if
we're not given some sort of mem.
(reload_inhi): Likewise.
(reload_outqi, reload_outhi): Likewise.
Wed Apr 14 09:39:20 1999 Richard Henderson <rth@cygnus.com>
* i386.md (neghi): Use the whole register when widening the op.

View File

@@ -735,36 +735,42 @@ aligned_memory_operand (op, mode)
register rtx op;
enum machine_mode mode;
{
if (GET_CODE (op) == SUBREG)
{
if (GET_MODE (op) != mode)
return 0;
op = SUBREG_REG (op);
mode = GET_MODE (op);
}
rtx base;
if (reload_in_progress)
{
/* This is a stack slot. The stack pointer is always aligned.
We may have to jump through hoops to get a valid address,
but we can do it. */
if (GET_CODE (op) == REG
&& REGNO (op) >= FIRST_PSEUDO_REGISTER)
return 1;
rtx tmp = op;
if (GET_CODE (tmp) == SUBREG)
tmp = SUBREG_REG (tmp);
if (GET_CODE (tmp) == REG
&& REGNO (tmp) >= FIRST_PSEUDO_REGISTER)
{
op = reg_equiv_memory_loc[REGNO (tmp)];
if (op == 0)
return 0;
}
}
if (GET_CODE (op) != MEM
|| GET_MODE (op) != mode
|| ! memory_address_p (mode, XEXP (op, 0)))
|| GET_MODE (op) != mode)
return 0;
op = XEXP (op, 0);
if (GET_CODE (op) == PLUS)
op = XEXP (op, 0);
/* LEGITIMIZE_RELOAD_ADDRESS creates (plus (plus reg const_hi) const_lo)
sorts of constructs. Dig for the real base register. */
if (reload_in_progress
&& GET_CODE (op) == PLUS
&& GET_CODE (XEXP (op, 0)) == PLUS)
base = XEXP (XEXP (op, 0), 0);
else
{
if (! memory_address_p (mode, op))
return 0;
base = (GET_CODE (op) == PLUS ? XEXP (op, 0) : op);
}
return (GET_CODE (op) == REG
&& REGNO_POINTER_ALIGN (REGNO (op)) >= 4);
return (GET_CODE (base) == REG
&& REGNO_POINTER_ALIGN (REGNO (base)) >= 4);
}
/* Similar, but return 1 if OP is a MEM which is not alignable. */
@@ -774,31 +780,42 @@ unaligned_memory_operand (op, mode)
register rtx op;
enum machine_mode mode;
{
if (GET_CODE (op) == SUBREG)
rtx base;
if (reload_in_progress)
{
if (GET_MODE (op) != mode)
return 0;
op = SUBREG_REG (op);
mode = GET_MODE (op);
rtx tmp = op;
if (GET_CODE (tmp) == SUBREG)
tmp = SUBREG_REG (tmp);
if (GET_CODE (tmp) == REG
&& REGNO (tmp) >= FIRST_PSEUDO_REGISTER)
{
op = reg_equiv_memory_loc[REGNO (tmp)];
if (op == 0)
return 0;
}
}
if (reload_in_progress && GET_CODE (op) == REG
&& REGNO (op) >= FIRST_PSEUDO_REGISTER)
op = reg_equiv_mem[REGNO (op)];
if (GET_CODE (op) != MEM || GET_MODE (op) != mode)
if (GET_CODE (op) != MEM
|| GET_MODE (op) != mode)
return 0;
op = XEXP (op, 0);
if (! memory_address_p (mode, op))
return 1;
/* LEGITIMIZE_RELOAD_ADDRESS creates (plus (plus reg const_hi) const_lo)
sorts of constructs. Dig for the real base register. */
if (reload_in_progress
&& GET_CODE (op) == PLUS
&& GET_CODE (XEXP (op, 0)) == PLUS)
base = XEXP (XEXP (op, 0), 0);
else
{
if (! memory_address_p (mode, op))
return 0;
base = (GET_CODE (op) == PLUS ? XEXP (op, 0) : op);
}
if (GET_CODE (op) == PLUS)
op = XEXP (op, 0);
return (GET_CODE (op) != REG
|| REGNO_POINTER_ALIGN (REGNO (op)) < 4);
return (GET_CODE (base) == REG
&& REGNO_POINTER_ALIGN (REGNO (base)) < 4);
}
/* Return 1 if OP is either a register or an unaligned memory location. */
@@ -861,15 +878,21 @@ normal_memory_operand (op, mode)
register rtx op;
enum machine_mode mode ATTRIBUTE_UNUSED;
{
if (reload_in_progress && GET_CODE (op) == REG
&& REGNO (op) >= FIRST_PSEUDO_REGISTER)
if (reload_in_progress)
{
op = reg_equiv_mem[REGNO (op)];
rtx tmp = op;
if (GET_CODE (tmp) == SUBREG)
tmp = SUBREG_REG (tmp);
if (GET_CODE (tmp) == REG
&& REGNO (tmp) >= FIRST_PSEUDO_REGISTER)
{
op = reg_equiv_memory_loc[REGNO (tmp)];
/* This may not have been assigned an equivalent address if it will
be eliminated. In that case, it doesn't matter what we do. */
if (op == 0)
return 1;
/* This may not have been assigned an equivalent address if it will
be eliminated. In that case, it doesn't matter what we do. */
if (op == 0)
return 1;
}
}
return GET_CODE (op) == MEM && GET_CODE (XEXP (op, 0)) != AND;
@@ -906,63 +929,26 @@ direct_return ()
of range stack slots. */
void
get_aligned_mem (ref, scratch, paligned_mem, pbitnum)
rtx ref, scratch;
get_aligned_mem (ref, paligned_mem, pbitnum)
rtx ref;
rtx *paligned_mem, *pbitnum;
{
rtx base;
HOST_WIDE_INT offset = 0;
if (GET_CODE (ref) == SUBREG)
if (GET_CODE (ref) != MEM)
abort ();
if (reload_in_progress
&& ! memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
{
offset = SUBREG_WORD (ref) * UNITS_PER_WORD;
if (BYTES_BIG_ENDIAN)
offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (ref)))
- MIN (UNITS_PER_WORD,
GET_MODE_SIZE (GET_MODE (SUBREG_REG (ref)))));
ref = SUBREG_REG (ref);
}
base = find_replacement (&XEXP (ref, 0));
if (reload_in_progress)
{
if (GET_CODE (ref) == REG)
{
/* The "simple" case is where the stack slot is in range. */
if (reg_equiv_mem[REGNO (ref)])
{
ref = reg_equiv_mem[REGNO (ref)];
base = find_replacement (&XEXP (ref, 0));
}
else
{
/* The stack slot isn't in range. Fix it up as needed. */
HOST_WIDE_INT hi, lo;
base = reg_equiv_address[REGNO (ref)];
if (GET_CODE (base) != PLUS)
abort ();
offset += INTVAL (XEXP (base, 1));
base = XEXP (base, 0);
lo = ((offset & 0xFFFF) ^ 0x8000) - 0x8000;
hi = (((offset - lo) & 0xFFFFFFFF) ^ 0x80000000) - 0x80000000;
if (hi + lo != offset)
abort ();
if (scratch == NULL)
abort ();
emit_insn (gen_adddi3 (scratch, base, GEN_INT (hi)));
base = scratch;
offset = lo;
}
}
else
base = find_replacement (&XEXP (ref, 0));
if (! memory_address_p (GET_MODE (ref), base))
abort ();
}
else
{
if (GET_CODE (ref) != MEM)
abort ();
base = XEXP (ref, 0);
}
@@ -991,35 +977,19 @@ get_unaligned_address (ref, extra_offset)
rtx base;
HOST_WIDE_INT offset = 0;
if (GET_CODE (ref) == SUBREG)
{
offset = SUBREG_WORD (ref) * UNITS_PER_WORD;
if (BYTES_BIG_ENDIAN)
offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (ref)))
- MIN (UNITS_PER_WORD,
GET_MODE_SIZE (GET_MODE (SUBREG_REG (ref)))));
ref = SUBREG_REG (ref);
}
if (GET_CODE (ref) != MEM)
abort ();
if (reload_in_progress)
if (reload_in_progress
&& ! memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
{
if (GET_CODE (ref) == REG)
{
if (reg_equiv_mem[REGNO (ref)])
ref = reg_equiv_mem[REGNO (ref)];
else
{
/* The stack slot is out of range. We should have handled
this as an aligned access -- I wonder why we didn't? */
abort ();
}
}
base = find_replacement (&XEXP (ref, 0));
if (! memory_address_p (GET_MODE (ref), base))
abort ();
}
else
{
if (GET_CODE (ref) != MEM)
abort ();
base = XEXP (ref, 0);
}

View File

@@ -820,7 +820,7 @@ extern int normal_memory_operand ();
&& (((CLASS) == FLOAT_REGS \
&& ((MODE) == SImode || (MODE) == HImode || (MODE) == QImode)) \
|| (((MODE) == QImode || (MODE) == HImode) \
&& ! TARGET_BWX && unaligned_memory_operand (IN, MODE)))) \
&& ! TARGET_BWX && ! aligned_memory_operand (IN, MODE)))) \
? GENERAL_REGS \
: ((CLASS) == FLOAT_REGS && GET_CODE (IN) == MEM \
&& GET_CODE (XEXP (IN, 0)) == AND) ? GENERAL_REGS \

View File

@@ -4522,7 +4522,7 @@
? gen_rtx_REG (SImode, REGNO (operands[0]))
: gen_reg_rtx (SImode));
get_aligned_mem (operands[1], scratch, &aligned_mem, &bitnum);
get_aligned_mem (operands[1], &aligned_mem, &bitnum);
emit_insn (gen_aligned_loadqi (operands[0], aligned_mem, bitnum,
scratch));
@@ -4562,7 +4562,7 @@
rtx temp1 = gen_reg_rtx (SImode);
rtx temp2 = gen_reg_rtx (SImode);
get_aligned_mem (operands[0], NULL_RTX, &aligned_mem, &bitnum);
get_aligned_mem (operands[0], &aligned_mem, &bitnum);
emit_insn (gen_aligned_store (aligned_mem, operands[1], bitnum,
temp1, temp2));
@@ -4574,7 +4574,7 @@
rtx temp3 = gen_reg_rtx (DImode);
rtx seq
= gen_unaligned_storeqi (get_unaligned_address (operands[0], 0),
operands[1], temp1, temp2, temp3);
operands[1], temp1, temp2, temp3);
alpha_set_memflags (seq, operands[0]);
emit_insn (seq);
@@ -4633,7 +4633,7 @@
? gen_rtx_REG (SImode, REGNO (operands[0]))
: gen_reg_rtx (SImode));
get_aligned_mem (operands[1], scratch, &aligned_mem, &bitnum);
get_aligned_mem (operands[1], &aligned_mem, &bitnum);
emit_insn (gen_aligned_loadhi (operands[0], aligned_mem, bitnum,
scratch));
@@ -4673,7 +4673,7 @@
rtx temp1 = gen_reg_rtx (SImode);
rtx temp2 = gen_reg_rtx (SImode);
get_aligned_mem (operands[0], NULL_RTX, &aligned_mem, &bitnum);
get_aligned_mem (operands[0], &aligned_mem, &bitnum);
emit_insn (gen_aligned_store (aligned_mem, operands[1], bitnum,
temp1, temp2));
@@ -4702,20 +4702,21 @@
(define_expand "reload_inqi"
[(parallel [(match_operand:QI 0 "register_operand" "=r")
(match_operand:QI 1 "unaligned_memory_operand" "m")
(match_operand:QI 1 "any_memory_operand" "m")
(match_operand:TI 2 "register_operand" "=&r")])]
"! TARGET_BWX"
"
{
rtx scratch, seq;
if (GET_CODE (operands[1]) != MEM)
abort ();
if (aligned_memory_operand (operands[1], QImode))
{
rtx aligned_mem, bitnum;
get_aligned_mem (operands[1],
gen_rtx_REG (DImode, REGNO (operands[2]) + 1),
&aligned_mem, &bitnum);
get_aligned_mem (operands[1], &aligned_mem, &bitnum);
seq = gen_aligned_loadqi (operands[0], aligned_mem, bitnum,
gen_rtx_REG (SImode, REGNO (operands[2])));
}
@@ -4724,8 +4725,8 @@
rtx addr;
/* It is possible that one of the registers we got for operands[2]
might coincide with that of operands[0] (which is why we made
it TImode). Pick the other one to use as our scratch. */
might coincide with that of operands[0] (which is why we made
it TImode). Pick the other one to use as our scratch. */
if (REGNO (operands[0]) == REGNO (operands[2]))
scratch = gen_rtx_REG (DImode, REGNO (operands[2]) + 1);
else
@@ -4733,9 +4734,9 @@
addr = get_unaligned_address (operands[1], 0);
seq = gen_unaligned_loadqi (operands[0], addr, scratch,
gen_rtx_REG (DImode, REGNO (operands[0])));
gen_rtx_REG (DImode, REGNO (operands[0])));
alpha_set_memflags (seq, operands[1]);
}
alpha_set_memflags (seq, operands[1]);
emit_insn (seq);
DONE;
}")
@@ -4749,13 +4750,14 @@
{
rtx scratch, seq;
if (GET_CODE (operands[1]) != MEM)
abort ();
if (aligned_memory_operand (operands[1], HImode))
{
rtx aligned_mem, bitnum;
get_aligned_mem (operands[1],
gen_rtx_REG (DImode, REGNO (operands[2]) + 1),
&aligned_mem, &bitnum);
get_aligned_mem (operands[1], &aligned_mem, &bitnum);
seq = gen_aligned_loadhi (operands[0], aligned_mem, bitnum,
gen_rtx_REG (SImode, REGNO (operands[2])));
}
@@ -4764,8 +4766,8 @@
rtx addr;
/* It is possible that one of the registers we got for operands[2]
might coincide with that of operands[0] (which is why we made
it TImode). Pick the other one to use as our scratch. */
might coincide with that of operands[0] (which is why we made
it TImode). Pick the other one to use as our scratch. */
if (REGNO (operands[0]) == REGNO (operands[2]))
scratch = gen_rtx_REG (DImode, REGNO (operands[2]) + 1);
else
@@ -4773,9 +4775,9 @@
addr = get_unaligned_address (operands[1], 0);
seq = gen_unaligned_loadhi (operands[0], addr, scratch,
gen_rtx_REG (DImode, REGNO (operands[0])));
gen_rtx_REG (DImode, REGNO (operands[0])));
alpha_set_memflags (seq, operands[1]);
}
alpha_set_memflags (seq, operands[1]);
emit_insn (seq);
DONE;
}")
@@ -4787,11 +4789,14 @@
"! TARGET_BWX"
"
{
if (GET_CODE (operands[0]) != MEM)
abort ();
if (aligned_memory_operand (operands[0], QImode))
{
rtx aligned_mem, bitnum;
get_aligned_mem (operands[0], NULL_RTX, &aligned_mem, &bitnum);
get_aligned_mem (operands[0], &aligned_mem, &bitnum);
emit_insn (gen_aligned_store (aligned_mem, operands[1], bitnum,
gen_rtx_REG (SImode, REGNO (operands[2])),
@@ -4814,7 +4819,6 @@
alpha_set_memflags (seq, operands[0]);
emit_insn (seq);
}
DONE;
}")
@@ -4825,11 +4829,14 @@
"! TARGET_BWX"
"
{
if (GET_CODE (operands[0]) != MEM)
abort ();
if (aligned_memory_operand (operands[0], HImode))
{
rtx aligned_mem, bitnum;
get_aligned_mem (operands[0], NULL_RTX, &aligned_mem, &bitnum);
get_aligned_mem (operands[0], &aligned_mem, &bitnum);
emit_insn (gen_aligned_store (aligned_mem, operands[1], bitnum,
gen_rtx_REG (SImode, REGNO (operands[2])),
@@ -4852,7 +4859,6 @@
alpha_set_memflags (seq, operands[0]);
emit_insn (seq);
}
DONE;
}")