predicates.md (aligned_memory_operand): Mark as define_special_predicate.

        * config/alpha/predicates.md (aligned_memory_operand): Mark
        as define_special_predicate.
        (unaligned_memory_operand, normal_memory_operand): Likewise.
        (reg_or_unaligned_mem_operand): Remove.
        (any_memory_operand): Match the documentation and check for
        non-renumbered pseudos during reload.
        * config/alpha/alpha.c (alpha_secondary_reload): Rename from
        alpha_secondary_reload_class, update to new interface, make static.
        Handle CQImode like HImode.  Remove FP subreg check.
        (alpha_expand_mov): Use replace_equiv_address.
        (alpha_expand_mov_nobwx): Use any_memory_operand.
        (TARGET_SECONDARY_RELOAD): New.
        * config/alpha/alpha.h (SECONDARY_INPUT_RELOAD_CLASS): Remove.
        (SECONDARY_OUTPUT_RELOAD_CLASS): Remove.
        * config/alpha/sync.md (I12MODE, I48MODE, modesuffix): Move ...
        * config/alpha/alpha.md: ... here.
        (RELOAD12, reloadmode): New.
        (movcqi): New.
        (reload_in<RELOAD12>): Macro-ize from reload_inqi, reload_inhi.
        Don't handle the aligned case here.
        (reload_out<RELOAD12>): Macro-ize from reload_outqi, reload_outhi.
        (reload_in<I12MODE>_aligned): Macro-ize from reload_inqi_help,
        reload_inhi_help.  Don't expect a scratch register.
        (reload_out<I12MODE>_aligned): Macro-ize from reload_outqi_help,
        reload_outhi_help.
        * config/alpha/alpha-protos.h (alpha_secondary_reload_class): Remove.

From-SVN: r124220
Author: Richard Henderson <rth@redhat.com>, 2007-04-27 07:47:57 -07:00 (committed by Richard Henderson)
parent 64e8a9f04e
commit 48f46219bd
7 changed files with 217 additions and 252 deletions
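
The interface change underlying most of the patch is the move from the
SECONDARY_INPUT_RELOAD_CLASS / SECONDARY_OUTPUT_RELOAD_CLASS macros to the
TARGET_SECONDARY_RELOAD hook.  As a rough sketch of the hook's shape only --
not the Alpha implementation shown in the alpha.c hunk below; the test and
the function name are illustrative:

    /* Return the class of an extra intermediate register needed to move X
       in MODE into a register of class CLASS, or NO_REGS if none is needed.
       Setting sri->icode instead asks reload to emit that reload_in<mode> /
       reload_out<mode> expander, which can carry its own scratch operands.  */
    static enum reg_class
    example_secondary_reload (bool in_p, rtx x, enum reg_class class,
                              enum machine_mode mode,
                              secondary_reload_info *sri)
    {
      if (in_p && MEM_P (x) && mode == HImode)   /* illustrative test only */
        sri->icode = reload_in_optab[mode];      /* i.e. use reload_inhi */
      return NO_REGS;                            /* no extra register class */
    }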

gcc/ChangeLog

@@ -1,3 +1,32 @@
+2007-04-27  Richard Henderson  <rth@redhat.com>
+
+	* config/alpha/predicates.md (aligned_memory_operand): Mark
+	as define_special_predicate.
+	(unaligned_memory_operand, normal_memory_operand): Likewise.
+	(reg_or_unaligned_mem_operand): Remove.
+	(any_memory_operand): Match the documentation and check for
+	non-renumbered pseudos during reload.
+	* config/alpha/alpha.c (alpha_secondary_reload): Rename from
+	alpha_secondary_reload_class, update to new interface, make static.
+	Handle CQImode like HImode.  Remove FP subreg check.
+	(alpha_expand_mov): Use replace_equiv_address.
+	(alpha_expand_mov_nobwx): Use any_memory_operand.
+	(TARGET_SECONDARY_RELOAD): New.
+	* config/alpha/alpha.h (SECONDARY_INPUT_RELOAD_CLASS): Remove.
+	(SECONDARY_OUTPUT_RELOAD_CLASS): Remove.
+	* config/alpha/sync.md (I12MODE, I48MODE, modesuffix): Move ...
+	* config/alpha/alpha.md: ... here.
+	(RELOAD12, reloadmode): New.
+	(movcqi): New.
+	(reload_in<RELOAD12>): Macro-ize from reload_inqi, reload_inhi.
+	Don't handle the aligned case here.
+	(reload_out<RELOAD12>): Macro-ize from reload_outqi, reload_outhi.
+	(reload_in<I12MODE>_aligned): Macro-ize from reload_inqi_help,
+	reload_inhi_help.  Don't expect a scratch register.
+	(reload_out<I12MODE>_aligned): Macro-ize from reload_outqi_help,
+	reload_outhi_help.
+	* config/alpha/alpha-protos.h (alpha_secondary_reload_class): Remove.
+
 2007-04-27  Richard Guenther  <rguenther@suse.de>
 
 	* tree-ssa-forwprop.c (get_prop_dest_stmt): Fix comment typo.

gcc/config/alpha/alpha-protos.h

@@ -50,9 +50,6 @@ extern void get_aligned_mem (rtx, rtx *, rtx *);
 extern rtx get_unaligned_address (rtx);
 extern rtx get_unaligned_offset (rtx, HOST_WIDE_INT);
 extern enum reg_class alpha_preferred_reload_class (rtx, enum reg_class);
-extern enum reg_class alpha_secondary_reload_class (enum reg_class,
-						    enum machine_mode, rtx,
-						    int);
 extern void alpha_set_memflags (rtx, rtx);
 extern bool alpha_split_const_mov (enum machine_mode, rtx *);

gcc/config/alpha/alpha.c

@@ -1533,47 +1533,39 @@ alpha_preferred_reload_class(rtx x, enum reg_class class)
   return class;
 }
 
-/* Loading and storing HImode or QImode values to and from memory
-   usually requires a scratch register.  The exceptions are loading
-   QImode and HImode from an aligned address to a general register
-   unless byte instructions are permitted.
-
-   We also cannot load an unaligned address or a paradoxical SUBREG
-   into an FP register.
-
-   We also cannot do integral arithmetic into FP regs, as might result
-   from register elimination into a DImode fp register.  */
-
-enum reg_class
-alpha_secondary_reload_class (enum reg_class class, enum machine_mode mode,
-			      rtx x, int in)
+/* Inform reload about cases where moving X with a mode MODE to a register in
+   CLASS requires an extra scratch or immediate register.  Return the class
+   needed for the immediate register.  */
+
+static enum reg_class
+alpha_secondary_reload (bool in_p, rtx x, enum reg_class class,
+			enum machine_mode mode, secondary_reload_info *sri)
 {
-  if ((mode == QImode || mode == HImode) && ! TARGET_BWX)
+  /* Loading and storing HImode or QImode values to and from memory
+     usually requires a scratch register.  */
+  if (!TARGET_BWX && (mode == QImode || mode == HImode || mode == CQImode))
     {
-      if (GET_CODE (x) == MEM
-	  || (GET_CODE (x) == REG && REGNO (x) >= FIRST_PSEUDO_REGISTER)
-	  || (GET_CODE (x) == SUBREG
-	      && (GET_CODE (SUBREG_REG (x)) == MEM
-		  || (GET_CODE (SUBREG_REG (x)) == REG
-		      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER))))
+      if (any_memory_operand (x, mode))
 	{
-	  if (!in || !aligned_memory_operand(x, mode))
-	    return GENERAL_REGS;
+	  if (in_p)
+	    {
+	      if (!aligned_memory_operand (x, mode))
+		sri->icode = reload_in_optab[mode];
+	    }
+	  else
+	    sri->icode = reload_out_optab[mode];
+	  return NO_REGS;
 	}
     }
 
+  /* We also cannot do integral arithmetic into FP regs, as might result
+     from register elimination into a DImode fp register.  */
   if (class == FLOAT_REGS)
     {
-      if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == AND)
+      if (MEM_P (x) && GET_CODE (XEXP (x, 0)) == AND)
 	return GENERAL_REGS;
-
-      if (GET_CODE (x) == SUBREG
-	  && (GET_MODE_SIZE (GET_MODE (x))
-	      > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
-	return GENERAL_REGS;
-
-      if (in && INTEGRAL_MODE_P (mode)
-	  && ! (memory_operand (x, mode) || x == const0_rtx))
+      if (in_p && INTEGRAL_MODE_P (mode)
+	  && !MEM_P (x) && !REG_P (x) && !CONST_INT_P (x))
 	return GENERAL_REGS;
     }
 
@@ -2160,8 +2152,7 @@ alpha_expand_mov (enum machine_mode mode, rtx *operands)
       if (reload_in_progress)
 	{
 	  emit_move_insn (operands[0], XEXP (operands[1], 0));
-	  operands[1] = copy_rtx (operands[1]);
-	  XEXP (operands[1], 0) = operands[0];
+	  operands[1] = replace_equiv_address (operands[1], operands[0]);
 	}
       else
 	operands[1] = validize_mem (operands[1]);
@@ -2174,32 +2165,27 @@ alpha_expand_mov (enum machine_mode mode, rtx *operands)
 bool
 alpha_expand_mov_nobwx (enum machine_mode mode, rtx *operands)
 {
+  rtx seq;
+
   /* If the output is not a register, the input must be.  */
-  if (GET_CODE (operands[0]) == MEM)
+  if (MEM_P (operands[0]))
     operands[1] = force_reg (mode, operands[1]);
 
   /* Handle four memory cases, unaligned and aligned for either the input
      or the output.  The only case where we can be called during reload is
     for aligned loads; all other cases require temporaries.  */
 
-  if (GET_CODE (operands[1]) == MEM
-      || (GET_CODE (operands[1]) == SUBREG
-	  && GET_CODE (SUBREG_REG (operands[1])) == MEM)
-      || (reload_in_progress && GET_CODE (operands[1]) == REG
-	  && REGNO (operands[1]) >= FIRST_PSEUDO_REGISTER)
-      || (reload_in_progress && GET_CODE (operands[1]) == SUBREG
-	  && GET_CODE (SUBREG_REG (operands[1])) == REG
-	  && REGNO (SUBREG_REG (operands[1])) >= FIRST_PSEUDO_REGISTER))
+  if (any_memory_operand (operands[1], mode))
     {
       if (aligned_memory_operand (operands[1], mode))
 	{
 	  if (reload_in_progress)
 	    {
-	      emit_insn ((mode == QImode
-			  ? gen_reload_inqi_help
-			  : gen_reload_inhi_help)
-			 (operands[0], operands[1],
-			  gen_rtx_REG (SImode, REGNO (operands[0]))));
+	      if (mode == QImode)
+		seq = gen_reload_inqi_aligned (operands[0], operands[1]);
+	      else
		seq = gen_reload_inhi_aligned (operands[0], operands[1]);
+	      emit_insn (seq);
 	    }
 	  else
 	    {
@@ -2216,10 +2202,13 @@ alpha_expand_mov_nobwx (enum machine_mode mode, rtx *operands)
 	      else
 		subtarget = gen_reg_rtx (DImode), copyout = true;
 
-	      emit_insn ((mode == QImode
-			  ? gen_aligned_loadqi
-			  : gen_aligned_loadhi)
-			 (subtarget, aligned_mem, bitnum, scratch));
+	      if (mode == QImode)
+		seq = gen_aligned_loadqi (subtarget, aligned_mem,
+					  bitnum, scratch);
+	      else
+		seq = gen_aligned_loadhi (subtarget, aligned_mem,
+					  bitnum, scratch);
+	      emit_insn (seq);
 
 	      if (copyout)
 		emit_move_insn (operands[0], gen_lowpart (mode, subtarget));
@@ -2231,7 +2220,7 @@ alpha_expand_mov_nobwx (enum machine_mode mode, rtx *operands)
 	     code depend on parameter evaluation order which will cause
 	     bootstrap failures.  */
 
-	  rtx temp1, temp2, seq, subtarget;
+	  rtx temp1, temp2, subtarget, ua;
 	  bool copyout;
 
 	  temp1 = gen_reg_rtx (DImode);
@@ -2243,11 +2232,12 @@ alpha_expand_mov_nobwx (enum machine_mode mode, rtx *operands)
 	  else
 	    subtarget = gen_reg_rtx (DImode), copyout = true;
 
-	  seq = ((mode == QImode
-		  ? gen_unaligned_loadqi
-		  : gen_unaligned_loadhi)
-		 (subtarget, get_unaligned_address (operands[1]),
-		  temp1, temp2));
+	  ua = get_unaligned_address (operands[1]);
+	  if (mode == QImode)
+	    seq = gen_unaligned_loadqi (subtarget, ua, temp1, temp2);
+	  else
+	    seq = gen_unaligned_loadhi (subtarget, ua, temp1, temp2);
+
 	  alpha_set_memflags (seq, operands[1]);
 	  emit_insn (seq);
 
@@ -2257,14 +2247,7 @@ alpha_expand_mov_nobwx (enum machine_mode mode, rtx *operands)
       return true;
     }
 
-  if (GET_CODE (operands[0]) == MEM
-      || (GET_CODE (operands[0]) == SUBREG
-	  && GET_CODE (SUBREG_REG (operands[0])) == MEM)
-      || (reload_in_progress && GET_CODE (operands[0]) == REG
-	  && REGNO (operands[0]) >= FIRST_PSEUDO_REGISTER)
-      || (reload_in_progress && GET_CODE (operands[0]) == SUBREG
-	  && GET_CODE (SUBREG_REG (operands[0])) == REG
-	  && REGNO (operands[0]) >= FIRST_PSEUDO_REGISTER))
+  if (any_memory_operand (operands[0], mode))
     {
       if (aligned_memory_operand (operands[0], mode))
 	{
@@ -2282,11 +2265,12 @@ alpha_expand_mov_nobwx (enum machine_mode mode, rtx *operands)
 	  rtx temp1 = gen_reg_rtx (DImode);
 	  rtx temp2 = gen_reg_rtx (DImode);
 	  rtx temp3 = gen_reg_rtx (DImode);
-	  rtx seq = ((mode == QImode
-		      ? gen_unaligned_storeqi
-		      : gen_unaligned_storehi)
-		     (get_unaligned_address (operands[0]),
-		      operands[1], temp1, temp2, temp3));
+	  rtx ua = get_unaligned_address (operands[0]);
+
+	  if (mode == QImode)
+	    seq = gen_unaligned_storeqi (ua, operands[1], temp1, temp2, temp3);
+	  else
+	    seq = gen_unaligned_storehi (ua, operands[1], temp1, temp2, temp3);
 
 	  alpha_set_memflags (seq, operands[0]);
 	  emit_insn (seq);
@@ -10703,6 +10687,9 @@ alpha_init_libfuncs (void)
 #undef TARGET_ARG_PARTIAL_BYTES
 #define TARGET_ARG_PARTIAL_BYTES alpha_arg_partial_bytes
 
+#undef TARGET_SECONDARY_RELOAD
+#define TARGET_SECONDARY_RELOAD alpha_secondary_reload
+
 #undef TARGET_SCALAR_MODE_SUPPORTED_P
 #define TARGET_SCALAR_MODE_SUPPORTED_P alpha_scalar_mode_supported_p
 #undef TARGET_VECTOR_MODE_SUPPORTED_P

gcc/config/alpha/alpha.h

@@ -582,19 +582,6 @@ enum reg_class {
 
 #define PREFERRED_RELOAD_CLASS  alpha_preferred_reload_class
 
-/* Loading and storing HImode or QImode values to and from memory
-   usually requires a scratch register.  The exceptions are loading
-   QImode and HImode from an aligned address to a general register
-   unless byte instructions are permitted.
-
-   We also cannot load an unaligned address or a paradoxical SUBREG into an
-   FP register.  */
-
-#define SECONDARY_INPUT_RELOAD_CLASS(CLASS,MODE,IN) \
-  alpha_secondary_reload_class((CLASS), (MODE), (IN), 1)
-
-#define SECONDARY_OUTPUT_RELOAD_CLASS(CLASS,MODE,OUT) \
-  alpha_secondary_reload_class((CLASS), (MODE), (OUT), 0)
-
 /* If we are copying between general and FP registers, we need a memory
    location unless the FIX extension is available.  */

gcc/config/alpha/alpha.md

@@ -87,6 +87,16 @@
    (UNSPECV_SC		16)	; store-conditional
   ])
 
+;; On non-BWX targets, CQImode must be handled the similarly to HImode
+;; when generating reloads.
+(define_mode_macro RELOAD12 [QI HI CQI])
+(define_mode_attr reloadmode [(QI "qi") (HI "hi") (CQI "hi")])
+
+;; Other mode macros
+(define_mode_macro I12MODE [QI HI])
+(define_mode_macro I48MODE [SI DI])
+(define_mode_attr modesuffix [(SI "l") (DI "q")])
+
 ;; Where necessary, the suffixes _le and _be are used to distinguish between
 ;; little-endian and big-endian patterns.
 ;;
@@ -6085,136 +6095,120 @@
   DONE;
 })
 
-;; Here are the versions for reload.  Note that in the unaligned cases
-;; we know that the operand must not be a pseudo-register because stack
-;; slots are always aligned references.
+;; We need to hook into the extra support that we have for HImode
+;; reloads when BWX insns are not available.
 
-(define_expand "reload_inqi"
-  [(parallel [(match_operand:QI 0 "register_operand" "=r")
-	      (match_operand:QI 1 "any_memory_operand" "m")
-	      (match_operand:TI 2 "register_operand" "=&r")])]
-  "! TARGET_BWX"
+(define_expand "movcqi"
+  [(set (match_operand:CQI 0 "nonimmediate_operand" "")
+	(match_operand:CQI 1 "general_operand" ""))]
+  "!TARGET_BWX"
 {
-  rtx scratch, seq;
-
-  if (aligned_memory_operand (operands[1], QImode))
+  if (GET_CODE (operands[0]) == CONCAT || GET_CODE (operands[1]) == CONCAT)
+    ;
+  else if (!any_memory_operand (operands[0], CQImode))
     {
-      seq = gen_reload_inqi_help (operands[0], operands[1],
-				  gen_rtx_REG (SImode, REGNO (operands[2])));
+      if (!any_memory_operand (operands[1], CQImode))
+	{
+	  emit_move_insn (gen_lowpart (HImode, operands[0]),
+			  gen_lowpart (HImode, operands[1]));
+	  DONE;
+	}
+      if (aligned_memory_operand (operands[1], CQImode))
+	{
+	  bool done;
+	do_aligned1:
+	  operands[1] = gen_lowpart (HImode, operands[1]);
+	do_aligned2:
+	  operands[0] = gen_lowpart (HImode, operands[0]);
+	  done = alpha_expand_mov_nobwx (HImode, operands);
+	  gcc_assert (done);
+	  DONE;
+	}
     }
-  else
+  else if (aligned_memory_operand (operands[0], CQImode))
     {
-      rtx addr;
-
-      /* It is possible that one of the registers we got for operands[2]
-	 might coincide with that of operands[0] (which is why we made
-	 it TImode).  Pick the other one to use as our scratch.  */
-      if (REGNO (operands[0]) == REGNO (operands[2]))
-	scratch = gen_rtx_REG (DImode, REGNO (operands[2]) + 1);
-      else
-	scratch = gen_rtx_REG (DImode, REGNO (operands[2]));
-
-      addr = get_unaligned_address (operands[1]);
-      operands[0] = gen_rtx_REG (DImode, REGNO (operands[0]));
-      seq = gen_unaligned_loadqi (operands[0], addr, scratch, operands[0]);
-      alpha_set_memflags (seq, operands[1]);
+      if (MEM_P (operands[1]))
+	{
+	  rtx x = gen_reg_rtx (HImode);
+	  emit_move_insn (gen_lowpart (CQImode, x), operands[1]);
+	  operands[1] = x;
+	  goto do_aligned2;
+	}
+      goto do_aligned1;
     }
+
+  gcc_assert (!reload_in_progress);
+  emit_move_complex_parts (operands[0], operands[1]);
+  DONE;
+})
+
+;; Here are the versions for reload.
+;;
+;; The aligned input case is recognized early in alpha_secondary_reload
+;; in order to avoid allocating an unnecessary scratch register.
+;;
+;; Note that in the unaligned cases we know that the operand must not be
+;; a pseudo-register because stack slots are always aligned references.
+
+(define_expand "reload_in<mode>"
+  [(parallel [(match_operand:RELOAD12 0 "register_operand" "=r")
+	      (match_operand:RELOAD12 1 "any_memory_operand" "m")
+	      (match_operand:TI 2 "register_operand" "=&r")])]
+  "!TARGET_BWX"
+{
+  rtx scratch, seq, addr;
+  unsigned regno = REGNO (operands[2]);
+
+  /* It is possible that one of the registers we got for operands[2]
+     might coincide with that of operands[0] (which is why we made
+     it TImode).  Pick the other one to use as our scratch.  */
+  if (regno == REGNO (operands[0]))
+    regno++;
+  scratch = gen_rtx_REG (DImode, regno);
+
+  addr = get_unaligned_address (operands[1]);
+  operands[0] = gen_rtx_REG (DImode, REGNO (operands[0]));
+  seq = gen_unaligned_load<reloadmode> (operands[0], addr,
+					scratch, operands[0]);
+  alpha_set_memflags (seq, operands[1]);
+
   emit_insn (seq);
   DONE;
 })
 
-(define_expand "reload_inhi"
-  [(parallel [(match_operand:HI 0 "register_operand" "=r")
-	      (match_operand:HI 1 "any_memory_operand" "m")
+(define_expand "reload_out<mode>"
+  [(parallel [(match_operand:RELOAD12 0 "any_memory_operand" "=m")
+	      (match_operand:RELOAD12 1 "register_operand" "r")
 	      (match_operand:TI 2 "register_operand" "=&r")])]
   "! TARGET_BWX"
 {
-  rtx scratch, seq;
+  unsigned regno = REGNO (operands[2]);
 
-  if (aligned_memory_operand (operands[1], HImode))
+  if (<MODE>mode == CQImode)
     {
-      seq = gen_reload_inhi_help (operands[0], operands[1],
-				  gen_rtx_REG (SImode, REGNO (operands[2])));
+      operands[0] = gen_lowpart (HImode, operands[0]);
+      operands[1] = gen_lowpart (HImode, operands[1]);
     }
-  else
+
+  if (aligned_memory_operand (operands[0], <MODE>mode))
     {
-      rtx addr;
-
-      /* It is possible that one of the registers we got for operands[2]
-	 might coincide with that of operands[0] (which is why we made
-	 it TImode).  Pick the other one to use as our scratch.  */
-      if (REGNO (operands[0]) == REGNO (operands[2]))
-	scratch = gen_rtx_REG (DImode, REGNO (operands[2]) + 1);
-      else
-	scratch = gen_rtx_REG (DImode, REGNO (operands[2]));
-
-      addr = get_unaligned_address (operands[1]);
-      operands[0] = gen_rtx_REG (DImode, REGNO (operands[0]));
-      seq = gen_unaligned_loadhi (operands[0], addr, scratch, operands[0]);
-      alpha_set_memflags (seq, operands[1]);
-    }
-  emit_insn (seq);
-  DONE;
-})
-
-(define_expand "reload_outqi"
-  [(parallel [(match_operand:QI 0 "any_memory_operand" "=m")
-	      (match_operand:QI 1 "register_operand" "r")
-	      (match_operand:TI 2 "register_operand" "=&r")])]
-  "! TARGET_BWX"
-{
-  if (aligned_memory_operand (operands[0], QImode))
-    {
-      emit_insn (gen_reload_outqi_help
+      emit_insn (gen_reload_out<reloadmode>_aligned
 		 (operands[0], operands[1],
-		  gen_rtx_REG (SImode, REGNO (operands[2])),
-		  gen_rtx_REG (SImode, REGNO (operands[2]) + 1)));
+		  gen_rtx_REG (SImode, regno),
+		  gen_rtx_REG (SImode, regno + 1)));
     }
   else
     {
       rtx addr = get_unaligned_address (operands[0]);
-      rtx scratch1 = gen_rtx_REG (DImode, REGNO (operands[2]));
-      rtx scratch2 = gen_rtx_REG (DImode, REGNO (operands[2]) + 1);
+      rtx scratch1 = gen_rtx_REG (DImode, regno);
+      rtx scratch2 = gen_rtx_REG (DImode, regno + 1);
       rtx scratch3 = scratch1;
       rtx seq;
 
       if (GET_CODE (addr) == REG)
 	scratch1 = addr;
 
-      seq = gen_unaligned_storeqi (addr, operands[1], scratch1,
-				   scratch2, scratch3);
-      alpha_set_memflags (seq, operands[0]);
-      emit_insn (seq);
-    }
-  DONE;
-})
-
-(define_expand "reload_outhi"
-  [(parallel [(match_operand:HI 0 "any_memory_operand" "=m")
-	      (match_operand:HI 1 "register_operand" "r")
-	      (match_operand:TI 2 "register_operand" "=&r")])]
-  "! TARGET_BWX"
-{
-  if (aligned_memory_operand (operands[0], HImode))
-    {
-      emit_insn (gen_reload_outhi_help
-		 (operands[0], operands[1],
-		  gen_rtx_REG (SImode, REGNO (operands[2])),
-		  gen_rtx_REG (SImode, REGNO (operands[2]) + 1)));
-    }
-  else
-    {
-      rtx addr = get_unaligned_address (operands[0]);
-      rtx scratch1 = gen_rtx_REG (DImode, REGNO (operands[2]));
-      rtx scratch2 = gen_rtx_REG (DImode, REGNO (operands[2]) + 1);
-      rtx scratch3 = scratch1;
-      rtx seq;
-
-      if (GET_CODE (addr) == REG)
-	scratch1 = addr;
-
-      seq = gen_unaligned_storehi (addr, operands[1], scratch1,
-				   scratch2, scratch3);
+      seq = gen_unaligned_store<reloadmode> (addr, operands[1], scratch1,
+					     scratch2, scratch3);
       alpha_set_memflags (seq, operands[0]);
       emit_insn (seq);
     }
@@ -6225,65 +6219,30 @@
 ;; always get a proper address for a stack slot during reload_foo
 ;; expansion, so we must delay our address manipulations until after.
 
-(define_insn_and_split "reload_inqi_help"
-  [(set (match_operand:QI 0 "register_operand" "=r")
-	(match_operand:QI 1 "memory_operand" "m"))
-   (clobber (match_operand:SI 2 "register_operand" "=r"))]
-  "! TARGET_BWX && (reload_in_progress || reload_completed)"
+(define_insn_and_split "reload_in<mode>_aligned"
+  [(set (match_operand:I12MODE 0 "register_operand" "=r")
+	(match_operand:I12MODE 1 "memory_operand" "m"))]
+  "!TARGET_BWX && (reload_in_progress || reload_completed)"
   "#"
-  "! TARGET_BWX && reload_completed"
+  "!TARGET_BWX && reload_completed"
   [(const_int 0)]
 {
   rtx aligned_mem, bitnum;
   get_aligned_mem (operands[1], &aligned_mem, &bitnum);
-  operands[0] = gen_lowpart (DImode, operands[0]);
-  emit_insn (gen_aligned_loadqi (operands[0], aligned_mem, bitnum,
-				 operands[2]));
+  emit_insn (gen_aligned_load<reloadmode>
	     (gen_lowpart (DImode, operands[0]), aligned_mem, bitnum,
+	      gen_rtx_REG (SImode, REGNO (operands[0]))));
   DONE;
 })
 
-(define_insn_and_split "reload_inhi_help"
-  [(set (match_operand:HI 0 "register_operand" "=r")
-	(match_operand:HI 1 "memory_operand" "m"))
-   (clobber (match_operand:SI 2 "register_operand" "=r"))]
-  "! TARGET_BWX && (reload_in_progress || reload_completed)"
-  "#"
-  "! TARGET_BWX && reload_completed"
-  [(const_int 0)]
-{
-  rtx aligned_mem, bitnum;
-  get_aligned_mem (operands[1], &aligned_mem, &bitnum);
-  operands[0] = gen_lowpart (DImode, operands[0]);
-  emit_insn (gen_aligned_loadhi (operands[0], aligned_mem, bitnum,
-				 operands[2]));
-  DONE;
-})
-
-(define_insn_and_split "reload_outqi_help"
-  [(set (match_operand:QI 0 "memory_operand" "=m")
-	(match_operand:QI 1 "register_operand" "r"))
+(define_insn_and_split "reload_out<mode>_aligned"
+  [(set (match_operand:I12MODE 0 "memory_operand" "=m")
+	(match_operand:I12MODE 1 "register_operand" "r"))
    (clobber (match_operand:SI 2 "register_operand" "=r"))
   (clobber (match_operand:SI 3 "register_operand" "=r"))]
-  "! TARGET_BWX && (reload_in_progress || reload_completed)"
+  "!TARGET_BWX && (reload_in_progress || reload_completed)"
   "#"
-  "! TARGET_BWX && reload_completed"
-  [(const_int 0)]
-{
-  rtx aligned_mem, bitnum;
-  get_aligned_mem (operands[0], &aligned_mem, &bitnum);
-  emit_insn (gen_aligned_store (aligned_mem, operands[1], bitnum,
-				operands[2], operands[3]));
-  DONE;
-})
-
-(define_insn_and_split "reload_outhi_help"
-  [(set (match_operand:HI 0 "memory_operand" "=m")
-	(match_operand:HI 1 "register_operand" "r"))
-   (clobber (match_operand:SI 2 "register_operand" "=r"))
-   (clobber (match_operand:SI 3 "register_operand" "=r"))]
-  "! TARGET_BWX && (reload_in_progress || reload_completed)"
-  "#"
-  "! TARGET_BWX && reload_completed"
+  "!TARGET_BWX && reload_completed"
   [(const_int 0)]
 {
   rtx aligned_mem, bitnum;

gcc/config/alpha/predicates.md

@@ -434,7 +434,7 @@
 ;; use recog during reload, so pretending these codes are accepted
 ;; pessimizes things a tad.
 
-(define_predicate "aligned_memory_operand"
+(define_special_predicate "aligned_memory_operand"
   (ior (match_test "op = resolve_reload_operand (op), 0")
        (match_code "mem"))
 {
@@ -462,7 +462,7 @@
 
 ;; Similar, but return 1 if OP is a MEM which is not alignable.
 
-(define_predicate "unaligned_memory_operand"
+(define_special_predicate "unaligned_memory_operand"
   (ior (match_test "op = resolve_reload_operand (op), 0")
       (match_code "mem"))
 {
@@ -489,20 +489,30 @@
 })
 
 ;; Return 1 if OP is any memory location.  During reload a pseudo matches.
-(define_predicate "any_memory_operand"
-  (ior (match_code "mem,reg")
-       (and (match_code "subreg")
-	    (match_test "GET_CODE (SUBREG_REG (op)) == REG"))))
-
-;; Return 1 if OP is either a register or an unaligned memory location.
-(define_predicate "reg_or_unaligned_mem_operand"
-  (ior (match_operand 0 "register_operand")
-       (match_operand 0 "unaligned_memory_operand")))
+(define_special_predicate "any_memory_operand"
+  (match_code "mem,reg,subreg")
+{
+  if (GET_CODE (op) == SUBREG)
+    op = SUBREG_REG (op);
+  if (MEM_P (op))
+    return true;
+  if (reload_in_progress && REG_P (op))
+    {
+      unsigned regno = REGNO (op);
+      if (HARD_REGISTER_NUM_P (regno))
+	return false;
+      else
+	return reg_renumber[regno] < 0;
+    }
+  return false;
+})
 
 ;; Return 1 is OP is a memory location that is not a reference
 ;; (using an AND) to an unaligned location.  Take into account
 ;; what reload will do.
-(define_predicate "normal_memory_operand"
+(define_special_predicate "normal_memory_operand"
   (ior (match_test "op = resolve_reload_operand (op), 0")
       (and (match_code "mem")
 	    (match_test "GET_CODE (XEXP (op, 0)) != AND"))))

gcc/config/alpha/sync.md

@@ -19,10 +19,6 @@
 ;; the Free Software Foundation, 51 Franklin Street, Fifth Floor,
 ;; Boston, MA 02110-1301, USA.
 
-(define_mode_macro I12MODE [QI HI])
-(define_mode_macro I48MODE [SI DI])
-(define_mode_attr modesuffix [(SI "l") (DI "q")])
-
 (define_code_macro FETCHOP [plus minus ior xor and])
 (define_code_attr fetchop_name
   [(plus "add") (minus "sub") (ior "ior") (xor "xor") (and "and")])