arm.c (arm_legitimate_address_p): New argument, OUTER.

* arm.c (arm_legitimate_address_p): New argument, OUTER.  Pass through
to arm_legitimate_index_p.  Update all callers with SET as default
value.
(arm_legitimate_index_p): New argument, OUTER.  Restrict the index
range if OUTER is a sign-extend operation on QImode.  Correctly
reject shift operations on sign-extended QImode addresses.
(bad_signed_byte_operand): Delete.
(arm_extendqisi_mem_op): New function.
* arm.h (EXTRA_CONSTRAINT_ARM): Delete.  Replace with...
(EXTRA_CONSTRAINT_STR_ARM): ... this.  Handle extended address
constraints.
(CONSTRAINT_LEN): New.
(EXTRA_CONSTRAINT): Delete.  Replace with...
(EXTRA_CONSTRAINT_STR): ... this.
(PREDICATE_CODES): Remove bad_signed_byte_operand.
* arm.md (extendqihi_insn): Use new constraint Uq.  Rework.  Length
is now always default.
(define_splits for bad sign-extend loads): Delete.
(arm_extendqisi, arm_extendqisi_v5): Likewise.
* arm/vfp.md (arm_movsi_vfp, arm_movdi_vfp, movsf_vfp, movdf_vfp):
Rework 'U' constraint to 'Uv'.
* arm-protos.h: Remove bad_signed_byte_operand.  Add
arm_extendqisi_mem_op.
* doc/md.texi (ARM constraints): Rename VFP constraint (now Uv).
Add Uq constraint.

From-SVN: r79440
This commit is contained in:
Richard Earnshaw 2004-03-13 11:19:23 +00:00 committed by Richard Earnshaw
parent fa78dbea45
commit 1e1ab407a6
7 changed files with 121 additions and 190 deletions

View File

@ -1,3 +1,31 @@
2004-03-13 Richard Earnshaw <rearnsha@arm.com>
* arm.c (arm_legitimate_address_p): New argument, OUTER. Pass through
to arm_legitimate_index_p. Update all callers with SET as default
value.
(arm_legitimate_index_p): New argument, OUTER. Restrict the index
range if OUTER is a sign-extend operation on QImode. Correctly
reject shift operations on sign-extended QImode addresses.
(bad_signed_byte_operand): Delete.
(arm_extendqisi_mem_op): New function.
* arm.h (EXTRA_CONSTRAINT_ARM): Delete. Replace with...
(EXTRA_CONSTRAINT_STR_ARM): ... this. Handle extended address
constraints.
(CONSTRAINT_LEN): New.
(EXTRA_CONSTRAINT): Delete. Replace with...
(EXTRA_CONSTRAINT_STR): ... this.
(PREDICATE_CODES): Remove bad_signed_byte_operand.
* arm.md (extendqihi_insn): Use new constraint Uq. Rework. Length
is now always default.
(define_splits for bad sign-extend loads): Delete.
(arm_extendqisi, arm_extendqisi_v5): Likewise.
* arm/vfp.md (arm_movsi_vfp, arm_movdi_vfp, movsf_vfp, movdf_vfp):
Rework 'U' constraint to 'Uv'.
* arm-protos.h: Remove bad_signed_byte_operand. Add
arm_extendqisi_mem_op.
* doc/md.texi (ARM constraints): Rename VFP constraint (now Uv).
Add Uq constraint.
2004-03-13 Alan Modra <amodra@bigpond.net.au>
* config/rs6000/rs6000.c (rs6000_va_arg): Replace SPLIT_COMPLEX_ARGS

View File

@ -50,7 +50,7 @@ extern int arm_split_constant (RTX_CODE, enum machine_mode, HOST_WIDE_INT, rtx,
extern RTX_CODE arm_canonicalize_comparison (RTX_CODE, rtx *);
extern int legitimate_pic_operand_p (rtx);
extern rtx legitimize_pic_address (rtx, enum machine_mode, rtx);
extern int arm_legitimate_address_p (enum machine_mode, rtx, int);
extern int arm_legitimate_address_p (enum machine_mode, rtx, RTX_CODE, int);
extern int thumb_legitimate_address_p (enum machine_mode, rtx, int);
extern int thumb_legitimate_offset_p (enum machine_mode, HOST_WIDE_INT);
extern rtx arm_legitimize_address (rtx, rtx, enum machine_mode);
@ -71,9 +71,9 @@ extern int arm_rhsm_operand (rtx, enum machine_mode);
extern int arm_add_operand (rtx, enum machine_mode);
extern int arm_addimm_operand (rtx, enum machine_mode);
extern int arm_not_operand (rtx, enum machine_mode);
extern int arm_extendqisi_mem_op (rtx, enum machine_mode);
extern int offsettable_memory_operand (rtx, enum machine_mode);
extern int alignable_memory_operand (rtx, enum machine_mode);
extern int bad_signed_byte_operand (rtx, enum machine_mode);
extern int arm_float_rhs_operand (rtx, enum machine_mode);
extern int arm_float_add_operand (rtx, enum machine_mode);
extern int power_of_two_operand (rtx, enum machine_mode);

View File

@ -64,7 +64,7 @@ static int arm_gen_constant (enum rtx_code, enum machine_mode, HOST_WIDE_INT,
rtx, rtx, int, int);
static unsigned bit_count (unsigned long);
static int arm_address_register_rtx_p (rtx, int);
static int arm_legitimate_index_p (enum machine_mode, rtx, int);
static int arm_legitimate_index_p (enum machine_mode, rtx, RTX_CODE, int);
static int thumb_base_register_rtx_p (rtx, enum machine_mode, int);
inline static int thumb_index_register_rtx_p (rtx, int);
static int const_ok_for_op (HOST_WIDE_INT, enum rtx_code);
@ -2719,7 +2719,7 @@ legitimize_pic_address (rtx orig, enum machine_mode mode, rtx reg)
{
/* The base register doesn't really matter, we only want to
test the index for the appropriate mode. */
if (!arm_legitimate_index_p (mode, offset, 0))
if (!arm_legitimate_index_p (mode, offset, SET, 0))
{
if (!no_new_pseudos)
offset = force_reg (Pmode, offset);
@ -2824,7 +2824,8 @@ arm_address_register_rtx_p (rtx x, int strict_p)
/* Return nonzero if X is a valid ARM state address operand. */
int
arm_legitimate_address_p (enum machine_mode mode, rtx x, int strict_p)
arm_legitimate_address_p (enum machine_mode mode, rtx x, RTX_CODE outer,
int strict_p)
{
if (arm_address_register_rtx_p (x, strict_p))
return 1;
@ -2837,7 +2838,8 @@ arm_legitimate_address_p (enum machine_mode mode, rtx x, int strict_p)
&& arm_address_register_rtx_p (XEXP (x, 0), strict_p)
&& GET_CODE (XEXP (x, 1)) == PLUS
&& XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
return arm_legitimate_index_p (mode, XEXP (XEXP (x, 1), 1), strict_p);
return arm_legitimate_index_p (mode, XEXP (XEXP (x, 1), 1), outer,
strict_p);
/* After reload constants split into minipools will have addresses
from a LABEL_REF. */
@ -2889,9 +2891,9 @@ arm_legitimate_address_p (enum machine_mode mode, rtx x, int strict_p)
rtx xop1 = XEXP (x, 1);
return ((arm_address_register_rtx_p (xop0, strict_p)
&& arm_legitimate_index_p (mode, xop1, strict_p))
&& arm_legitimate_index_p (mode, xop1, outer, strict_p))
|| (arm_address_register_rtx_p (xop1, strict_p)
&& arm_legitimate_index_p (mode, xop0, strict_p)));
&& arm_legitimate_index_p (mode, xop0, outer, strict_p)));
}
#if 0
@ -2902,7 +2904,7 @@ arm_legitimate_address_p (enum machine_mode mode, rtx x, int strict_p)
rtx xop1 = XEXP (x, 1);
return (arm_address_register_rtx_p (xop0, strict_p)
&& arm_legitimate_index_p (mode, xop1, strict_p));
&& arm_legitimate_index_p (mode, xop1, outer, strict_p));
}
#endif
@ -2924,7 +2926,8 @@ arm_legitimate_address_p (enum machine_mode mode, rtx x, int strict_p)
/* Return nonzero if INDEX is valid for an address index operand in
ARM state. */
static int
arm_legitimate_index_p (enum machine_mode mode, rtx index, int strict_p)
arm_legitimate_index_p (enum machine_mode mode, rtx index, RTX_CODE outer,
int strict_p)
{
HOST_WIDE_INT range;
enum rtx_code code = GET_CODE (index);
@ -2949,38 +2952,42 @@ arm_legitimate_index_p (enum machine_mode mode, rtx index, int strict_p)
&& INTVAL (index) < 256
&& INTVAL (index) > -256);
/* XXX What about ldrsb? */
if (GET_MODE_SIZE (mode) <= 4 && code == MULT
&& (!arm_arch4 || (mode) != HImode))
{
rtx xiop0 = XEXP (index, 0);
rtx xiop1 = XEXP (index, 1);
return ((arm_address_register_rtx_p (xiop0, strict_p)
&& power_of_two_operand (xiop1, SImode))
|| (arm_address_register_rtx_p (xiop1, strict_p)
&& power_of_two_operand (xiop0, SImode)));
}
if (GET_MODE_SIZE (mode) <= 4
&& (code == LSHIFTRT || code == ASHIFTRT
|| code == ASHIFT || code == ROTATERT)
&& (!arm_arch4 || (mode) != HImode))
&& ! (arm_arch4
&& (mode == HImode
|| (mode == QImode && outer == SIGN_EXTEND))))
{
rtx op = XEXP (index, 1);
if (code == MULT)
{
rtx xiop0 = XEXP (index, 0);
rtx xiop1 = XEXP (index, 1);
return (arm_address_register_rtx_p (XEXP (index, 0), strict_p)
&& GET_CODE (op) == CONST_INT
&& INTVAL (op) > 0
&& INTVAL (op) <= 31);
return ((arm_address_register_rtx_p (xiop0, strict_p)
&& power_of_two_operand (xiop1, SImode))
|| (arm_address_register_rtx_p (xiop1, strict_p)
&& power_of_two_operand (xiop0, SImode)));
}
else if (code == LSHIFTRT || code == ASHIFTRT
|| code == ASHIFT || code == ROTATERT)
{
rtx op = XEXP (index, 1);
return (arm_address_register_rtx_p (XEXP (index, 0), strict_p)
&& GET_CODE (op) == CONST_INT
&& INTVAL (op) > 0
&& INTVAL (op) <= 31);
}
}
/* XXX For ARM v4 we may be doing a sign-extend operation during the
load, but that has a restricted addressing range and we are unable
to tell here whether that is the case. To be safe we restrict all
loads to that range. */
/* For ARM v4 we may be doing a sign-extend operation during the
load. */
if (arm_arch4)
range = (mode == HImode || mode == QImode) ? 256 : 4096;
{
if (mode == HImode || (outer == SIGN_EXTEND && mode == QImode))
range = 256;
else
range = 4096;
}
else
range = (mode == HImode) ? 4095 : 4096;
@ -4211,37 +4218,6 @@ arm_reload_memory_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
&& REGNO (op) >= FIRST_PSEUDO_REGISTER)));
}
/* Return 1 if OP is a valid memory address, but not valid for a signed byte
memory access (architecture V4, where ldrsb only accepts a restricted
set of addressing modes).
MODE is QImode if called when computing constraints, or VOIDmode when
emitting patterns.  In this latter case we cannot use memory_operand()
because it will fail on badly formed MEMs, which is precisely what we are
trying to catch.  */
int
bad_signed_byte_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
if (GET_CODE (op) != MEM)
return 0;
op = XEXP (op, 0);
/* A sum of anything more complex than reg + reg or reg + const is bad.  */
if ((GET_CODE (op) == PLUS || GET_CODE (op) == MINUS)
&& (!s_register_operand (XEXP (op, 0), VOIDmode)
|| (!s_register_operand (XEXP (op, 1), VOIDmode)
&& GET_CODE (XEXP (op, 1)) != CONST_INT)))
return 1;
/* Constant offsets outside the range ldrsb can encode (+/-255) are
also bad.  */
if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT
&& (INTVAL (XEXP (op, 1)) > 0xff
|| -INTVAL (XEXP (op, 1)) > 0xff))
return 1;
/* Everything else is good, or will automatically be made so.  */
return 0;
}
/* Return TRUE for valid operands for the rhs of an ARM instruction. */
int
arm_rhs_operand (rtx op, enum machine_mode mode)
@ -4434,6 +4410,15 @@ cirrus_memory_offset (rtx op)
return 0;
}
/* Return nonzero if OP is a MEM whose address is valid for a QImode
sign-extending load (the ldrsb instruction) in ARM state.  Uses a
non-strict register check, so it is safe to call before reload.  */
int
arm_extendqisi_mem_op (rtx op, enum machine_mode mode)
{
/* OP must be a well-formed memory operand to start with.  */
if (!memory_operand (op, mode))
return 0;
/* Passing SIGN_EXTEND as the outer code selects the restricted
ldrsb index range; the final 0 requests a non-strict check.  */
return arm_legitimate_address_p (mode, XEXP (op, 0), SIGN_EXTEND, 0);
}
/* Return nonzero if OP is a Cirrus or general register. */
int
cirrus_register_operand (rtx op, enum machine_mode mode)

View File

@ -1471,23 +1471,30 @@ enum reg_class
accessed without using a load.
'U' is an address valid for VFP load/store insns. */
#define EXTRA_CONSTRAINT_ARM(OP, C) \
((C) == 'Q' ? GET_CODE (OP) == MEM && GET_CODE (XEXP (OP, 0)) == REG : \
(C) == 'R' ? (GET_CODE (OP) == MEM \
&& GET_CODE (XEXP (OP, 0)) == SYMBOL_REF \
&& CONSTANT_POOL_ADDRESS_P (XEXP (OP, 0))) : \
(C) == 'S' ? (optimize > 0 && CONSTANT_ADDRESS_P (OP)) : \
(C) == 'T' ? cirrus_memory_offset (OP) : \
(C) == 'U' ? vfp_mem_operand (OP) : \
0)
#define EXTRA_CONSTRAINT_STR_ARM(OP, C, STR) \
(((C) == 'Q') ? (GET_CODE (OP) == MEM \
&& GET_CODE (XEXP (OP, 0)) == REG) : \
((C) == 'R') ? (GET_CODE (OP) == MEM \
&& GET_CODE (XEXP (OP, 0)) == SYMBOL_REF \
&& CONSTANT_POOL_ADDRESS_P (XEXP (OP, 0))) : \
((C) == 'S') ? (optimize > 0 && CONSTANT_ADDRESS_P (OP)) : \
((C) == 'T') ? cirrus_memory_offset (OP) : \
((C) == 'U' && (STR)[1] == 'v') ? vfp_mem_operand (OP) : \
((C) == 'U' && (STR)[1] == 'q') \
? arm_extendqisi_mem_op (OP, GET_MODE (OP)) \
: 0)
#define CONSTRAINT_LEN(C,STR) \
((C) == 'U' ? 2 : DEFAULT_CONSTRAINT_LEN (C, STR))
#define EXTRA_CONSTRAINT_THUMB(X, C) \
((C) == 'Q' ? (GET_CODE (X) == MEM \
&& GET_CODE (XEXP (X, 0)) == LABEL_REF) : 0)
#define EXTRA_CONSTRAINT(X, C) \
(TARGET_ARM ? \
EXTRA_CONSTRAINT_ARM (X, C) : EXTRA_CONSTRAINT_THUMB (X, C))
#define EXTRA_CONSTRAINT_STR(X, C, STR) \
(TARGET_ARM \
? EXTRA_CONSTRAINT_STR_ARM (X, C, STR) \
: EXTRA_CONSTRAINT_THUMB (X, C))
#define EXTRA_MEMORY_CONSTRAINT(C, STR) ((C) == 'U')
@ -2336,7 +2343,7 @@ typedef struct
#define ARM_GO_IF_LEGITIMATE_ADDRESS(MODE,X,WIN) \
{ \
if (arm_legitimate_address_p (MODE, X, REG_STRICT_P)) \
if (arm_legitimate_address_p (MODE, X, SET, REG_STRICT_P)) \
goto WIN; \
}
@ -2817,7 +2824,6 @@ extern int making_const_table;
{"thumb_cmpneg_operand", {CONST_INT}}, \
{"thumb_cbrch_target_operand", {SUBREG, REG, MEM}}, \
{"offsettable_memory_operand", {MEM}}, \
{"bad_signed_byte_operand", {MEM}}, \
{"alignable_memory_operand", {MEM}}, \
{"shiftable_operator", {PLUS, MINUS, AND, IOR, XOR}}, \
{"minmax_operator", {SMIN, SMAX, UMIN, UMAX}}, \

View File

@ -3748,59 +3748,17 @@
}"
)
; Rather than restricting all byte accesses to memory addresses that ldrsb
; can handle, we fix up the ones that ldrsb can't grok with a split.
(define_insn "*extendqihi_insn"
[(set (match_operand:HI 0 "s_register_operand" "=r")
(sign_extend:HI (match_operand:QI 1 "memory_operand" "m")))]
[(set (match_operand:HI 0 "s_register_operand" "=r")
(sign_extend:HI (match_operand:QI 1 "memory_operand" "Uq")))]
"TARGET_ARM && arm_arch4"
"*
/* If the address is invalid, this will split the instruction into two. */
if (bad_signed_byte_operand (operands[1], VOIDmode))
return \"#\";
return \"ldr%?sb\\t%0, %1\";
"
"ldr%?sb\\t%0, %1"
[(set_attr "type" "load_byte")
(set_attr "predicable" "yes")
(set_attr "length" "8")
(set_attr "pool_range" "256")
(set_attr "neg_pool_range" "244")]
)
(define_split
[(set (match_operand:HI 0 "s_register_operand" "")
(sign_extend:HI (match_operand:QI 1 "bad_signed_byte_operand" "")))]
"TARGET_ARM && arm_arch4 && reload_completed"
[(set (match_dup 3) (match_dup 1))
(set (match_dup 0) (sign_extend:HI (match_dup 2)))]
"
{
HOST_WIDE_INT offset;
operands[3] = gen_rtx_REG (SImode, REGNO (operands[0]));
operands[2] = gen_rtx_MEM (QImode, operands[3]);
MEM_COPY_ATTRIBUTES (operands[2], operands[1]);
operands[1] = XEXP (operands[1], 0);
if (GET_CODE (operands[1]) == PLUS
&& GET_CODE (XEXP (operands[1], 1)) == CONST_INT
&& !(const_ok_for_arm (offset = INTVAL (XEXP (operands[1], 1)))
|| const_ok_for_arm (-offset)))
{
HOST_WIDE_INT low = (offset > 0
? (offset & 0xff) : -((-offset) & 0xff));
XEXP (operands[2], 0) = plus_constant (operands[3], low);
operands[1] = plus_constant (XEXP (operands[1], 0), offset - low);
}
/* Ensure the sum is in correct canonical form. */
else if (GET_CODE (operands[1]) == PLUS
&& GET_CODE (XEXP (operands[1], 1)) != CONST_INT
&& !s_register_operand (XEXP (operands[1], 1), VOIDmode))
operands[1] = gen_rtx_PLUS (GET_MODE (operands[1]),
XEXP (operands[1], 1),
XEXP (operands[1], 0));
}"
)
(define_expand "extendqisi2"
[(set (match_dup 2)
(ashift:SI (match_operand:QI 1 "general_operand" "")
@ -3833,42 +3791,26 @@
}"
)
; Rather than restricting all byte accesses to memory addresses that ldrsb
; can handle, we fix up the ones that ldrsb can't grok with a split.
(define_insn "*arm_extendqisi"
[(set (match_operand:SI 0 "s_register_operand" "=r")
(sign_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
(sign_extend:SI (match_operand:QI 1 "memory_operand" "Uq")))]
"TARGET_ARM && arm_arch4 && !arm_arch6"
"*
/* If the address is invalid, this will split the instruction into two. */
if (bad_signed_byte_operand (operands[1], VOIDmode))
return \"#\";
return \"ldr%?sb\\t%0, %1\";
"
"ldr%?sb\\t%0, %1"
[(set_attr "type" "load_byte")
(set_attr "predicable" "yes")
(set_attr "length" "8")
(set_attr "pool_range" "256")
(set_attr "neg_pool_range" "244")]
)
(define_insn "*arm_extendqisi_v6"
[(set (match_operand:SI 0 "s_register_operand" "=r,r")
(sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
(sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uq")))]
"TARGET_ARM && arm_arch6"
"*
if (which_alternative == 0)
return \"sxtb%?\\t%0, %1\";
/* If the address is invalid, this will split the instruction into two. */
if (bad_signed_byte_operand (operands[1], VOIDmode))
return \"#\";
return \"ldr%?sb\\t%0, %1\";
"
"@
sxtb%?\\t%0, %1
ldr%?sb\\t%0, %1"
[(set_attr "type" "alu_shift,load_byte")
(set_attr "predicable" "yes")
(set_attr "length" "4,8")
(set_attr "pool_range" "*,256")
(set_attr "neg_pool_range" "*,244")]
)
@ -3883,39 +3825,6 @@
(set_attr "predicable" "yes")]
)
(define_split
[(set (match_operand:SI 0 "s_register_operand" "")
(sign_extend:SI (match_operand:QI 1 "bad_signed_byte_operand" "")))]
"TARGET_ARM && arm_arch4 && reload_completed"
[(set (match_dup 0) (match_dup 1))
(set (match_dup 0) (sign_extend:SI (match_dup 2)))]
"
{
HOST_WIDE_INT offset;
operands[2] = gen_rtx_MEM (QImode, operands[0]);
MEM_COPY_ATTRIBUTES (operands[2], operands[1]);
operands[1] = XEXP (operands[1], 0);
if (GET_CODE (operands[1]) == PLUS
&& GET_CODE (XEXP (operands[1], 1)) == CONST_INT
&& !(const_ok_for_arm (offset = INTVAL (XEXP (operands[1], 1)))
|| const_ok_for_arm (-offset)))
{
HOST_WIDE_INT low = (offset > 0
? (offset & 0xff) : -((-offset) & 0xff));
XEXP (operands[2], 0) = plus_constant (operands[0], low);
operands[1] = plus_constant (XEXP (operands[1], 0), offset - low);
}
/* Ensure the sum is in correct canonical form. */
else if (GET_CODE (operands[1]) == PLUS
&& GET_CODE (XEXP (operands[1], 1)) != CONST_INT
&& !s_register_operand (XEXP (operands[1], 1), VOIDmode))
operands[1] = gen_rtx_PLUS (GET_MODE (operands[1]),
XEXP (operands[1], 1),
XEXP (operands[1], 0));
}"
)
(define_insn "*thumb_extendqisi2"
[(set (match_operand:SI 0 "register_operand" "=l,l")
(sign_extend:SI (match_operand:QI 1 "memory_operand" "V,m")))]

View File

@ -111,8 +111,8 @@
;; ??? For now do not allow loading constants into vfp regs. This causes
;; problems because small constants get converted into adds.
(define_insn "*arm_movsi_vfp"
[(set (match_operand:SI 0 "nonimmediate_operand" "=r,r,r ,m,!w,r,!w,!w, U")
(match_operand:SI 1 "general_operand" "rI,K,mi,r,r,!w,!w,Ui,!w"))]
[(set (match_operand:SI 0 "nonimmediate_operand" "=r,r,r ,m,!w,r,!w,!w, Uv")
(match_operand:SI 1 "general_operand" "rI,K,mi,r,r,!w,!w,Uvi,!w"))]
"TARGET_ARM && TARGET_VFP && TARGET_HARD_FLOAT
&& ( s_register_operand (operands[0], SImode)
|| s_register_operand (operands[1], SImode))"
@ -136,8 +136,8 @@
;; DImode moves
(define_insn "*arm_movdi_vfp"
[(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r,o<>,w,r,w,w ,U")
(match_operand:DI 1 "di_operand" "rIK,mi,r ,r,w,w,Ui,w"))]
[(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r,o<>,w,r,w,w ,Uv")
(match_operand:DI 1 "di_operand" "rIK,mi,r ,r,w,w,Uvi,w"))]
"TARGET_ARM && TARGET_HARD_FLOAT && TARGET_VFP"
"*
switch (which_alternative)
@ -168,8 +168,8 @@
;; SFmode moves
(define_insn "*movsf_vfp"
[(set (match_operand:SF 0 "nonimmediate_operand" "=w,r,w ,U,r ,m,w,r")
(match_operand:SF 1 "general_operand" " r,w,UE,w,mE,r,w,r"))]
[(set (match_operand:SF 0 "nonimmediate_operand" "=w,r,w ,Uv,r ,m,w,r")
(match_operand:SF 1 "general_operand" " r,w,UvE,w, mE,r,w,r"))]
"TARGET_ARM && TARGET_HARD_FLOAT && TARGET_VFP
&& ( s_register_operand (operands[0], SFmode)
|| s_register_operand (operands[1], SFmode))"
@ -192,8 +192,8 @@
;; DFmode moves
(define_insn "*movdf_vfp"
[(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=w,r,r, m,w ,U,w,r")
(match_operand:DF 1 "soft_df_operand" " r,w,mF,r,UF,w,w,r"))]
[(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=w,r,r, m,w ,Uv,w,r")
(match_operand:DF 1 "soft_df_operand" " r,w,mF,r,UvF,w, w,r"))]
"TARGET_ARM && TARGET_HARD_FLOAT && TARGET_VFP"
"*
{

View File

@ -1360,9 +1360,12 @@ An item in the constant pool
A symbol in the text segment of the current file
@end table
@item U
@item Uv
A memory reference suitable for VFP load/store insns (reg+constant offset)
@item Uq
A memory reference suitable for the ARMv4 ldrsb instruction.
@item AVR family---@file{avr.h}
@table @code
@item l