re PR target/49487 ([avr] ICE in bytewise rotate)

PR target/49487
	* config/avr/avr.md (rotl<mode>3): Generate SCRATCH instead
	of REG.
	(*rotw<mode>): Use const_int_operand for operand2.
	Use match_scratch for operand3.
	(*rotb<mode>): Ditto.
	* config/avr/avr.c (avr_rotate_bytes): Treat SCRATCH.

From-SVN: r176276
Georg-Johann Lay 2011-07-14 15:10:12 +00:00 committed by Georg-Johann Lay
parent 1a5d20a4e0
commit d772f97cb6
3 changed files with 47 additions and 29 deletions
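
In short: the rotl<mode>3 expander used to allocate a fresh pseudo with gen_reg_rtx for the clobbered scratch operand; it now emits a bare (scratch) rtx, and the *rotw<mode>/*rotb<mode> insns match it with match_scratch, so a real scratch register is only supplied when register allocation can provide one. avr_rotate_bytes therefore has to tolerate operands[3] still being a SCRATCH, which is what the new GET_CODE check and gcc_assert in the avr.c hunks below do. The following standalone C program is only a toy model of that guard, written for illustration; the rtx_model struct, the CODE_REG/CODE_SCRATCH codes and the two helper functions are invented here and are not GCC's API.

/* Toy model of the SCRATCH guard added to avr_rotate_bytes.  All types and
   helpers are invented for illustration and are not the GCC internals API.  */
#include <assert.h>
#include <stdio.h>

enum rtx_code { CODE_REG, CODE_SCRATCH };   /* stand-in for GCC rtx codes   */
enum machine_mode { QI_MODE, HI_MODE };     /* stand-in for QImode/HImode   */

struct rtx_model
{
  enum rtx_code code;
  enum machine_mode mode;
};

/* Mirrors the first hunk: only narrow the scratch from HImode to QImode
   when it is a real register, i.e. not a bare SCRATCH placeholder.  */
static void
maybe_narrow_scratch (struct rtx_model *scratch, enum machine_mode move_mode)
{
  if (scratch->code != CODE_SCRATCH
      && scratch->mode == HI_MODE
      && move_mode == QI_MODE)
    scratch->mode = QI_MODE;   /* models simplify_gen_subreg (QImode, ...)  */
}

/* Mirrors the second hunk: before the scratch is used to break a move
   deadlock it must be a real register, hence the assertion.  */
static void
use_scratch_to_break_deadlock (const struct rtx_model *scratch)
{
  assert (scratch->code != CODE_SCRATCH);
  printf ("using %s scratch\n",
          scratch->mode == QI_MODE ? "QImode" : "HImode");
}

int
main (void)
{
  struct rtx_model scratch = { CODE_REG, HI_MODE };
  maybe_narrow_scratch (&scratch, QI_MODE);
  use_scratch_to_break_deadlock (&scratch);
  return 0;
}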

gcc/ChangeLog

@@ -1,3 +1,13 @@
+2011-07-14  Georg-Johann Lay  <avr@gjlay.de>
+
+	PR target/49487
+	* config/avr/avr.md (rotl<mode>3): Generate SCRATCH instead
+	of REG.
+	(*rotw<mode>): Use const_int_operand for operand2.
+	Use match_scratch for operand3.
+	(*rotb<mode>): Ditto.
+	* config/avr/avr.c (avr_rotate_bytes): Treat SCRATCH.
+
 2011-07-14  Richard Guenther  <rguenther@suse.de>

 	PR tree-optimization/49651

gcc/config/avr/avr.c

@@ -4438,7 +4438,9 @@ avr_rotate_bytes (rtx operands[])
   if (mode == DImode)
     move_mode = QImode;
   /* Make scratch smaller if needed.  */
-  if (GET_MODE (scratch) == HImode && move_mode == QImode)
+  if (SCRATCH != GET_CODE (scratch)
+      && HImode == GET_MODE (scratch)
+      && QImode == move_mode)
     scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);

   move_size = GET_MODE_SIZE (move_mode);
@@ -4534,6 +4536,8 @@ avr_rotate_bytes (rtx operands[])
      When this move occurs, it will break chain deadlock.
      The scratch register is substituted for real move.  */
+  gcc_assert (SCRATCH != GET_CODE (scratch));
+
   move[size].src = move[blocked].dst;
   move[size].dst = scratch;
   /* Scratch move is never blocked.  */

gcc/config/avr/avr.md

@@ -1597,18 +1597,18 @@
                (match_operand:VOID 2 "const_int_operand" "")))
     (clobber (match_dup 3))])]
   ""
-  "
   {
-    if (CONST_INT_P (operands[2]) && 0 == (INTVAL (operands[2]) % 8))
-      {
-      if (AVR_HAVE_MOVW && 0 == INTVAL (operands[2]) % 16)
-        operands[3] = gen_reg_rtx (<rotsmode>mode);
-      else
-        operands[3] = gen_reg_rtx (QImode);
-      }
-    else
-      FAIL;
-  }")
+    if (CONST_INT_P (operands[2])
+        && 0 == INTVAL (operands[2]) % 8)
+      {
+        if (AVR_HAVE_MOVW && 0 == INTVAL (operands[2]) % 16)
+          operands[3] = gen_rtx_SCRATCH (<rotsmode>mode);
+        else
+          operands[3] = gen_rtx_SCRATCH (QImode);
+      }
+    else
+      FAIL;
+  })

 ;; Overlapping non-HImode registers often (but not always) need a scratch.
@@ -1620,34 +1620,38 @@
 ; Split word aligned rotates using scratch that is mode dependent.
 (define_insn_and_split "*rotw<mode>"
   [(set (match_operand:HIDI 0 "register_operand" "=r,r,#&r")
-        (rotate:HIDI (match_operand:HIDI 1 "register_operand" "0,r,r")
-                     (match_operand 2 "immediate_operand" "n,n,n")))
-   (clobber (match_operand:<rotsmode> 3 "register_operand" "=<rotx>" ))]
-  "(CONST_INT_P (operands[2]) &&
-     (0 == (INTVAL (operands[2]) % 16) && AVR_HAVE_MOVW))"
+        (rotate:HIDI (match_operand:HIDI 1 "register_operand" "0,r,r")
+                     (match_operand 2 "const_int_operand" "n,n,n")))
+   (clobber (match_scratch:<rotsmode> 3 "=<rotx>"))]
+  "AVR_HAVE_MOVW
+   && CONST_INT_P (operands[2])
+   && 0 == INTVAL (operands[2]) % 16"
   "#"
   "&& (reload_completed || <MODE>mode == DImode)"
   [(const_int 0)]
-  "avr_rotate_bytes (operands);
-  DONE;"
-)
+  {
+    avr_rotate_bytes (operands);
+    DONE;
+  })

 ; Split byte aligned rotates using scratch that is always QI mode.
 (define_insn_and_split "*rotb<mode>"
   [(set (match_operand:HIDI 0 "register_operand" "=r,r,#&r")
-        (rotate:HIDI (match_operand:HIDI 1 "register_operand" "0,r,r")
-                     (match_operand 2 "immediate_operand" "n,n,n")))
-   (clobber (match_operand:QI 3 "register_operand" "=<rotx>" ))]
-  "(CONST_INT_P (operands[2]) &&
-     (8 == (INTVAL (operands[2]) % 16)
-      || (!AVR_HAVE_MOVW && 0 == (INTVAL (operands[2]) % 16))))"
+        (rotate:HIDI (match_operand:HIDI 1 "register_operand" "0,r,r")
+                     (match_operand 2 "const_int_operand" "n,n,n")))
+   (clobber (match_scratch:QI 3 "=<rotx>"))]
+  "CONST_INT_P (operands[2])
+   && (8 == INTVAL (operands[2]) % 16
+       || (!AVR_HAVE_MOVW
+           && 0 == INTVAL (operands[2]) % 16))"
   "#"
   "&& (reload_completed || <MODE>mode == DImode)"
   [(const_int 0)]
-  "avr_rotate_bytes (operands);
-  DONE;"
-)
+  {
+    avr_rotate_bytes (operands);
+    DONE;
+  })

 ;;<< << << << << << << << << << << << << << << << << << << << << << << << << <<
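
For reference, the insn conditions above partition the byte-aligned rotate amounts: rotl<mode>3 FAILs unless the amount is a multiple of 8, *rotw<mode> takes multiples of 16 when MOVW is available and clobbers a <rotsmode> scratch, and *rotb<mode> takes the remaining byte-aligned amounts with a QImode scratch. Below is a minimal standalone C sketch of that dispatch; classify_rotate, the rotate_path enum and the have_movw flag are invented names that merely model AVR_HAVE_MOVW and the two patterns.

/* Toy dispatch mirroring the insn conditions above; not GCC code.
   'bits' is the constant rotate amount, 'have_movw' models AVR_HAVE_MOVW.  */
#include <stdio.h>

enum rotate_path { ROTATE_FAIL, ROTATE_ROTW, ROTATE_ROTB };

static enum rotate_path
classify_rotate (int bits, int have_movw)
{
  if (bits % 8 != 0)
    return ROTATE_FAIL;            /* expander FAILs: not byte aligned      */
  if (have_movw && bits % 16 == 0)
    return ROTATE_ROTW;            /* *rotw<mode>: word-wise, word scratch  */
  if (bits % 16 == 8
      || (!have_movw && bits % 16 == 0))
    return ROTATE_ROTB;            /* *rotb<mode>: byte-wise, QImode scratch */
  return ROTATE_FAIL;
}

int
main (void)
{
  printf ("%d %d %d\n",
          classify_rotate (16, 1),   /* ROTATE_ROTW */
          classify_rotate (8, 1),    /* ROTATE_ROTB */
          classify_rotate (16, 0));  /* ROTATE_ROTB */
  return 0;
}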