predicates.md (spu_mov_operand): Add.

        * config/spu/predicates.md (spu_mov_operand): Add.
        * config/spu/spu.c (spu_expand_extv): Remove unused code.
        (print_operand_address, print_operand): Handle addresses containing AND.
        (spu_split_load, spu_split_store): Use updated movti pattern.
        * config/spu/spu.md: (_mov<mode>, _movdi, _movti): Handle loads and
        stores in mov patterns for correct operation of reload.
        (lq, lq_<mode>, stq, stq_<mode>): Remove.

From-SVN: r119421
Trevor Smigiel 2006-12-01 22:43:18 +00:00 committed by Trevor Smigiel
parent 01c15146e4
commit 09aad82b44
4 changed files with 52 additions and 96 deletions
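
For orientation before the diffs: the patch represents an aligned quadword access as an ordinary move whose address is masked down to a 16-byte boundary, roughly of the form below (a hand-written illustration, not an excerpt from the commit; the register number is invented):

    ;; address of the containing quadword, masked to 16-byte alignment
    (mem:TI (and:SI (reg:SI 81) (const_int -16)))

print_operand_address and print_operand learn to strip the (and ... -16) wrapper when printing such an address, and the mov patterns gain memory alternatives so reload can emit these loads and stores directly instead of going through the dedicated lq/stq patterns, which are removed.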

gcc/ChangeLog

@@ -1,3 +1,13 @@
2006-12-01 Trevor Smigiel <trevor_smigiel@playstation.sony.com>
* config/spu/predicates.md (spu_mov_operand): Add.
* config/spu/spu.c (spu_expand_extv): Remove unused code.
(print_operand_address, print_operand): Handle addresses containing AND.
(spu_split_load, spu_split_store): Use updated movti pattern.
* config/spu/spu.md: (_mov<mode>, _movdi, _movti): Handle loads and
stores in mov patterns for correct operation of reload.
(lq, lq_<mode>, stq, stq_<mode>): Remove.
2006-12-01 Volker Reichelt <reichelt@igpm.rwth-aachen.de>
PR c++/30021

gcc/config/spu/predicates.md

@@ -35,6 +35,10 @@
(and (match_operand 0 "memory_operand")
(match_test "reload_in_progress || reload_completed || aligned_mem_p (op)")))
(define_predicate "spu_mov_operand"
(ior (match_operand 0 "spu_mem_operand")
(match_operand 0 "spu_nonmem_operand")))
(define_predicate "call_operand"
(and (match_code "mem")
(match_test "(!TARGET_LARGE_MEM && satisfies_constraint_S (op))

gcc/config/spu/spu.c

@@ -366,52 +366,6 @@ spu_expand_extv (rtx ops[], int unsignedp)
dst_mode = GET_MODE (dst);
dst_size = GET_MODE_BITSIZE (GET_MODE (dst));
if (GET_CODE (ops[1]) == MEM)
{
if (start + width > MEM_ALIGN (ops[1]))
{
rtx addr = gen_reg_rtx (SImode);
rtx shl = gen_reg_rtx (SImode);
rtx shr = gen_reg_rtx (SImode);
rtx w0 = gen_reg_rtx (TImode);
rtx w1 = gen_reg_rtx (TImode);
rtx a0, a1;
src = gen_reg_rtx (TImode);
emit_move_insn (addr, copy_rtx (XEXP (ops[1], 0)));
a0 = memory_address (TImode, addr);
a1 = memory_address (TImode, plus_constant (addr, 16));
emit_insn (gen_lq (w0, a0));
emit_insn (gen_lq (w1, a1));
emit_insn (gen_andsi3 (shl, addr, GEN_INT (15)));
emit_insn (gen_iorsi3 (shr, addr, GEN_INT (16)));
emit_insn (gen_shlqby_ti (w0, w0, shl));
emit_insn (gen_rotqmby_ti (w1, w1, shr));
emit_insn (gen_iorti3 (src, w0, w1));
}
else
{
rtx addr = gen_reg_rtx (SImode);
rtx a0;
emit_move_insn (addr, copy_rtx (XEXP (ops[1], 0)));
a0 = memory_address (TImode, addr);
src = gen_reg_rtx (TImode);
emit_insn (gen_lq (src, a0));
if (MEM_ALIGN (ops[1]) < 128)
{
rtx t = src;
src = gen_reg_rtx (TImode);
emit_insn (gen_rotqby_ti (src, t, addr));
}
}
/* Shifts in SImode are faster, use them if we can. */
if (start + width < 32)
{
rtx t = src;
src = gen_reg_rtx (SImode);
emit_insn (gen_spu_convert (src, t));
}
}
src = adjust_operand (src, &start);
src_mode = GET_MODE (src);
src_size = GET_MODE_BITSIZE (GET_MODE (src));
@@ -970,6 +924,11 @@ print_operand_address (FILE * file, register rtx addr)
rtx reg;
rtx offset;
if (GET_CODE (addr) == AND
&& GET_CODE (XEXP (addr, 1)) == CONST_INT
&& INTVAL (XEXP (addr, 1)) == -16)
addr = XEXP (addr, 0);
switch (GET_CODE (addr))
{
case REG:
@@ -1254,6 +1213,11 @@ print_operand (FILE * file, rtx x, int code)
x = XEXP (x, 0);
xcode = GET_CODE (x);
}
if (xcode == AND)
{
x = XEXP (x, 0);
xcode = GET_CODE (x);
}
if (xcode == REG)
fprintf (file, "d");
else if (xcode == CONST_INT)
@@ -3300,7 +3264,7 @@ spu_split_load (rtx * ops)
addr = gen_rtx_AND (SImode, copy_rtx (addr), GEN_INT (-16));
mem = change_address (ops[1], TImode, addr);
emit_insn (gen_lq_ti (load, mem));
emit_insn (gen_movti (load, mem));
if (rot)
emit_insn (gen_rotqby_ti (load, load, rot));
@@ -3385,6 +3349,8 @@ spu_split_store (rtx * ops)
}
}
addr = gen_rtx_AND (SImode, copy_rtx (addr), GEN_INT (-16));
scalar = store_with_one_insn_p (ops[0]);
if (!scalar)
{
@@ -3393,7 +3359,9 @@ spu_split_store (rtx * ops)
possible, and copying the flags will prevent that in certain
cases, e.g. consider the volatile flag. */
emit_insn (gen_lq (reg, copy_rtx (addr)));
rtx lmem = change_address (ops[0], TImode, copy_rtx (addr));
set_mem_alias_set (lmem, 0);
emit_insn (gen_movti (reg, lmem));
if (!p0 || reg_align (p0) >= 128)
p0 = stack_pointer_rtx;
@@ -3428,13 +3396,12 @@ spu_split_store (rtx * ops)
emit_insn (gen_shlqby_ti
(reg, reg, GEN_INT (4 - GET_MODE_SIZE (mode))));
addr = gen_rtx_AND (SImode, copy_rtx (addr), GEN_INT (-16));
smem = change_address (ops[0], TImode, addr);
/* We can't use the previous alias set because the memory has changed
size and can potentially overlap objects of other types. */
set_mem_alias_set (smem, 0);
emit_insn (gen_stq_ti (smem, reg));
emit_insn (gen_movti (smem, reg));
}
/* Return TRUE if X is MEM which is a struct member reference
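
Taken together, the spu_split_store changes above make a store that cannot be done in a single insn go through a read-modify-write of the containing quadword using the ordinary movti pattern. A rough sketch of the emitted sequence (hand-written illustration; register numbers are invented and the merge step is elided):

    ;; load the aligned quadword containing the destination
    (set (reg:TI 90) (mem:TI (and:SI (reg:SI 91) (const_int -16))))
    ;; ... insns that merge the stored value into reg 90 (elided) ...
    ;; write the whole quadword back
    (set (mem:TI (and:SI (reg:SI 91) (const_int -16))) (reg:TI 90))

Both TImode memory references have their alias set cleared with set_mem_alias_set because, as the comment in the diff notes, the access now covers a full quadword and can overlap objects of other types.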

gcc/config/spu/spu.md

@@ -262,14 +262,16 @@
;; move internal
(define_insn "_mov<mode>"
[(set (match_operand:MOV 0 "spu_reg_operand" "=r,r,r")
(match_operand:MOV 1 "spu_nonmem_operand" "r,A,f"))]
""
[(set (match_operand:MOV 0 "spu_nonimm_operand" "=r,r,r,r,m")
(match_operand:MOV 1 "spu_mov_operand" "r,A,f,m,r"))]
"spu_valid_move (operands)"
"@
ori\t%0,%1,0
il%s1\t%0,%S1
fsmbi\t%0,%F1"
[(set_attr "type" "fx2,fx2,shuf")])
fsmbi\t%0,%F1
lq%p1\t%0,%1
stq%p0\t%1,%0"
[(set_attr "type" "fx2,fx2,shuf,load,store")])
(define_insn "high"
[(set (match_operand:SI 0 "spu_reg_operand" "=r")
@@ -285,24 +287,28 @@
"iohl\t%0,%2@l")
(define_insn "_movdi"
[(set (match_operand:DI 0 "spu_reg_operand" "=r,r,r")
(match_operand:DI 1 "spu_nonmem_operand" "r,a,f"))]
""
[(set (match_operand:DI 0 "spu_nonimm_operand" "=r,r,r,r,m")
(match_operand:DI 1 "spu_mov_operand" "r,a,f,m,r"))]
"spu_valid_move (operands)"
"@
ori\t%0,%1,0
il%d1\t%0,%D1
fsmbi\t%0,%G1"
[(set_attr "type" "fx2,fx2,shuf")])
fsmbi\t%0,%G1
lq%p1\t%0,%1
stq%p0\t%1,%0"
[(set_attr "type" "fx2,fx2,shuf,load,store")])
(define_insn "_movti"
[(set (match_operand:TI 0 "spu_reg_operand" "=r,r,r")
(match_operand:TI 1 "spu_nonmem_operand" "r,U,f"))]
""
[(set (match_operand:TI 0 "spu_nonimm_operand" "=r,r,r,r,m")
(match_operand:TI 1 "spu_mov_operand" "r,U,f,m,r"))]
"spu_valid_move (operands)"
"@
ori\t%0,%1,0
il%t1\t%0,%T1
fsmbi\t%0,%H1"
[(set_attr "type" "fx2,fx2,shuf")])
fsmbi\t%0,%H1
lq%p1\t%0,%1
stq%p0\t%1,%0"
[(set_attr "type" "fx2,fx2,shuf,load,store")])
(define_insn_and_split "load"
[(set (match_operand 0 "spu_reg_operand" "=r")
@@ -316,22 +322,6 @@
(match_dup 1))]
{ spu_split_load(operands); DONE; })
(define_insn "lq"
[(set (match_operand:TI 0 "spu_reg_operand" "=r")
(mem:TI (and:SI (match_operand:SI 1 "address_operand" "p")
(const_int -16))))]
""
"lq%p1\t%0,%a1"
[(set_attr "type" "load")])
(define_insn "lq_<mode>"
[(set (match_operand:ALL 0 "spu_reg_operand" "=r")
(match_operand:ALL 1 "spu_mem_operand" "m"))]
"spu_valid_move (operands)"
"lq%p1\t%0,%1"
[(set_attr "type" "load")])
(define_insn_and_split "store"
[(set (match_operand 0 "memory_operand" "=m")
(match_operand 1 "spu_reg_operand" "r"))
@@ -344,21 +334,6 @@
(match_dup 1))]
{ spu_split_store(operands); DONE; })
(define_insn "stq"
[(set (mem:TI (and:SI (match_operand:SI 0 "address_operand" "p")
(const_int -16)))
(match_operand:TI 1 "spu_reg_operand" "r"))]
""
"stq%p0\t%1,%a0"
[(set_attr "type" "load")])
(define_insn "stq_<mode>"
[(set (match_operand:ALL 0 "spu_mem_operand" "=m")
(match_operand:ALL 1 "spu_reg_operand" "r"))]
"spu_valid_move (operands)"
"stq%p0\t%1,%0"
[(set_attr "type" "load")])
;; Operand 3 is the number of bytes. 1:b 2:h 4:w 8:d
(define_insn "cpat"
[(set (match_operand:TI 0 "spu_reg_operand" "=r,r")