simd-3.c: New.

* testsuite/gcc.c-torture/execute/simd-3.c: New.

        * expr.c (expand_expr): Handle VECTOR_CST.
        (const_vector_from_tree): New.

        * varasm.c (output_constant): Handle VECTOR_CST.

        * c-typeck.c (digest_init): Build a vector constant from a
        VECTOR_TYPE.

        * config/rs6000/rs6000.c: Remove prototype for
        easy_vector_constant.
        (easy_vector_constant): Add mode parameter.  Rewrite to handle
        more easy constants.
        (rs6000_emit_move): Pass mode to easy_vector_constant.
        Call emit_easy_vector_insn for SPE V2SI vector constant moves.
        (emit_easy_vector_insn): New.
        (easy_vector_same): New.
        (EASY_VECTOR_15): New macro.
        (EASY_VECTOR_15_ADD_SELF): New macro.
        (bdesc_2arg): Rename spe_evxor to xorv2si3.
        (easy_vector_constant_add_self): New.
        (input_operand): Allow vector constants.

        * config/rs6000/rs6000.h (PREDICATE_CODES): Add
        easy_vector_constant, easy_vector_constant_add_self.
        (EXTRA_CONSTRAINT): Add 'W'.

        * config/rs6000/rs6000-protos.h: Add prototype for
        easy_vector_constant, emit_easy_vector_insn.

        * config/rs6000/altivec.md (xorv8hi3): New.
        (xorv16qi3): New.
        Remove all _const0 patterns.
        (movv4si_internal): Rewrite the output template as C code.  Add a
        vector constant alternative.  Add splitter.
        (movv8hi_internal): Same.
        (movv16qi_internal): Same.
        (movv4sf_internal): Same.
        Change the unspecs for vspltis* to use constants.

        * config/rs6000/spe.md ("xorv4hi3"): New.
        ("spe_evxor"): Rename to xorv2si3.
        ("xorv1di3"): New.
        Remove all _const0 patterns.
        (movv2si_internal): Rewrite the output template as C code.  Add a
        vector constant alternative.  Add splitter.
        (movv4hi_internal): Add vector constant to alternatives.
        (movv1di_internal): Same.
        (movv2sf_internal): Same.

From-SVN: r65130
Author: Aldy Hernandez <aldyh@redhat.com>  2003-04-01 13:40:11 +00:00
Commit: d744e06e5e (parent: 46e33d43a2)
10 changed files with 450 additions and 157 deletions
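
For orientation before the per-file diffs: the sketch below is a hypothetical
test in the spirit of the new simd-3.c (the actual testcase is not shown in
this commit page).  It exercises the two sides of the change: a fully constant
vector initializer that the front end can now fold to a VECTOR_CST, and an
all-equal "easy" constant that the rs6000 move patterns can synthesize in a
register instead of loading from memory.  The typedef, names, and values are
illustrative only.

/* Hypothetical example; not the actual gcc.c-torture/execute/simd-3.c.  */
typedef int v4si __attribute__ ((vector_size (16)));

/* Fully constant initializer: digest_init builds a VECTOR_CST and
   varasm's output_constant emits it element by element.  */
v4si global = { 1, 2, 3, 4 };

extern void abort (void);

int
main (void)
{
  /* An "easy" splat constant: the new move alternatives can materialize
     it in a register (e.g. vspltisw on AltiVec) rather than loading it
     from the constant pool.  */
  v4si x = { 5, 5, 5, 5 };
  int *p = (int *) &x;

  if (p[0] != 5 || p[3] != 5)
    abort ();
  return 0;
}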

gcc/ChangeLog

@ -1,3 +1,56 @@
2003-02-31 Aldy Hernandez <aldyh@redhat.com>
* testsuite/gcc.c-torture/execute/simd-3.c: New.
* expr.c (expand_expr): Handle VECTOR_CST.
(const_vector_from_tree): New.
* varasm.c (output_constant): Handle VECTOR_CST.
* c-typeck.c (digest_init): Build a vector constant from a
VECTOR_TYPE.
* config/rs6000/rs6000.c: Remove prototype for
easy_vector_constant.
(easy_vector_constant): Add mode parameter. Rewrite to handle
more easy constants.
(rs6000_emit_move): Pass mode to easy_vector_constant.
Call emit_easy_vector_insn for SPE V2SI vector constant moves.
(emit_easy_vector_insn): New.
(easy_vector_same): New.
(EASY_VECTOR_15): New macro.
(EASY_VECTOR_15_ADD_SELF): New macro.
(bdesc_2arg): Rename to xorv2si3.
(easy_vector_constant_add_self): New.
(input_operand): Allow vector constants.
* config/rs6000/rs6000.h (PREDICATE_CODES): Add
easy_vector_constant, easy_vector_constant_add_self.
(EXTRA_CONSTRAINT): Add 'W'.
* config/rs6000/rs6000-protos.h: Add prototype for
easy_vector_constant, emit_easy_vector_insn.
* config/rs6000/altivec.md (xorv8hi3): New.
(xorv16qi3): New.
Remove all _const0 patterns.
(movv4si_internal): Rewrite to use code. Add vector constant to
vector alternative. Add splitter.
(movv8hi_internal): Same.
(movv16qi_internal): Same.
(movv4sf_internal): Same.
Change the unspecs for vspltis* to use constants.
* config/rs6000/spe.md ("xorv4hi3"): New.
("spe_evxor"): Rename to xorv2si3.
("xorv1di3"): New.
Remove all _const0 patterns.
(movv2si_internal): Rewrite to use code. Add vector constant to
alternatives. Add splitter.
(movv4hi_internal): Add vector constant to alternatives.
(movv1di_internal): Same.
(movv2sf_internal): Same.
2003-03-31 Mark Mitchell <mark@codesourcery.com>
PR c/9936

gcc/c-typeck.c

@ -4759,6 +4759,14 @@ digest_init (type, init, require_constant)
}
}
/* Build a VECTOR_CST from a *constant* vector constructor. If the
vector constructor is not constant (e.g. {1,2,3,foo()}) then punt
below and handle as a constructor. */
if (code == VECTOR_TYPE
&& comptypes (TREE_TYPE (inside_init), type)
&& TREE_CONSTANT (inside_init))
return build_vector (type, TREE_OPERAND (inside_init, 1));
/* Any type can be initialized
from an expression of the same type, optionally with braces. */
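
To illustrate the condition above (this snippet is not part of the patch):
only a fully constant constructor is folded to a VECTOR_CST; an initializer
with a non-constant element punts to the constructor code, as the new comment
says.  The typedef and helper function are hypothetical.

/* Hypothetical illustration; not part of the patch.  */
typedef int v4si __attribute__ ((vector_size (16)));

static int foo (void) { return 42; }

v4si c = { 1, 2, 3, 4 };          /* TREE_CONSTANT: becomes a VECTOR_CST    */

void
use (v4si *out)
{
  v4si d = { 1, 2, 3, foo () };   /* not constant: handled as a constructor */
  *out = d;
}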

gcc/config/rs6000/altivec.md

@ -19,6 +19,12 @@
;; the Free Software Foundation, 59 Temple Place - Suite 330,
;; Boston, MA 02111-1307, USA.
(define_constants
[(UNSPEC_VSPLTISW 141)
(UNSPEC_VSPLTISH 140)
(UNSPEC_VSPLTISB 139)
])
;; Generic LVX load instruction.
(define_insn "altivec_lvx_4si"
[(set (match_operand:V4SI 0 "altivec_register_operand" "=v")
@ -85,18 +91,37 @@
"{ rs6000_emit_move (operands[0], operands[1], V4SImode); DONE; }")
(define_insn "*movv4si_internal"
[(set (match_operand:V4SI 0 "nonimmediate_operand" "=m,v,v,o,r,r")
(match_operand:V4SI 1 "input_operand" "v,m,v,r,o,r"))]
[(set (match_operand:V4SI 0 "nonimmediate_operand" "=m,v,v,o,r,r,v")
(match_operand:V4SI 1 "input_operand" "v,m,v,r,o,r,W"))]
"TARGET_ALTIVEC"
"@
stvx %1,%y0
lvx %0,%y1
vor %0,%1,%1
stw%U0 %1,%0\;stw %L1,%L0\;stw %Y1,%Y0\;stw %Z1,%Z0
lwz%U1 %0,%1\;lwz %L0,%L1\;lwz %Y0,%Y1\;lwz %Z0,%Z1
mr %0,%1\;mr %L0,%L1\;mr %Y0,%Y1\;mr %Z0,%Z1"
[(set_attr "type" "vecstore,vecload,vecsimple,store,load,*")
(set_attr "length" "*,*,*,16,16,16")])
"*
{
switch (which_alternative)
{
case 0: return \"stvx %1,%y0\";
case 1: return \"lvx %0,%y1\";
case 2: return \"vor %0,%1,%1\";
case 3: return \"stw%U0 %1,%0\;stw %L1,%L0\;stw %Y1,%Y0\;stw %Z1,%Z0\";
case 4: return \"lwz%U1 %0,%1\;lwz %L0,%L1\;lwz %Y0,%Y1\;lwz %Z0,%Z1\";
case 5: return \"mr %0,%1\;mr %L0,%L1\;mr %Y0,%Y1\;mr %Z0,%Z1\";
case 6: return output_vec_const_move (operands);
default: abort();
}
}"
[(set_attr "type" "vecstore,vecload,vecsimple,store,load,*,*")
(set_attr "length" "*,*,*,16,16,16,*")])
(define_split
[(set (match_operand:V4SI 0 "altivec_register_operand" "")
(match_operand:V4SI 1 "easy_vector_constant_add_self" ""))]
"TARGET_ALTIVEC && reload_completed"
[(set (match_dup 0)
(unspec:V4SI [(match_dup 3)] UNSPEC_VSPLTISW))
(set (match_dup 0)
(plus:V4SI (match_dup 0)
(match_dup 0)))]
"
{ operands[3] = GEN_INT (INTVAL (CONST_VECTOR_ELT (operands[1], 0)) >> 1); }")
(define_expand "movv8hi"
[(set (match_operand:V8HI 0 "nonimmediate_operand" "")
@ -105,18 +130,37 @@
"{ rs6000_emit_move (operands[0], operands[1], V8HImode); DONE; }")
(define_insn "*movv8hi_internal1"
[(set (match_operand:V8HI 0 "nonimmediate_operand" "=m,v,v,o,r,r")
(match_operand:V8HI 1 "input_operand" "v,m,v,r,o,r"))]
[(set (match_operand:V8HI 0 "nonimmediate_operand" "=m,v,v,o,r,r,v")
(match_operand:V8HI 1 "input_operand" "v,m,v,r,o,r,W"))]
"TARGET_ALTIVEC"
"@
stvx %1,%y0
lvx %0,%y1
vor %0,%1,%1
stw%U0 %1,%0\;stw %L1,%L0\;stw %Y1,%Y0\;stw %Z1,%Z0
lwz%U1 %0,%1\;lwz %L0,%L1\;lwz %Y0,%Y1\;lwz %Z0,%Z1
mr %0,%1\;mr %L0,%L1\;mr %Y0,%Y1\;mr %Z0,%Z1"
[(set_attr "type" "vecstore,vecload,vecsimple,store,load,*")
(set_attr "length" "*,*,*,16,16,16")])
"*
{
switch (which_alternative)
{
case 0: return \"stvx %1,%y0\";
case 1: return \"lvx %0,%y1\";
case 2: return \"vor %0,%1,%1\";
case 3: return \"stw%U0 %1,%0\;stw %L1,%L0\;stw %Y1,%Y0\;stw %Z1,%Z0\";
case 4: return \"lwz%U1 %0,%1\;lwz %L0,%L1\;lwz %Y0,%Y1\;lwz %Z0,%Z1\";
case 5: return \"mr %0,%1\;mr %L0,%L1\;mr %Y0,%Y1\;mr %Z0,%Z1\";
case 6: return output_vec_const_move (operands);
default: abort ();
}
}"
[(set_attr "type" "vecstore,vecload,vecsimple,store,load,*,*")
(set_attr "length" "*,*,*,16,16,16,*")])
(define_split
[(set (match_operand:V8HI 0 "altivec_register_operand" "")
(match_operand:V8HI 1 "easy_vector_constant_add_self" ""))]
"TARGET_ALTIVEC && reload_completed"
[(set (match_dup 0)
(unspec:V8HI [(match_dup 3)] UNSPEC_VSPLTISH))
(set (match_dup 0)
(plus:V8HI (match_dup 0)
(match_dup 0)))]
"
{ operands[3] = GEN_INT (INTVAL (CONST_VECTOR_ELT (operands[1], 0)) >> 1); }")
(define_expand "movv16qi"
[(set (match_operand:V16QI 0 "nonimmediate_operand" "")
@ -125,18 +169,37 @@
"{ rs6000_emit_move (operands[0], operands[1], V16QImode); DONE; }")
(define_insn "*movv16qi_internal1"
[(set (match_operand:V16QI 0 "nonimmediate_operand" "=m,v,v,o,r,r")
(match_operand:V16QI 1 "input_operand" "v,m,v,r,o,r"))]
[(set (match_operand:V16QI 0 "nonimmediate_operand" "=m,v,v,o,r,r,v")
(match_operand:V16QI 1 "input_operand" "v,m,v,r,o,r,W"))]
"TARGET_ALTIVEC"
"@
stvx %1,%y0
lvx %0,%y1
vor %0,%1,%1
stw%U0 %1,%0\;stw %L1,%L0\;stw %Y1,%Y0\;stw %Z1,%Z0
lwz%U1 %0,%1\;lwz %L0,%L1\;lwz %Y0,%Y1\;lwz %Z0,%Z1
mr %0,%1\;mr %L0,%L1\;mr %Y0,%Y1\;mr %Z0,%Z1"
[(set_attr "type" "vecstore,vecload,vecsimple,store,load,*")
(set_attr "length" "*,*,*,16,16,16")])
"*
{
switch (which_alternative)
{
case 0: return \"stvx %1,%y0\";
case 1: return \"lvx %0,%y1\";
case 2: return \"vor %0,%1,%1\";
case 3: return \"stw%U0 %1,%0\;stw %L1,%L0\;stw %Y1,%Y0\;stw %Z1,%Z0\";
case 4: return \"lwz%U1 %0,%1\;lwz %L0,%L1\;lwz %Y0,%Y1\;lwz %Z0,%Z1\";
case 5: return \"mr %0,%1\;mr %L0,%L1\;mr %Y0,%Y1\;mr %Z0,%Z1\";
case 6: return output_vec_const_move (operands);
default: abort ();
}
}"
[(set_attr "type" "vecstore,vecload,vecsimple,store,load,*,*")
(set_attr "length" "*,*,*,16,16,16,*")])
(define_split
[(set (match_operand:V16QI 0 "altivec_register_operand" "")
(match_operand:V16QI 1 "easy_vector_constant_add_self" ""))]
"TARGET_ALTIVEC && reload_completed"
[(set (match_dup 0)
(unspec:V16QI [(match_dup 3)] UNSPEC_VSPLTISB))
(set (match_dup 0)
(plus:V16QI (match_dup 0)
(match_dup 0)))]
"
{ operands[3] = GEN_INT (INTVAL (CONST_VECTOR_ELT (operands[1], 0)) >> 1); }")
(define_expand "movv4sf"
[(set (match_operand:V4SF 0 "nonimmediate_operand" "")
@ -145,18 +208,25 @@
"{ rs6000_emit_move (operands[0], operands[1], V4SFmode); DONE; }")
(define_insn "*movv4sf_internal1"
[(set (match_operand:V4SF 0 "nonimmediate_operand" "=m,v,v,o,r,r")
(match_operand:V4SF 1 "input_operand" "v,m,v,r,o,r"))]
[(set (match_operand:V4SF 0 "nonimmediate_operand" "=m,v,v,o,r,r,v")
(match_operand:V4SF 1 "input_operand" "v,m,v,r,o,r,W"))]
"TARGET_ALTIVEC"
"@
stvx %1,%y0
lvx %0,%y1
vor %0,%1,%1
stw%U0 %1,%0\;stw %L1,%L0\;stw %Y1,%Y0\;stw %Z1,%Z0
lwz%U1 %0,%1\;lwz %L0,%L1\;lwz %Y0,%Y1\;lwz %Z0,%Z1
mr %0,%1\;mr %L0,%L1\;mr %Y0,%Y1\;mr %Z0,%Z1"
[(set_attr "type" "vecstore,vecload,vecsimple,store,load,*")
(set_attr "length" "*,*,*,16,16,16")])
"*
{
switch (which_alternative)
{
case 0: return \"stvx %1,%y0\";
case 1: return \"lvx %0,%y1\";
case 2: return \"vor %0,%1,%1\";
case 3: return \"stw%U0 %1,%0\;stw %L1,%L0\;stw %Y1,%Y0\;stw %Z1,%Z0\";
case 4: return \"lwz%U1 %0,%1\;lwz %L0,%L1\;lwz %Y0,%Y1\;lwz %Z0,%Z1\";
case 5: return \"mr %0,%1\;mr %L0,%L1\;mr %Y0,%Y1\;mr %Z0,%Z1\";
case 6: return output_vec_const_move (operands);
default: abort ();
}
}"
[(set_attr "type" "vecstore,vecload,vecsimple,store,load,*,*")
(set_attr "length" "*,*,*,16,16,16,*")])
(define_insn "get_vrsave_internal"
[(set (match_operand:SI 0 "register_operand" "=r")
@ -186,36 +256,6 @@
}"
[(set_attr "type" "*")])
;; Vector clears
(define_insn "*movv4si_const0"
[(set (match_operand:V4SI 0 "altivec_register_operand" "=v")
(match_operand:V4SI 1 "zero_constant" ""))]
"TARGET_ALTIVEC"
"vxor %0,%0,%0"
[(set_attr "type" "vecsimple")])
(define_insn "*movv4sf_const0"
[(set (match_operand:V4SF 0 "altivec_register_operand" "=v")
(match_operand:V4SF 1 "zero_constant" ""))]
"TARGET_ALTIVEC"
"vxor %0,%0,%0"
[(set_attr "type" "vecsimple")])
(define_insn "*movv8hi_const0"
[(set (match_operand:V8HI 0 "altivec_register_operand" "=v")
(match_operand:V8HI 1 "zero_constant" ""))]
"TARGET_ALTIVEC"
"vxor %0,%0,%0"
[(set_attr "type" "vecsimple")])
(define_insn "*movv16qi_const0"
[(set (match_operand:V16QI 0 "altivec_register_operand" "=v")
(match_operand:V16QI 1 "zero_constant" ""))]
"TARGET_ALTIVEC"
"vxor %0,%0,%0"
[(set_attr "type" "vecsimple")])
;; Simple binary operations.
(define_insn "addv16qi3"
@ -1279,6 +1319,7 @@
"vsumsws %0,%1,%2"
[(set_attr "type" "veccomplex")])
;; Vector xor's
(define_insn "xorv4si3"
[(set (match_operand:V4SI 0 "register_operand" "=v")
(xor:V4SI (match_operand:V4SI 1 "register_operand" "v")
@ -1287,6 +1328,22 @@
"vxor %0,%1,%2"
[(set_attr "type" "vecsimple")])
(define_insn "xorv8hi3"
[(set (match_operand:V8HI 0 "register_operand" "=v")
(xor:V8HI (match_operand:V8HI 1 "register_operand" "v")
(match_operand:V8HI 2 "register_operand" "v")))]
"TARGET_ALTIVEC"
"vxor %0,%1,%2"
[(set_attr "type" "vecsimple")])
(define_insn "xorv16qi3"
[(set (match_operand:V16QI 0 "register_operand" "=v")
(xor:V16QI (match_operand:V16QI 1 "register_operand" "v")
(match_operand:V16QI 2 "register_operand" "v")))]
"TARGET_ALTIVEC"
"vxor %0,%1,%2"
[(set_attr "type" "vecsimple")])
(define_insn "altivec_vspltb"
[(set (match_operand:V16QI 0 "register_operand" "=v")
(unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")
@ -1294,6 +1351,7 @@
"TARGET_ALTIVEC"
"vspltb %0,%1,%2"
[(set_attr "type" "vecperm")])
;; End of vector xor's
(define_insn "altivec_vsplth"
[(set (match_operand:V8HI 0 "register_operand" "=v")
@ -1313,21 +1371,24 @@
(define_insn "altivec_vspltisb"
[(set (match_operand:V16QI 0 "register_operand" "=v")
(unspec:V16QI [(match_operand:QI 1 "immediate_operand" "i")] 139))]
(unspec:V16QI [(match_operand:QI 1 "immediate_operand" "i")]
UNSPEC_VSPLTISB))]
"TARGET_ALTIVEC"
"vspltisb %0,%1"
[(set_attr "type" "vecperm")])
(define_insn "altivec_vspltish"
[(set (match_operand:V8HI 0 "register_operand" "=v")
(unspec:V8HI [(match_operand:QI 1 "immediate_operand" "i")] 140))]
(unspec:V8HI [(match_operand:QI 1 "immediate_operand" "i")]
UNSPEC_VSPLTISH))]
"TARGET_ALTIVEC"
"vspltish %0,%1"
[(set_attr "type" "vecperm")])
(define_insn "altivec_vspltisw"
[(set (match_operand:V4SI 0 "register_operand" "=v")
(unspec:V4SI [(match_operand:QI 1 "immediate_operand" "i")] 141))]
(unspec:V4SI [(match_operand:QI 1 "immediate_operand" "i")]
UNSPEC_VSPLTISW))]
"TARGET_ALTIVEC"
"vspltisw %0,%1"
[(set_attr "type" "vecperm")])
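
The three define_splits above handle an all-equal constant just outside the
5-bit vspltis* range by splatting half the value and then adding the register
to itself.  The standalone C sketch below (not GCC or .md code) only checks
that arithmetic; pairing vspltis* with a vector add is the intent read from
the splitter, and the exact add mnemonic the backend picks is an assumption.

/* Standalone sketch of the add-self splitter arithmetic; not GCC code.  */
#include <stdio.h>

int
main (void)
{
  /* EASY_VECTOR_15_ADD_SELF covers even values in [0x10, 0x1e].  */
  for (int n = 0x10; n <= 0x1e; n += 2)
    {
      int splat = n >> 1;                /* operands[3] in the splitters */
      printf ("N = %2d: splat %2d, then add self -> %2d\n",
              n, splat, splat + splat);
    }
  return 0;
}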

gcc/config/rs6000/rs6000-protos.h

@ -54,6 +54,8 @@ extern int got_operand PARAMS ((rtx, enum machine_mode));
extern int got_no_const_operand PARAMS ((rtx, enum machine_mode));
extern int num_insns_constant PARAMS ((rtx, enum machine_mode));
extern int easy_fp_constant PARAMS ((rtx, enum machine_mode));
extern int easy_vector_constant PARAMS ((rtx, enum machine_mode));
extern const char *output_vec_const_move PARAMS ((rtx *));
extern int zero_fp_constant PARAMS ((rtx, enum machine_mode));
extern int zero_constant PARAMS ((rtx, enum machine_mode));
extern int volatile_mem_operand PARAMS ((rtx, enum machine_mode));

gcc/config/rs6000/rs6000.c

@ -55,6 +55,13 @@ Boston, MA 02111-1307, USA. */
#define TARGET_NO_PROTOTYPE 0
#endif
#define EASY_VECTOR_15(n, x, y) ((n) >= -16 && (n) <= 15 \
&& easy_vector_same (x, y))
#define EASY_VECTOR_15_ADD_SELF(n, x, y) ((n) >= 0x10 && (n) <= 0x1e \
&& !((n) & 1) \
&& easy_vector_same (x, y))
#define min(A,B) ((A) < (B) ? (A) : (B))
#define max(A,B) ((A) > (B) ? (A) : (B))
@ -266,7 +273,8 @@ static int first_altivec_reg_to_save PARAMS ((void));
static unsigned int compute_vrsave_mask PARAMS ((void));
static void is_altivec_return_reg PARAMS ((rtx, void *));
static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
static int easy_vector_constant PARAMS ((rtx));
int easy_vector_constant PARAMS ((rtx, enum machine_mode));
static int easy_vector_same PARAMS ((rtx, enum machine_mode));
static bool is_ev64_opaque_type PARAMS ((tree));
static rtx rs6000_dwarf_register_span PARAMS ((rtx));
@ -1416,48 +1424,149 @@ easy_fp_constant (op, mode)
abort ();
}
/* Return 1 if the operand is a CONST_INT and can be put into a
register with one instruction. */
/* Return non zero if all elements of a vector have the same value. */
static int
easy_vector_constant (op)
easy_vector_same (op, mode)
rtx op;
enum machine_mode mode ATTRIBUTE_UNUSED;
{
rtx elt;
int units, i;
if (GET_CODE (op) != CONST_VECTOR)
return 0;
int units, i, cst;
units = CONST_VECTOR_NUNITS (op);
/* We can generate 0 easily. Look for that. */
for (i = 0; i < units; ++i)
cst = INTVAL (CONST_VECTOR_ELT (op, 0));
for (i = 1; i < units; ++i)
if (INTVAL (CONST_VECTOR_ELT (op, i)) != cst)
break;
if (i == units)
return 1;
return 0;
}
/* Return 1 if the operand is a CONST_INT and can be put into a
register without using memory. */
int
easy_vector_constant (op, mode)
rtx op;
enum machine_mode mode;
{
int cst, cst2;
if (GET_CODE (op) != CONST_VECTOR
|| (!TARGET_ALTIVEC
&& !TARGET_SPE))
return 0;
if (zero_constant (op, mode)
&& ((TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
|| (TARGET_SPE && SPE_VECTOR_MODE (mode))))
return 1;
if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
return 0;
cst = INTVAL (CONST_VECTOR_ELT (op, 0));
cst2 = INTVAL (CONST_VECTOR_ELT (op, 1));
/* Limit SPE vectors to 15 bits signed. These we can generate with:
li r0, CONSTANT1
evmergelo r0, r0, r0
li r0, CONSTANT2
I don't know how efficient it would be to allow bigger constants,
considering we'll have an extra 'ori' for every 'li'. I doubt 5
instructions is better than a 64-bit memory load, but I don't
have the e500 timing specs. */
if (TARGET_SPE && mode == V2SImode
&& cst >= -0x7fff && cst <= 0x7fff
&& cst2 >= -0x7fff && cst <= 0x7fff)
return 1;
if (TARGET_ALTIVEC && EASY_VECTOR_15 (cst, op, mode))
return 1;
if (TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode))
return 1;
return 0;
}
/* Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF. */
int
easy_vector_constant_add_self (op, mode)
rtx op;
enum machine_mode mode;
{
int cst;
if (!easy_vector_constant (op, mode))
return 0;
cst = INTVAL (CONST_VECTOR_ELT (op, 0));
return TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode);
}
const char *
output_vec_const_move (operands)
rtx *operands;
{
int cst, cst2;
enum machine_mode mode;
rtx dest, vec;
dest = operands[0];
vec = operands[1];
cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
mode = GET_MODE (dest);
if (TARGET_ALTIVEC)
{
elt = CONST_VECTOR_ELT (op, i);
/* We could probably simplify this by just checking for equality
with CONST0_RTX for the current mode, but let's be safe
instead. */
switch (GET_CODE (elt))
if (zero_constant (vec, mode))
return "vxor %0,%0,%0";
else if (EASY_VECTOR_15 (cst, vec, mode))
{
case CONST_INT:
if (INTVAL (elt) != 0)
return 0;
break;
case CONST_DOUBLE:
if (CONST_DOUBLE_LOW (elt) != 0 || CONST_DOUBLE_HIGH (elt) != 0)
return 0;
break;
default:
return 0;
operands[1] = GEN_INT (cst);
switch (mode)
{
case V4SImode:
return "vspltisw %0,%1";
case V8HImode:
return "vspltish %0,%1";
case V16QImode:
return "vspltisb %0,%1";
default:
abort ();
}
}
else if (EASY_VECTOR_15_ADD_SELF (cst, vec, mode))
return "#";
else
abort ();
}
/* We could probably generate a few other constants trivially, but
gcc doesn't generate them yet. FIXME later. */
return 1;
if (TARGET_SPE)
{
/* Vector constant 0 is handled as a splitter of V2SI, and in the
pattern of V1DI, V4HI, and V2SF.
FIXME: We should probabl return # and add post reload
splitters for these, but this way is so easy ;-).
*/
operands[1] = GEN_INT (cst);
operands[2] = GEN_INT (cst2);
if (cst == cst2)
return "li %0,%1\n\tevmergelo %0,%0,%0";
else
return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
}
abort ();
}
/* Return 1 if the operand is the constant 0. This works for scalars
@ -1990,6 +2099,11 @@ input_operand (op, mode)
|| GET_CODE (op) == CONST_DOUBLE))
return 1;
/* Allow easy vector constants. */
if (GET_CODE (op) == CONST_VECTOR
&& easy_vector_constant (op, mode))
return 1;
/* For floating-point or multi-word mode, the only remaining valid type
is a register. */
if (GET_MODE_CLASS (mode) == MODE_FLOAT
@ -2744,7 +2858,7 @@ rs6000_emit_move (dest, source, mode)
case V2SImode:
case V1DImode:
if (CONSTANT_P (operands[1])
&& !easy_vector_constant (operands[1]))
&& !easy_vector_constant (operands[1], mode))
operands[1] = force_const_mem (mode, operands[1]);
break;
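
For reference, the standalone C sketch below (not GCC code) mirrors the value
ranges the new predicates accept for an all-equal integer vector: the signed
5-bit vspltis* range of EASY_VECTOR_15, the even add-self range of
EASY_VECTOR_15_ADD_SELF, and the 15-bit signed range used for SPE li.  It
assumes easy_vector_same has already confirmed that every element is equal.

/* Standalone sketch of the easy_vector_constant ranges; not GCC code.  */
#include <stdio.h>

static int splat_15 (int n)       { return n >= -16 && n <= 15; }
static int splat_add_self (int n) { return n >= 0x10 && n <= 0x1e && !(n & 1); }
static int spe_li_range (int n)   { return n >= -0x7fff && n <= 0x7fff; }

int
main (void)
{
  int tests[] = { 0, 5, -16, 15, 16, 24, 30, 31, -17, 0x7fff };
  unsigned int i;

  for (i = 0; i < sizeof tests / sizeof tests[0]; i++)
    printf ("%6d: vspltis*=%d  add-self=%d  SPE-li=%d\n",
            tests[i], splat_15 (tests[i]),
            splat_add_self (tests[i]), spe_li_range (tests[i]));
  return 0;
}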

gcc/config/rs6000/rs6000.h

@ -1292,6 +1292,7 @@ enum reg_class
'S' is a constant that can be placed into a 64-bit mask operand
'T' is a constant that can be placed into a 32-bit mask operand
'U' is for V.4 small data references.
'W' is a vector constant that can be easily generated (no mem refs).
't' is for AND masks that can be performed by two rldic{l,r} insns. */
#define EXTRA_CONSTRAINT(OP, C) \
@ -1305,6 +1306,7 @@ enum reg_class
&& (fixed_regs[CR0_REGNO] \
|| !logical_operand (OP, DImode)) \
&& !mask64_operand (OP, DImode)) \
: (C) == 'W' ? (easy_vector_constant (OP, GET_MODE (OP))) \
: 0)
/* Given an rtx X being reloaded into a reg required to be
@ -2740,6 +2742,8 @@ extern char rs6000_reg_names[][8]; /* register names (0 vs. %r0). */
{"got_operand", {SYMBOL_REF, CONST, LABEL_REF}}, \
{"got_no_const_operand", {SYMBOL_REF, LABEL_REF}}, \
{"easy_fp_constant", {CONST_DOUBLE}}, \
{"easy_vector_constant", {CONST_VECTOR}}, \
{"easy_vector_constant_add_self", {CONST_VECTOR}}, \
{"zero_fp_constant", {CONST_DOUBLE}}, \
{"reg_or_mem_operand", {SUBREG, MEM, REG}}, \
{"lwa_operand", {SUBREG, MEM, REG}}, \

gcc/config/rs6000/spe.md

@ -2147,36 +2147,6 @@
[(set_attr "type" "vecstore")
(set_attr "length" "4")])
;; SPE vector clears
(define_insn "*movv2si_const0"
[(set (match_operand:V2SI 0 "gpc_reg_operand" "=r")
(match_operand:V2SI 1 "zero_constant" ""))]
"TARGET_SPE"
"evxor %0,%0,%0"
[(set_attr "type" "vecsimple")])
(define_insn "*movv2sf_const0"
[(set (match_operand:V2SF 0 "gpc_reg_operand" "=r")
(match_operand:V2SF 1 "zero_constant" ""))]
"TARGET_SPE"
"evxor %0,%0,%0"
[(set_attr "type" "vecsimple")])
(define_insn "*movv4hi_const0"
[(set (match_operand:V4HI 0 "gpc_reg_operand" "=r")
(match_operand:V4HI 1 "zero_constant" ""))]
"TARGET_SPE"
"evxor %0,%0,%0"
[(set_attr "type" "vecsimple")])
(define_insn "*movv1di_const0"
[(set (match_operand:V1DI 0 "gpc_reg_operand" "=r")
(match_operand:V1DI 1 "zero_constant" ""))]
"TARGET_SPE"
"evxor %0,%0,%0"
[(set_attr "type" "vecsimple")])
;; Vector move instructions.
(define_expand "movv2si"
@ -2185,16 +2155,31 @@
"TARGET_SPE"
"{ rs6000_emit_move (operands[0], operands[1], V2SImode); DONE; }")
(define_insn "*movv2si_internal"
[(set (match_operand:V2SI 0 "nonimmediate_operand" "=m,r,r")
(match_operand:V2SI 1 "input_operand" "r,m,r"))]
[(set (match_operand:V2SI 0 "nonimmediate_operand" "=m,r,r,r")
(match_operand:V2SI 1 "input_operand" "r,m,r,W"))]
"TARGET_SPE"
"@
evstdd%X0 %1,%y0
evldd%X1 %0,%y1
evor %0,%1,%1"
[(set_attr "type" "vecload,vecload,vecsimple")])
"*
{
switch (which_alternative)
{
case 0: return \"evstdd%X0 %1,%y0\";
case 1: return \"evldd%X1 %0,%y1\";
case 2: return \"evor %0,%1,%1\";
case 3: return output_vec_const_move (operands);
default: abort ();
}
}"
[(set_attr "type" "vecload,vecstore,*,*")
(set_attr "length" "*,*,*,12")])
(define_split
[(set (match_operand:V2SI 0 "register_operand" "")
(match_operand:V2SI 1 "zero_constant" ""))]
"TARGET_SPE && reload_completed"
[(set (match_dup 0)
(xor:V2SI (match_dup 0) (match_dup 0)))]
"")
(define_expand "movv1di"
[(set (match_operand:V1DI 0 "nonimmediate_operand" "")
@ -2203,14 +2188,16 @@
"{ rs6000_emit_move (operands[0], operands[1], V1DImode); DONE; }")
(define_insn "*movv1di_internal"
[(set (match_operand:V1DI 0 "nonimmediate_operand" "=m,r,r")
(match_operand:V1DI 1 "input_operand" "r,m,r"))]
[(set (match_operand:V1DI 0 "nonimmediate_operand" "=m,r,r,r")
(match_operand:V1DI 1 "input_operand" "r,m,r,W"))]
"TARGET_SPE"
"@
evstdd%X0 %1,%y0
evldd%X1 %0,%y1
evor %0,%1,%1"
[(set_attr "type" "vecload,vecload,vecsimple")])
evor %0,%1,%1
evxor %0,%0,%0"
[(set_attr "type" "vecload,vecstore,*,*")
(set_attr "length" "*,*,*,*")])
(define_expand "movv4hi"
[(set (match_operand:V4HI 0 "nonimmediate_operand" "")
@ -2226,7 +2213,7 @@
evstdd%X0 %1,%y0
evldd%X1 %0,%y1
evor %0,%1,%1"
[(set_attr "type" "vecload,vecload,vecsimple")])
[(set_attr "type" "vecload")])
(define_expand "movv2sf"
[(set (match_operand:V2SF 0 "nonimmediate_operand" "")
@ -2235,14 +2222,16 @@
"{ rs6000_emit_move (operands[0], operands[1], V2SFmode); DONE; }")
(define_insn "*movv2sf_internal"
[(set (match_operand:V2SF 0 "nonimmediate_operand" "=m,r,r")
(match_operand:V2SF 1 "input_operand" "r,m,r"))]
[(set (match_operand:V2SF 0 "nonimmediate_operand" "=m,r,r,r")
(match_operand:V2SF 1 "input_operand" "r,m,r,W"))]
"TARGET_SPE"
"@
evstdd%X0 %1,%y0
evldd%X1 %0,%y1
evor %0,%1,%1"
[(set_attr "type" "vecload,vecload,vecsimple")])
evor %0,%1,%1
evxor %0,%0,%0"
[(set_attr "type" "vecload,vecstore,*,*")
(set_attr "length" "*,*,*,*")])
(define_insn "spe_evmwhssfaa"
[(set (match_operand:V2SI 0 "gpc_reg_operand" "=r")
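
Tying the new V2SI constant alternative back to output_vec_const_move in
rs6000.c: on SPE an all-equal constant takes two instructions (li plus
evmergelo) and a constant with two different elements takes three, which is
what the 12-byte length on that alternative allows for.  The standalone C
sketch below (not GCC code) just reproduces that template choice.

/* Standalone sketch of the SPE branch of output_vec_const_move; not GCC code.  */
#include <stdio.h>

static const char *
spe_const_move_template (int cst, int cst2)
{
  if (cst == cst2)
    return "li %0,%1\n\tevmergelo %0,%0,%0";                /* 2 insns */
  return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";      /* 3 insns */
}

int
main (void)
{
  printf ("{ 7, 7 }:\n%s\n\n", spe_const_move_template (7, 7));
  printf ("{ 7, 9 }:\n%s\n",   spe_const_move_template (7, 9));
  return 0;
}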

gcc/expr.c

@ -155,7 +155,7 @@ static rtx clear_storage_via_libcall PARAMS ((rtx, rtx));
static tree clear_storage_libcall_fn PARAMS ((int));
static rtx compress_float_constant PARAMS ((rtx, rtx));
static rtx get_subtarget PARAMS ((rtx));
static int is_zeros_p PARAMS ((tree));
static int is_zeros_p PARAMS ((tree));
static int mostly_zeros_p PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
HOST_WIDE_INT, enum machine_mode,
@ -175,6 +175,7 @@ static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
#endif
static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
static rtx const_vector_from_tree PARAMS ((tree));
/* Record for each mode whether we can move a register directly to or
from an object of that mode in memory. If we can't, we won't try
@ -6842,6 +6843,9 @@ expand_expr (exp, target, tmode, modifier)
return temp;
case VECTOR_CST:
return const_vector_from_tree (exp);
case CONST_DECL:
return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
@ -10329,4 +10333,41 @@ vector_mode_valid_p (mode)
return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
}
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
static rtx
const_vector_from_tree (exp)
tree exp;
{
rtvec v;
int units, i;
tree link, elt;
enum machine_mode inner, mode;
mode = TYPE_MODE (TREE_TYPE (exp));
if (is_zeros_p (exp))
return CONST0_RTX (mode);
units = GET_MODE_NUNITS (mode);
inner = GET_MODE_INNER (mode);
v = rtvec_alloc (units);
link = TREE_VECTOR_CST_ELTS (exp);
for (i = 0; link; link = TREE_CHAIN (link), ++i)
{
elt = TREE_VALUE (link);
if (TREE_CODE (elt) == REAL_CST)
RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
inner);
else
RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
TREE_INT_CST_HIGH (elt),
inner);
}
return gen_rtx_raw_CONST_VECTOR (mode, v);
}
#include "gt-expr.h"

gcc/testsuite/ChangeLog

@ -1,3 +1,7 @@
2003-03-01 Aldy Hernandez <aldyh@redhat.com>
* testsuite/gcc.c-torture/execute/simd-3.c: New.
2003-03-31 Mark Mitchell <mark@codesourcery.com>
PR c/9936

gcc/varasm.c

@ -4015,6 +4015,23 @@ output_constant (exp, size, align)
thissize = MIN (TREE_STRING_LENGTH (exp), size);
assemble_string (TREE_STRING_POINTER (exp), thissize);
}
else if (TREE_CODE (exp) == VECTOR_CST)
{
int elt_size;
tree link;
unsigned int nalign;
enum machine_mode inner;
inner = GET_MODE_INNER (TYPE_MODE (TREE_TYPE (exp)));
nalign = MIN (align, GET_MODE_ALIGNMENT (inner));
elt_size = GET_MODE_UNIT_SIZE (TYPE_MODE (TREE_TYPE (exp)));
link = TREE_VECTOR_CST_ELTS (exp);
output_constant (TREE_VALUE (link), elt_size, align);
while ((link = TREE_CHAIN (link)) != NULL)
output_constant (TREE_VALUE (link), elt_size, nalign);
}
else
abort ();
break;
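
A rough picture of what the new VECTOR_CST branch does (standalone C sketch,
not GCC code): each element is emitted with the element size, the first one
at the alignment passed in and the rest at the reduced per-element alignment.
The values and the ".long" spelling below are illustrative only.

/* Standalone sketch of the element loop above; not GCC code.  */
#include <stdio.h>

static void
emit_elt (int value, int size, unsigned int align_bits)
{
  printf ("\t.long %d\t/* %d bytes, align %u bits */\n", value, size, align_bits);
}

int
main (void)
{
  int elts[4] = { 1, 2, 3, 4 };          /* a hypothetical V4SI constant */
  int elt_size = 4;
  unsigned int align = 128, nalign = 32; /* vector vs. element alignment */
  int i;

  emit_elt (elts[0], elt_size, align);
  for (i = 1; i < 4; i++)
    emit_elt (elts[i], elt_size, nalign);
  return 0;
}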