From 100c45611f93bea9e0d7971cd8f8988d4451a5a7 Mon Sep 17 00:00:00 2001 From: Aldy Hernandez Date: Mon, 11 Feb 2002 06:15:37 +0000 Subject: [PATCH] rs6000.c (altivec_init_builtins): Handle __builtin_altivec_abs*. 2002-02-06 Aldy Hernandez * config/rs6000/rs6000.c (altivec_init_builtins): Handle __builtin_altivec_abs*. (bdesc_abs): New. * config/rs6000/rs6000.h (rs6000_builtins): Add ALTIVEC_BUILTIN_ABS*. * config/rs6000/altivec.h: Use const char for builtins expecting literals. (vec_abs): New versions for C and C++. (vec_abss): Same. From-SVN: r49671 --- gcc/config/rs6000/altivec.h | 182 ++++++++++++++++++++++++------------ gcc/config/rs6000/rs6000.c | 99 +++++++++++++++++++- gcc/config/rs6000/rs6000.h | 9 +- gcc/config/rs6000/rs6000.md | 71 ++++++++++++++ 4 files changed, 301 insertions(+), 60 deletions(-) diff --git a/gcc/config/rs6000/altivec.h b/gcc/config/rs6000/altivec.h index a0ba92f68a4..a25206d60f6 100644 --- a/gcc/config/rs6000/altivec.h +++ b/gcc/config/rs6000/altivec.h @@ -70,6 +70,52 @@ extern int __altivec_link_error_invalid_argument (); #ifdef __cplusplus +/* vec_abs */ + +inline vector signed char +vec_abs (vector signed char a1) +{ + return __builtin_altivec_abs_v16qi (a1); +} + +inline vector signed short +vec_abs (vector signed short a1) +{ + return __builtin_altivec_abs_v8hi (a1); +} + +inline vector signed int +vec_abs (vector signed int a1) +{ + return __builtin_altivec_abs_v4si (a1); +} + +inline vector float +vec_abs (vector float a1) +{ + return __builtin_altivec_abs_v4sf (a1); +} + +/* vec_abss */ + +inline vector signed char +vec_abss (vector signed char a1) +{ + return __builtin_altivec_abss_v16qi (a1); +} + +inline vector signed short +vec_abss (vector signed short a1) +{ + return __builtin_altivec_abss_v8hi (a1); +} + +inline vector signed int +vec_abss (vector signed int a1) +{ + return __builtin_altivec_abss_v4si (a1); +} + /* vec_add */ inline vector signed char @@ -621,39 +667,39 @@ vec_cmplt (vector float a1, vector float a2) /* vec_ctf */ inline vector float -vec_ctf (vector unsigned int a1, int a2) +vec_ctf (vector unsigned int a1, const char a2) { - return (vector float) __builtin_altivec_vcfux ((vector signed int) a1, (const char) a2); + return (vector float) __builtin_altivec_vcfux ((vector signed int) a1, a2); } inline vector float -vec_ctf (vector signed int a1, int a2) +vec_ctf (vector signed int a1, const char a2) { - return (vector float) __builtin_altivec_vcfsx ((vector signed int) a1, (const char) a2); + return (vector float) __builtin_altivec_vcfsx ((vector signed int) a1, a2); } /* vec_cts */ inline vector signed int -vec_cts (vector float a1, int a2) +vec_cts (vector float a1, const char a2) { - return (vector signed int) __builtin_altivec_vctsxs ((vector float) a1, (const char) a2); + return (vector signed int) __builtin_altivec_vctsxs ((vector float) a1, a2); } /* vec_ctu */ inline vector unsigned int -vec_ctu (vector float a1, int a2) +vec_ctu (vector float a1, const char a2) { - return (vector unsigned int) __builtin_altivec_vctuxs ((vector float) a1, (const char) a2); + return (vector unsigned int) __builtin_altivec_vctuxs ((vector float) a1, a2); } /* vec_dss */ inline void -vec_dss (int a1) +vec_dss (const char a1) { - __builtin_altivec_dss ((const char) a1); + __builtin_altivec_dss (a1); } /* vec_dssall */ @@ -667,33 +713,33 @@ vec_dssall () /* vec_dst */ inline void -vec_dst (void *a1, int a2, int a3) +vec_dst (void *a1, int a2, const char a3) { - __builtin_altivec_dst ((void *) a1, a2, (const char) a3); + __builtin_altivec_dst 
((void *) a1, a2, a3); } /* vec_dstst */ inline void -vec_dstst (void *a1, int a2, int a3) +vec_dstst (void *a1, int a2, const char a3) { - __builtin_altivec_dstst ((void *) a1, a2, (const char) a3); + __builtin_altivec_dstst ((void *) a1, a2, a3); } /* vec_dststt */ inline void -vec_dststt (void *a1, int a2, int a3) +vec_dststt (void *a1, int a2, const char a3) { - __builtin_altivec_dststt ((void *) a1, a2, (const char) a3); + __builtin_altivec_dststt ((void *) a1, a2, a3); } /* vec_dstt */ inline void -vec_dstt (void *a1, int a2, int a3) +vec_dstt (void *a1, int a2, const char a3) { - __builtin_altivec_dstt ((void *) a1, a2, (const char) a3); + __builtin_altivec_dstt ((void *) a1, a2, a3); } /* vec_expte */ @@ -1923,45 +1969,45 @@ vec_sl (vector unsigned int a1, vector unsigned int a2) /* vec_sld */ inline vector float -vec_sld (vector float a1, vector float a2, int a3) +vec_sld (vector float a1, vector float a2, const char a3) { - return (vector float) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, (const char) a3); + return (vector float) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, a3); } inline vector signed int -vec_sld (vector signed int a1, vector signed int a2, int a3) +vec_sld (vector signed int a1, vector signed int a2, const char a3) { - return (vector signed int) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, (const char) a3); + return (vector signed int) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, a3); } inline vector unsigned int -vec_sld (vector unsigned int a1, vector unsigned int a2, int a3) +vec_sld (vector unsigned int a1, vector unsigned int a2, const char a3) { - return (vector unsigned int) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, (const char) a3); + return (vector unsigned int) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, a3); } inline vector signed short -vec_sld (vector signed short a1, vector signed short a2, int a3) +vec_sld (vector signed short a1, vector signed short a2, const char a3) { - return (vector signed short) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, (const char) a3); + return (vector signed short) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, a3); } inline vector unsigned short -vec_sld (vector unsigned short a1, vector unsigned short a2, int a3) +vec_sld (vector unsigned short a1, vector unsigned short a2, const char a3) { - return (vector unsigned short) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, (const char) a3); + return (vector unsigned short) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, a3); } inline vector signed char -vec_sld (vector signed char a1, vector signed char a2, int a3) +vec_sld (vector signed char a1, vector signed char a2, const char a3) { - return (vector signed char) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, (const char) a3); + return (vector signed char) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, a3); } inline vector unsigned char -vec_sld (vector unsigned char a1, vector unsigned char a2, int a3) +vec_sld (vector unsigned char a1, vector unsigned char a2, const char a3) { - return (vector unsigned char) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, (const char) a3); + return (vector unsigned 
char) __builtin_altivec_vsldoi_4si ((vector signed int) a1, (vector signed int) a2, a3); } /* vec_sll */ @@ -2163,93 +2209,93 @@ vec_slo (vector unsigned char a1, vector unsigned char a2) /* vec_splat */ inline vector signed char -vec_splat (vector signed char a1, int a2) +vec_splat (vector signed char a1, const char a2) { - return (vector signed char) __builtin_altivec_vspltb ((vector signed char) a1, (const char) a2); + return (vector signed char) __builtin_altivec_vspltb ((vector signed char) a1, a2); } inline vector unsigned char -vec_splat (vector unsigned char a1, int a2) +vec_splat (vector unsigned char a1, const char a2) { - return (vector unsigned char) __builtin_altivec_vspltb ((vector signed char) a1, (const char) a2); + return (vector unsigned char) __builtin_altivec_vspltb ((vector signed char) a1, a2); } inline vector signed short -vec_splat (vector signed short a1, int a2) +vec_splat (vector signed short a1, const char a2) { - return (vector signed short) __builtin_altivec_vsplth ((vector signed short) a1, (const char) a2); + return (vector signed short) __builtin_altivec_vsplth ((vector signed short) a1, a2); } inline vector unsigned short -vec_splat (vector unsigned short a1, int a2) +vec_splat (vector unsigned short a1, const char a2) { - return (vector unsigned short) __builtin_altivec_vsplth ((vector signed short) a1, (const char) a2); + return (vector unsigned short) __builtin_altivec_vsplth ((vector signed short) a1, a2); } inline vector float -vec_splat (vector float a1, int a2) +vec_splat (vector float a1, const char a2) { - return (vector float) __builtin_altivec_vspltw ((vector signed int) a1, (const char) a2); + return (vector float) __builtin_altivec_vspltw ((vector signed int) a1, a2); } inline vector signed int -vec_splat (vector signed int a1, int a2) +vec_splat (vector signed int a1, const char a2) { - return (vector signed int) __builtin_altivec_vspltw ((vector signed int) a1, (const char) a2); + return (vector signed int) __builtin_altivec_vspltw ((vector signed int) a1, a2); } inline vector unsigned int -vec_splat (vector unsigned int a1, int a2) +vec_splat (vector unsigned int a1, const char a2) { - return (vector unsigned int) __builtin_altivec_vspltw ((vector signed int) a1, (const char) a2); + return (vector unsigned int) __builtin_altivec_vspltw ((vector signed int) a1, a2); } /* vec_splat_s8 */ inline vector signed char -vec_splat_s8 (int a1) +vec_splat_s8 (const char a1) { - return (vector signed char) __builtin_altivec_vspltisb ((const char) a1); + return (vector signed char) __builtin_altivec_vspltisb (a1); } /* vec_splat_s16 */ inline vector signed short -vec_splat_s16 (int a1) +vec_splat_s16 (const char a1) { - return (vector signed short) __builtin_altivec_vspltish ((const char) a1); + return (vector signed short) __builtin_altivec_vspltish (a1); } /* vec_splat_s32 */ inline vector signed int -vec_splat_s32 (int a1) +vec_splat_s32 (const char a1) { - return (vector signed int) __builtin_altivec_vspltisw ((const char) a1); + return (vector signed int) __builtin_altivec_vspltisw (a1); } /* vec_splat_u8 */ inline vector unsigned char -vec_splat_u8 (int a1) +vec_splat_u8 (const char a1) { - return (vector unsigned char) __builtin_altivec_vspltisb ((const char) a1); + return (vector unsigned char) __builtin_altivec_vspltisb (a1); } /* vec_splat_u16 */ inline vector unsigned short -vec_splat_u16 (int a1) +vec_splat_u16 (const char a1) { - return (vector unsigned short) __builtin_altivec_vspltish ((const char) a1); + return (vector unsigned short) 
__builtin_altivec_vspltish (a1); } /* vec_splat_u32 */ inline vector unsigned int -vec_splat_u32 (int a1) +vec_splat_u32 (const char a1) { - return (vector unsigned int) __builtin_altivec_vspltisw ((const char) a1); + return (vector unsigned int) __builtin_altivec_vspltisw (a1); } /* vec_sr */ @@ -4111,6 +4157,26 @@ struct __vec_step_help /* "... and so I think no man in a century will suffer as greatly as you will." */ +#define vec_abs(a) \ + __ch (__un_args_eq (vector signed char, a), \ + (vector signed char) __builtin_altivec_abs_v16qi ((vector signed char) a), \ + __ch (__un_args_eq (vector signed short, a), \ + (vector signed short) __builtin_altivec_abs_v8hi ((vector signed short) a), \ + __ch (__un_args_eq (vector signed int, a), \ + (vector signed int) __builtin_altivec_abs_v4si ((vector signed int) a), \ + __ch (__un_args_eq (vector float, a), \ + (vector float) __builtin_altivec_abs_v4sf ((vector float) a), \ + __altivec_link_error_invalid_argument ())))) + +#define vec_abss(a) \ + __ch (__un_args_eq (vector signed char, a), \ + (vector signed char) __builtin_altivec_abss_v16qi ((vector signed char) a), \ + __ch (__un_args_eq (vector signed short, a), \ + (vector signed short) __builtin_altivec_abss_v8hi ((vector signed short) a), \ + __ch (__un_args_eq (vector signed int, a), \ + (vector signed int) __builtin_altivec_abss_v4si ((vector signed int) a), \ + __altivec_link_error_invalid_argument ()))) + #define vec_step(t) \ __ch (__builtin_types_compatible_p (t, vector signed int), 4, \ __ch (__builtin_types_compatible_p (t, vector unsigned int), 4, \ diff --git a/gcc/config/rs6000/rs6000.c b/gcc/config/rs6000/rs6000.c index 23a05f48ee0..5817165cbb5 100644 --- a/gcc/config/rs6000/rs6000.c +++ b/gcc/config/rs6000/rs6000.c @@ -163,6 +163,7 @@ static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int static rtx altivec_expand_builtin PARAMS ((tree, rtx)); static rtx altivec_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx)); static rtx altivec_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx)); +static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx)); static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx)); static rtx altivec_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx)); static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree)); @@ -3402,6 +3403,19 @@ static const struct builtin_description_predicates bdesc_altivec_preds[] = { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P } }; +/* ABS* opreations. */ + +static const struct builtin_description bdesc_abs[] = +{ + { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI }, + { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI }, + { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF }, + { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI }, + { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI }, + { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI }, + { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI } +}; + /* Simple unary operations: VECb = foo (unsigned literal) or VECb = foo (VECa). 
*/ @@ -3458,6 +3472,41 @@ altivec_expand_unop_builtin (icode, arglist, target) return target; } +static rtx +altivec_expand_abs_builtin (icode, arglist, target) + enum insn_code icode; + tree arglist; + rtx target; +{ + rtx pat, scratch1, scratch2; + tree arg0 = TREE_VALUE (arglist); + rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0); + enum machine_mode tmode = insn_data[icode].operand[0].mode; + enum machine_mode mode0 = insn_data[icode].operand[1].mode; + + /* If we have invalid arguments, bail out before generating bad rtl. */ + if (arg0 == error_mark_node) + return NULL_RTX; + + if (target == 0 + || GET_MODE (target) != tmode + || ! (*insn_data[icode].operand[0].predicate) (target, tmode)) + target = gen_reg_rtx (tmode); + + if (! (*insn_data[icode].operand[1].predicate) (op0, mode0)) + op0 = copy_to_mode_reg (mode0, op0); + + scratch1 = gen_reg_rtx (mode0); + scratch2 = gen_reg_rtx (mode0); + + pat = GEN_FCN (icode) (target, op0, scratch1, scratch2); + if (! pat) + return 0; + emit_insn (pat); + + return target; +} + static rtx altivec_expand_binop_builtin (icode, arglist, target) enum insn_code icode; @@ -3936,6 +3985,12 @@ altivec_expand_builtin (exp, target) return NULL_RTX; } + /* Expand abs* operations. */ + d = (struct builtin_description *) bdesc_abs; + for (i = 0; i < sizeof (bdesc_abs) / sizeof *d; i++, d++) + if (d->code == fcode) + return altivec_expand_abs_builtin (d->icode, arglist, target); + /* Handle simple unary operations. */ d = (struct builtin_description *) bdesc_1arg; for (i = 0; i < sizeof (bdesc_1arg) / sizeof *d; i++, d++) @@ -4322,6 +4377,18 @@ altivec_init_builtins (void) tree_cons (NULL_TREE, V4SF_type_node, endlink))); + tree v4si_ftype_v4si + = build_function_type (V4SI_type_node, + tree_cons (NULL_TREE, V4SI_type_node, endlink)); + + tree v8hi_ftype_v8hi + = build_function_type (V8HI_type_node, + tree_cons (NULL_TREE, V8HI_type_node, endlink)); + + tree v16qi_ftype_v16qi + = build_function_type (V16QI_type_node, + tree_cons (NULL_TREE, V16QI_type_node, endlink)); + tree v8hi_ftype_v16qi_v16qi = build_function_type (V8HI_type_node, tree_cons (NULL_TREE, V16QI_type_node, @@ -4466,7 +4533,7 @@ altivec_init_builtins (void) def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX); def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX); def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL); - + /* Add the simple ternary operators. */ d = (struct builtin_description *) bdesc_3arg; for (i = 0; i < sizeof (bdesc_3arg) / sizeof *d; i++, d++) @@ -4713,6 +4780,36 @@ altivec_init_builtins (void) def_builtin (d->mask, d->name, type, d->code); } + /* Initialize the abs* operators. */ + d = (struct builtin_description *) bdesc_abs; + for (i = 0; i < sizeof (bdesc_abs) / sizeof *d; i++, d++) + { + enum machine_mode mode0; + tree type; + + mode0 = insn_data[d->icode].operand[0].mode; + + switch (mode0) + { + case V4SImode: + type = v4si_ftype_v4si; + break; + case V8HImode: + type = v8hi_ftype_v8hi; + break; + case V16QImode: + type = v16qi_ftype_v16qi; + break; + case V4SFmode: + type = v4sf_ftype_v4sf; + break; + default: + abort (); + } + + def_builtin (d->mask, d->name, type, d->code); + } + /* Add the simple unary operators. 
*/ d = (struct builtin_description *) bdesc_1arg; for (i = 0; i < sizeof (bdesc_1arg) / sizeof *d; i++, d++) diff --git a/gcc/config/rs6000/rs6000.h b/gcc/config/rs6000/rs6000.h index 22f204235ee..815ab2a2188 100644 --- a/gcc/config/rs6000/rs6000.h +++ b/gcc/config/rs6000/rs6000.h @@ -2971,5 +2971,12 @@ enum rs6000_builtins ALTIVEC_BUILTIN_VCMPGTSW_P, ALTIVEC_BUILTIN_VCMPGTUB_P, ALTIVEC_BUILTIN_VCMPGTUH_P, - ALTIVEC_BUILTIN_VCMPGTUW_P + ALTIVEC_BUILTIN_VCMPGTUW_P, + ALTIVEC_BUILTIN_ABSS_V4SI, + ALTIVEC_BUILTIN_ABSS_V8HI, + ALTIVEC_BUILTIN_ABSS_V16QI, + ALTIVEC_BUILTIN_ABS_V4SI, + ALTIVEC_BUILTIN_ABS_V4SF, + ALTIVEC_BUILTIN_ABS_V8HI, + ALTIVEC_BUILTIN_ABS_V16QI }; diff --git a/gcc/config/rs6000/rs6000.md b/gcc/config/rs6000/rs6000.md index a1e03038714..04c9ab5abb3 100644 --- a/gcc/config/rs6000/rs6000.md +++ b/gcc/config/rs6000/rs6000.md @@ -15033,6 +15033,7 @@ "TARGET_ALTIVEC" "vspltb %0,%1,%2" [(set_attr "type" "vecperm")]) + (define_insn "altivec_vsplth" [(set (match_operand:V8HI 0 "register_operand" "=v") (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v") @@ -15565,3 +15566,73 @@ "TARGET_ALTIVEC" "stvewx %2,%0,%1" [(set_attr "type" "vecstore")]) + +(define_insn "absv16qi2" + [(set (match_operand:V16QI 0 "register_operand" "=v") + (abs:V16QI (match_operand:V16QI 1 "register_operand" "v"))) + (clobber (match_scratch:V16QI 2 "=v")) + (clobber (match_scratch:V16QI 3 "=v"))] + "TARGET_ALTIVEC" + "vspltisb %2,0\;vsububm %3,%2,%1\;vmaxsb %0,%1,%3" + [(set_attr "type" "altivec") + (set_attr "length" "12")]) + +(define_insn "absv8hi2" + [(set (match_operand:V8HI 0 "register_operand" "=v") + (abs:V8HI (match_operand:V8HI 1 "register_operand" "v"))) + (clobber (match_scratch:V8HI 2 "=v")) + (clobber (match_scratch:V8HI 3 "=v"))] + "TARGET_ALTIVEC" + "vspltisb %2,0\;vsubuhm %3,%2,%1\;vmaxsh %0,%1,%3" + [(set_attr "type" "altivec") + (set_attr "length" "12")]) + +(define_insn "absv4si2" + [(set (match_operand:V4SI 0 "register_operand" "=v") + (abs:V4SI (match_operand:V4SI 1 "register_operand" "v"))) + (clobber (match_scratch:V4SI 2 "=v")) + (clobber (match_scratch:V4SI 3 "=v"))] + "TARGET_ALTIVEC" + "vspltisb %2,0\;vsubuwm %3,%2,%1\;vmaxsw %0,%1,%3" + [(set_attr "type" "altivec") + (set_attr "length" "12")]) + +(define_insn "absv4sf2" + [(set (match_operand:V4SF 0 "register_operand" "=v") + (abs:V4SF (match_operand:V4SF 1 "register_operand" "v"))) + (clobber (match_scratch:V4SF 2 "=v")) + (clobber (match_scratch:V4SF 3 "=v"))] + "TARGET_ALTIVEC" + "vspltisw %2, -1\;vslw %3,%2,%2\;vandc %0,%1,%3" + [(set_attr "type" "altivec") + (set_attr "length" "12")]) + +(define_insn "altivec_abss_v16qi" + [(set (match_operand:V16QI 0 "register_operand" "=v") + (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v")] 210)) + (clobber (match_scratch:V16QI 2 "=v")) + (clobber (match_scratch:V16QI 3 "=v"))] + "TARGET_ALTIVEC" + "vspltisb %2,0\;vsubsbs %3,%2,%1\;vmaxsb %0,%1,%3" + [(set_attr "type" "altivec") + (set_attr "length" "12")]) + +(define_insn "altivec_abss_v8hi" + [(set (match_operand:V8HI 0 "register_operand" "=v") + (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v")] 211)) + (clobber (match_scratch:V8HI 2 "=v")) + (clobber (match_scratch:V8HI 3 "=v"))] + "TARGET_ALTIVEC" + "vspltisb %2,0\;vsubshs %3,%2,%1\;vmaxsh %0,%1,%3" + [(set_attr "type" "altivec") + (set_attr "length" "12")]) + +(define_insn "altivec_abss_v4si" + [(set (match_operand:V4SI 0 "register_operand" "=v") + (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v")] 212)) + (clobber (match_scratch:V4SI 2 "=v")) + 
(clobber (match_scratch:V4SI 3 "=v"))] + "TARGET_ALTIVEC" + "vspltisb %2,0\;vsubsws %3,%2,%1\;vmaxsw %0,%1,%3" + [(set_attr "type" "altivec") + (set_attr "length" "12")])
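
A minimal usage sketch, not part of the commit: it shows the vec_abs/vec_abss entry points this patch adds, as they could be exercised from user C code. The file name abs-demo.c and the plain "gcc -maltivec abs-demo.c" invocation are assumptions; everything else uses only interfaces that appear in this patch or that already exist in <altivec.h>.

/* abs-demo.c -- hypothetical demo of the vec_abs/vec_abss interfaces
   added by this patch.  Build (assumed): gcc -maltivec abs-demo.c  */

#include <altivec.h>

int
main (void)
{
  /* vec_splat_s32, vec_splat_s16 and vec_ctf already exist in altivec.h;
     this patch only tightens their literal arguments to "const char".  */
  vector signed int   vi = vec_splat_s32 (-3);   /* { -3, -3, -3, -3 } */
  vector signed short vh = vec_splat_s16 (-8);   /* eight elements of -8 */
  vector float        vf = vec_ctf (vi, 0);      /* { -3.0, -3.0, -3.0, -3.0 } */

  /* vec_abs resolves, via the C macro or the C++ overloads added above,
     to __builtin_altivec_abs_v4si / abs_v4sf; those expand through the new
     absv4si2 (vspltisb/vsubuwm/vmaxsw) and absv4sf2 (vspltisw/vslw/vandc)
     patterns in rs6000.md.  */
  vector signed int   ai = vec_abs (vi);
  vector float        af = vec_abs (vf);

  /* vec_abss is the saturating form (vsub*s* instead of vsub*m), so the
     most negative representable element does not wrap around; as in the
     patch, it is defined only for the signed integer element types and
     has no float variant.  */
  vector signed short ah = vec_abss (vh);

  (void) ai; (void) af; (void) ah;
  return 0;
}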