2012-10-30  James Greenhalgh  <james.greenhalgh@arm.com>
	    Tejas Belagod  <tejas.belagod@arm.com>

	* config/aarch64/aarch64-simd.md
	(aarch64_simd_bsl<mode>_internal): New pattern.
	(aarch64_simd_bsl<mode>): Likewise.
	(aarch64_vcond_internal<mode>): Likewise.
	(vcondu<mode><mode>): Likewise.
	(vcond<mode><mode>): Likewise.
	* config/aarch64/iterators.md (UNSPEC_BSL): Add to define_constants.


Co-Authored-By: Tejas Belagod <tejas.belagod@arm.com>

From-SVN: r192985

gcc/ChangeLog

@@ -1,3 +1,14 @@
2012-10-30  James Greenhalgh  <james.greenhalgh@arm.com>
	    Tejas Belagod  <tejas.belagod@arm.com>

	* config/aarch64/aarch64-simd.md
	(aarch64_simd_bsl<mode>_internal): New pattern.
	(aarch64_simd_bsl<mode>): Likewise.
	(aarch64_vcond_internal<mode>): Likewise.
	(vcondu<mode><mode>): Likewise.
	(vcond<mode><mode>): Likewise.
	* config/aarch64/iterators.md (UNSPEC_BSL): Add to define_constants.

2012-10-30  Richard Biener  <rguenther@suse.de>

	PR tree-optimization/55111

gcc/config/aarch64/aarch64-simd.md

@@ -1467,6 +1467,150 @@
(set_attr "simd_mode" "V2SI")]
)

;; vbsl_* intrinsics may compile to any of vbsl/vbif/vbit depending on register
;; allocation.  For an intrinsic of form:
;;   vD = bsl_* (vS, vN, vM)
;; We can use any of:
;;   bsl vS, vN, vM (if D = S)
;;   bit vD, vN, vS (if D = M, so 1-bits in vS choose bits from vN, else vM)
;;   bif vD, vM, vS (if D = N, so 0-bits in vS choose bits from vM, else vN)
(define_insn "aarch64_simd_bsl<mode>_internal"
[(set (match_operand:VDQ 0 "register_operand" "=w,w,w")
(unspec:VDQ [(match_operand:VDQ 1 "register_operand" " 0,w,w")
(match_operand:VDQ 2 "register_operand" " w,w,0")
(match_operand:VDQ 3 "register_operand" " w,0,w")]
UNSPEC_BSL))]
"TARGET_SIMD"
"@
bsl\\t%0.<Vbtype>, %2.<Vbtype>, %3.<Vbtype>
bit\\t%0.<Vbtype>, %2.<Vbtype>, %1.<Vbtype>
bif\\t%0.<Vbtype>, %3.<Vbtype>, %1.<Vbtype>"
)
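
The three alternatives above compute the same bitwise select and differ only
in which input the destination register is tied to.  As a rough scalar sketch
of that shared semantics (illustrative C only, not part of the patch; the
64-bit lane width is arbitrary):

#include <stdint.h>

/* Bits set in SEL take the corresponding bit from N; bits clear in SEL take
   it from M.  BSL has SEL in the destination, BIT has M in the destination,
   BIF has N in the destination; all three produce this value.  */
static inline uint64_t
bit_select (uint64_t sel, uint64_t n, uint64_t m)
{
  return (sel & n) | (~sel & m);
}
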
(define_expand "aarch64_simd_bsl<mode>"
[(set (match_operand:VDQ 0 "register_operand")
(unspec:VDQ [(match_operand:<V_cmp_result> 1 "register_operand")
(match_operand:VDQ 2 "register_operand")
(match_operand:VDQ 3 "register_operand")]
UNSPEC_BSL))]
"TARGET_SIMD"
{
/* We can't alias operands together if they have different modes. */
operands[1] = gen_lowpart (<MODE>mode, operands[1]);
})

(define_expand "aarch64_vcond_internal<mode>"
[(set (match_operand:VDQ 0 "register_operand")
(if_then_else:VDQ
(match_operator 3 "comparison_operator"
[(match_operand:VDQ 4 "register_operand")
(match_operand:VDQ 5 "nonmemory_operand")])
(match_operand:VDQ 1 "register_operand")
(match_operand:VDQ 2 "register_operand")))]
"TARGET_SIMD"
{
int inverse = 0, has_zero_imm_form = 0;
rtx mask = gen_reg_rtx (<MODE>mode);
switch (GET_CODE (operands[3]))
{
case LE:
case LT:
case NE:
inverse = 1;
/* Fall through. */
case GE:
case GT:
case EQ:
has_zero_imm_form = 1;
break;
case LEU:
case LTU:
inverse = 1;
break;
default:
break;
}
if (!REG_P (operands[5])
&& (operands[5] != CONST0_RTX (<MODE>mode) || !has_zero_imm_form))
operands[5] = force_reg (<MODE>mode, operands[5]);
switch (GET_CODE (operands[3]))
{
case LT:
case GE:
emit_insn (gen_aarch64_cmge<mode> (mask, operands[4], operands[5]));
break;
case LE:
case GT:
emit_insn (gen_aarch64_cmgt<mode> (mask, operands[4], operands[5]));
break;
case LTU:
case GEU:
emit_insn (gen_aarch64_cmhs<mode> (mask, operands[4], operands[5]));
break;
case LEU:
case GTU:
emit_insn (gen_aarch64_cmhi<mode> (mask, operands[4], operands[5]));
break;
case NE:
case EQ:
emit_insn (gen_aarch64_cmeq<mode> (mask, operands[4], operands[5]));
break;
default:
gcc_unreachable ();
}
if (inverse)
emit_insn (gen_aarch64_simd_bsl<mode> (operands[0], mask, operands[2],
operands[1]));
else
emit_insn (gen_aarch64_simd_bsl<mode> (operands[0], mask, operands[1],
operands[2]));
DONE;
})
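
In the expander above, LT, LE, NE, LTU and LEU are handled as inverses of GE,
GT, EQ, GEU and GTU: the mask for the opposite comparison is emitted and the
two select operands are swapped when the bsl is generated.  A rough per-lane C
model of the signed LT case (illustrative only; the names are mine, and the
real expansion is the vector cmge followed by the bsl pattern above):

#include <stdint.h>

/* One lane of "a < b ? x : y": build the mask for a >= b (cmge), then
   select Y where the mask is set and X where it is clear, i.e. the bit
   select with its two data operands swapped.  */
static inline uint32_t
vcond_lt_lane (int32_t a, int32_t b, uint32_t x, uint32_t y)
{
  uint32_t mask = (a >= b) ? UINT32_MAX : 0;
  return (mask & y) | (~mask & x);
}
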
(define_expand "vcond<mode><mode>"
[(set (match_operand:VDQ 0 "register_operand")
(if_then_else:VDQ
(match_operator 3 "comparison_operator"
[(match_operand:VDQ 4 "register_operand")
(match_operand:VDQ 5 "nonmemory_operand")])
(match_operand:VDQ 1 "register_operand")
(match_operand:VDQ 2 "register_operand")))]
"TARGET_SIMD"
{
emit_insn (gen_aarch64_vcond_internal<mode> (operands[0], operands[1],
operands[2], operands[3],
operands[4], operands[5]));
DONE;
})

(define_expand "vcondu<mode><mode>"
[(set (match_operand:VDQ 0 "register_operand")
(if_then_else:VDQ
(match_operator 3 "comparison_operator"
[(match_operand:VDQ 4 "register_operand")
(match_operand:VDQ 5 "nonmemory_operand")])
(match_operand:VDQ 1 "register_operand")
(match_operand:VDQ 2 "register_operand")))]
"TARGET_SIMD"
{
emit_insn (gen_aarch64_vcond_internal<mode> (operands[0], operands[1],
operands[2], operands[3],
operands[4], operands[5]));
DONE;
})
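
These vcond/vcondu expanders are what the auto-vectorizer targets when it
lowers an element-wise conditional.  A hypothetical loop of the shape below
(not taken from the patch or the testsuite) is the kind of source that can
now be expanded through vcond<mode><mode> into a vector compare followed by
the bit-select pattern:

/* Hypothetical example: each element of R takes X[i] or Y[i] depending on
   how A[i] compares with B[i].  With vectorization this can become a
   vector GT compare plus a select.  */
void
cond_select (int *restrict r, const int *restrict a, const int *restrict b,
             const int *restrict x, const int *restrict y, int n)
{
  for (int i = 0; i < n; i++)
    r[i] = a[i] > b[i] ? x[i] : y[i];
}
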
;; Patterns for AArch64 SIMD Intrinsics.
(define_expand "aarch64_create<mode>"

gcc/config/aarch64/iterators.md

@@ -227,6 +227,7 @@
UNSPEC_CMTST ; Used in aarch64-simd.md.
UNSPEC_FMAX ; Used in aarch64-simd.md.
UNSPEC_FMIN ; Used in aarch64-simd.md.
UNSPEC_BSL ; Used in aarch64-simd.md.
])
;; -------------------------------------------------------------------