Fix internal error with vectorization on SPARC

This is a regression present since the 10.x series, but the underlying issue
has been there since the TARGET_VEC_PERM_CONST hook was implemented: an ICE
when expanding a constant VEC_PERM_EXPR in V4QI mode, whereas the back-end
only supports constant VEC_PERM_EXPRs in V8QI mode.
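For illustration only (not part of the commit), this is roughly the kind of
construct that becomes a constant VEC_PERM_EXPR in V4QI: a __builtin_shuffle
of a 4-byte vector with a constant selector.  The type and function names
below are made up for the example.

/* Illustrative only: a constant byte permutation of a 4-byte vector.
   With a constant selector, GCC represents this as a VEC_PERM_EXPR in
   V4QImode, the case that used to trip the SPARC expander.  */
typedef unsigned char v4qi __attribute__ ((vector_size (4)));

v4qi
swap_bytes_in_halves (v4qi x)
{
  const v4qi sel = { 1, 0, 3, 2 };  /* swap the bytes of each halfword */
  return __builtin_shuffle (x, sel);
}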

gcc/
	PR target/105292
	* config/sparc/sparc.cc (sparc_vectorize_vec_perm_const): Return
	true only for 8-byte vector modes.

gcc/testsuite/
	* gcc.target/sparc/20220510-1.c: New test.
Eric Botcazou 2022-05-10 09:33:16 +02:00
parent 6d7d4f59c3
commit b931d0cfd3
2 changed files with 33 additions and 2 deletions

gcc/config/sparc/sparc.cc

@@ -13041,9 +13041,9 @@ sparc_vectorize_vec_perm_const (machine_mode vmode, rtx target, rtx op0,
   if (!TARGET_VIS2)
     return false;
 
-  /* All permutes are supported. */
+  /* All 8-byte permutes are supported. */
   if (!target)
-    return true;
+    return GET_MODE_SIZE (vmode) == 8;
 
   /* Force target-independent code to convert constant permutations on other
      modes down to V8QI.  Rely on this to avoid the complexity of the byte
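A conceptual sketch of the fallback this comment relies on, written with
GCC's generic vector extensions rather than middle-end code: a 4-byte
permutation can be performed by widening to an 8-byte vector, permuting in
V8QI, where the VIS2 bmask/bshuffle pair applies, and keeping the low half.
The helper name and the exact widening strategy are assumptions for the
sketch, not what the lowering code literally emits.

typedef unsigned char v4qi __attribute__ ((vector_size (4)));
typedef unsigned char v8qi __attribute__ ((vector_size (8)));

v4qi
permute_via_v8qi (v4qi x)
{
  /* Widen the 4-byte input into the low half of an 8-byte vector.  */
  v8qi wide = { x[0], x[1], x[2], x[3], 0, 0, 0, 0 };
  /* Permute in V8QI, the only mode the back-end claims to support.  */
  const v8qi sel = { 1, 0, 3, 2, 4, 5, 6, 7 };
  v8qi shuffled = __builtin_shuffle (wide, sel);
  /* Take back the low 4 bytes.  */
  return (v4qi) { shuffled[0], shuffled[1], shuffled[2], shuffled[3] };
}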

gcc/testsuite/gcc.target/sparc/20220510-1.c

@@ -0,0 +1,31 @@
/* PR target/105292 */
/* Reported by Koakuma <koachan+gccbugs@protonmail.com> */

/* { dg-do compile } */
/* { dg-options "-O3 -mvis2" } */

extern void get_vbytes_v2 (unsigned);

typedef struct {
  unsigned ctt_info;
  unsigned ctt_size;
} ctf_type_t;

typedef struct {
  unsigned short cts_offset;
  unsigned short cts_bits;
} ctf_slice_t;

void flip_types_len (ctf_type_t *t, int bsx1, int bsx2)
{
  const int kind = t->ctt_info;

  get_vbytes_v2 (t->ctt_size);

  if (kind == 4)
    {
      ctf_slice_t *s = (ctf_slice_t *)t;
      s->cts_offset = __builtin_bswap16(bsx1);
      s->cts_bits = __builtin_bswap16(bsx2);
    }
}
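A hedged reading of why this test exercises the fixed path, not spelled out
in the commit itself: at -O3 the vectorizer appears to combine the two
byte-swapped halfword stores into a single 4-byte store whose value is built
with a constant byte permutation, i.e. a constant VEC_PERM_EXPR in V4QI,
which is exactly the case the sparc.cc change now declines in the capability
query so that it is lowered to V8QI instead of ICEing at expansion time.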