Detect EXT patterns to vec_perm_const, use for EXT intrinsics.

(part 2, fix ICE at -O0)

	* config/aarch64/aarch64.c (aarch64_evpc_ext): allow and handle
	location == 0.

From-SVN: r211177
Alan Lawrence authored and committed 2014-06-03 11:56:24 +00:00
parent 923fcec3d8
commit b31e65bb60
2 changed files with 9 additions and 4 deletions
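
For context: at -O0 the mid-end does not fold away permutations whose mask simply returns the first input vector, so they reach aarch64_evpc_ext with location == 0. The following is a hypothetical reproducer sketch, not taken from the commit or its testsuite; it assumes the EXT intrinsics are routed through vec_perm_const as in part 1 of this series, so that vextq_u32 with a lane index of 0 exercises exactly that case when compiled at -O0.

/* Hypothetical -O0 reproducer sketch; not the commit's own test case.  */
#include <arm_neon.h>

uint32x4_t
ext_zero (uint32x4_t a, uint32x4_t b)
{
  /* EXT with index 0 selects elements {0,1,2,3} of the pair (a, b),
     i.e. it just returns a.  At -O1 and above the mid-end folds this
     away; at -O0 the unfolded permute reaches aarch64_evpc_ext with
     location == 0.  */
  return vextq_u32 (a, b, 0);
}

Compiled with something like aarch64-none-elf-gcc -O0 -c, the assertion removed below would fire before this patch (the ICE being fixed); with the patch the permute is accepted and amounts to a plain copy of the first operand.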

gcc/ChangeLog

@@ -1,3 +1,8 @@
+2014-06-03  Alan Lawrence  <alan.lawrence@arm.com>
+
+	* config/aarch64/aarch64.c (aarch64_evpc_ext): allow and handle
+	location == 0.
+
 2014-06-03  Alan Lawrence  <alan.lawrence@arm.com>
 
 	* config/aarch64/aarch64-simd.md (aarch64_rev<REVERSE:rev-op><mode>):

gcc/config/aarch64/aarch64.c

@@ -9018,9 +9018,6 @@ aarch64_evpc_ext (struct expand_vec_perm_d *d)
 	return false;
     }
 
-  /* The mid-end handles masks that just return one of the input vectors.  */
-  gcc_assert (location != 0);
-
   switch (d->vmode)
     {
     case V16QImode: gen = gen_aarch64_extv16qi; break;
@@ -9041,7 +9038,10 @@ aarch64_evpc_ext (struct expand_vec_perm_d *d)
   if (d->testing_p)
     return true;
 
-  if (BYTES_BIG_ENDIAN)
+  /* The case where (location == 0) is a no-op for both big- and little-endian,
+     and is removed by the mid-end at optimization levels -O1 and higher.  */
+
+  if (BYTES_BIG_ENDIAN && (location != 0))
     {
       /* After setup, we want the high elements of the first vector (stored
 	 at the LSB end of the register), and the low elements of the second
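
The removed comment relied on the mid-end handling masks that just return one of the input vectors. A minimal sketch of such a mask, written with GCC's generic vector shuffle (again a hypothetical illustration, not part of the patch):

/* Hypothetical illustration of a mask that "just returns one of the
   input vectors".  At -O1 and higher the mid-end folds the shuffle to a
   copy of a; at -O0 the permute survives to expansion, where the backend
   sees location == 0.  */
typedef unsigned int v4si __attribute__ ((vector_size (16)));

v4si
identity_shuffle (v4si a, v4si b)
{
  const v4si mask = { 0, 1, 2, 3 };   /* Selects only elements of a.  */
  return __builtin_shuffle (a, b, mask);
}

Skipping the big-endian operand swap for (location == 0) is safe because an EXT starting at byte 0 of the first operand copies that operand unchanged, so the permute is a no-op on either endianness, as the new comment notes.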