re PR target/51387 (Test vect.exp/vect-116.c fails on execution when compiled with -mavx2 on sde.)

	PR target/51387
	* config/i386/sse.md (mul<mode>3 with VI1_AVX2 iterator): For
	V32QImode use { 0,2,..,14,32,34,..,46,16,18,..,30,48,50,..,62 }
	permutation instead of extract even permutation.

From-SVN: r181951
Author:    Jakub Jelinek
Committed: 2011-12-02 22:57:15 +01:00 by Jakub Jelinek
Parent:    8919c0d984
Commit:    4bd97bee25
2 changed files with 25 additions and 1 deletion
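
As a quick sanity check on the index vector named in the ChangeLog, here is a small standalone C sketch (not part of the commit) that evaluates the same expression the patch uses to fill the rtvec and prints the resulting selector; the output should read 0 2 .. 14 32 34 .. 46 16 18 .. 30 48 50 .. 62.

/* Standalone sketch (not part of the commit): evaluate the index
   expression the patch uses to fill the rtvec and print the resulting
   V32QImode selector.  Expected output:
   0 2 4 6 8 10 12 14 32 34 36 38 40 42 44 46
   16 18 20 22 24 26 28 30 48 50 52 54 56 58 60 62  */
#include <stdio.h>

int
main (void)
{
  for (int i = 0; i < 32; ++i)
    printf ("%d%s", i * 2 + ((i & 24) == 8 ? 16 : (i & 24) == 16 ? -16 : 0),
	    i == 31 ? "\n" : " ");
  return 0;
}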

gcc/ChangeLog

@@ -1,3 +1,10 @@
+2011-12-02  Jakub Jelinek  <jakub@redhat.com>
+
+	PR target/51387
+	* config/i386/sse.md (mul<mode>3 with VI1_AVX2 iterator): For
+	V32QImode use { 0,2,..,14,32,34,..,46,16,18,..,30,48,50,..,62 }
+	permutation instead of extract even permutation.
+
 2011-12-02  Nathan Sidwell  <nathan@acm.org>
 
 	* gcov.c (struct arc_info): Add is_throw field.

gcc/config/i386/sse.md

@@ -5066,7 +5066,24 @@
 					gen_lowpart (mulmode, t[3]))));
 
   /* Extract the even bytes and merge them back together.  */
-  ix86_expand_vec_extract_even_odd (operands[0], t[5], t[4], 0);
+  if (<MODE>mode == V16QImode)
+    ix86_expand_vec_extract_even_odd (operands[0], t[5], t[4], 0);
+  else
+    {
+      /* Since avx2_interleave_{low,high}v32qi used above aren't cross-lane,
+	 this can't be normal even extraction, but one where additionally
+	 the second and third quarter are swapped.  That is even one insn
+	 shorter than even extraction.  */
+      rtvec v = rtvec_alloc (32);
+      for (i = 0; i < 32; ++i)
+	RTVEC_ELT (v, i)
+	  = GEN_INT (i * 2 + ((i & 24) == 8 ? 16 : (i & 24) == 16 ? -16 : 0));
+      t[0] = operands[0];
+      t[1] = t[5];
+      t[2] = t[4];
+      t[3] = gen_rtx_CONST_VECTOR (<MODE>mode, v);
+      ix86_expand_vec_perm_const (t);
+    }
 
   set_unique_reg_note (get_last_insn (), REG_EQUAL,
 		       gen_rtx_MULT (<MODE>mode, operands[1], operands[2]));
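
To see why the even bytes have to be pulled out with the second and third quarters swapped, the following hypothetical scalar model (not part of the commit; the test values and the interleave_v32qi helper are made up for illustration) mimics the per-lane AVX2 byte interleaves, the 16-bit word multiplies, and the selector added by this patch, then checks that the result equals the element-wise byte product.

/* Hypothetical scalar model (not part of the commit) of the V32QImode
   multiply lowering: per-128-bit-lane byte interleaves, 16-bit word
   multiplies, and the lane-swapping even-byte selector this patch adds.  */
#include <stdint.h>
#include <stdio.h>

/* Model vpunpcklbw/vpunpckhbw on 256-bit vectors: each 128-bit lane is
   interleaved independently, which is why the product words come out with
   their second and third quarters swapped.  */
static void
interleave_v32qi (const uint8_t *x, const uint8_t *y, int high, uint8_t *dst)
{
  for (int lane = 0; lane < 2; ++lane)
    for (int j = 0; j < 8; ++j)
      {
	int src = lane * 16 + (high ? 8 : 0) + j;
	dst[lane * 16 + 2 * j] = x[src];
	dst[lane * 16 + 2 * j + 1] = y[src];
      }
}

int
main (void)
{
  uint8_t a[32], b[32], t0[32], t1[32], t2[32], t3[32], t4[32], t5[32];

  for (int i = 0; i < 32; ++i)
    {
      a[i] = (uint8_t) (3 * i + 1);
      b[i] = (uint8_t) (251 - 7 * i);
    }

  /* Unpack so each 16-bit word has a source byte in its low byte; the
     high byte is a don't-care copy, as in the expander.  */
  interleave_v32qi (a, a, 1, t0);
  interleave_v32qi (a, a, 0, t1);
  interleave_v32qi (b, b, 1, t2);
  interleave_v32qi (b, b, 0, t3);

  /* Multiply as sixteen little-endian 16-bit words per vector (vpmullw);
     only the low byte of each product word is meaningful.  */
  for (int w = 0; w < 16; ++w)
    {
      unsigned p4 = (unsigned) (t0[2 * w] | t0[2 * w + 1] << 8)
		    * (t2[2 * w] | t2[2 * w + 1] << 8);
      unsigned p5 = (unsigned) (t1[2 * w] | t1[2 * w + 1] << 8)
		    * (t3[2 * w] | t3[2 * w + 1] << 8);
      t4[2 * w] = (uint8_t) p4;
      t4[2 * w + 1] = (uint8_t) (p4 >> 8);
      t5[2 * w] = (uint8_t) p5;
      t5[2 * w + 1] = (uint8_t) (p5 >> 8);
    }

  /* Apply the patch's selector to the 64-byte concatenation of t5 (first
     operand) and t4 (second operand) and compare with a[i] * b[i].  */
  for (int i = 0; i < 32; ++i)
    {
      int sel = i * 2 + ((i & 24) == 8 ? 16 : (i & 24) == 16 ? -16 : 0);
      uint8_t got = sel < 32 ? t5[sel] : t4[sel - 32];
      if (got != (uint8_t) (a[i] * b[i]))
	{
	  printf ("mismatch at element %d\n", i);
	  return 1;
	}
    }
  printf ("permuted result matches element-wise byte multiply\n");
  return 0;
}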