Fix v9/64-bit strcmp when string ends in multiple zero bytes.

[BZ #16885]
	* sysdeps/sparc/sparc64/strcmp.S: Fix end comparison handling when
	multiple zero bytes exist at the end of a string.
	Reported by Aurelien Jarno <aurelien@aurel32.net>

	* string/test-strcmp.c (check): Add explicit test for situations where
	there are multiple zero bytes after the first.
This commit is contained in:
David S. Miller 2014-04-30 12:57:51 -07:00
parent 4fdfe821e2
commit 5331255b6e
3 changed files with 69 additions and 0 deletions

View File

@@ -1,3 +1,13 @@
2014-05-01 David S. Miller <davem@davemloft.net>
[BZ #16885]
* sysdeps/sparc/sparc64/strcmp.S: Fix end comparison handling when
multiple zero bytes exist at the end of a string.
Reported by Aurelien Jarno <aurelien@aurel32.net>
* string/test-strcmp.c (check): Add explicit test for situations where
there are multiple zero bytes after the first.
2014-05-01 Andreas Schwab <schwab@linux-m68k.org>
[BZ #16890]

View File

@@ -329,6 +329,34 @@ check (void)
FOR_EACH_IMPL (impl, 0)
check_result (impl, s1 + i1, s2 + i2, exp_result);
}
/* Test cases where there are multiple zero bytes after the first. */
for (size_t i = 0; i < 16 + 1; i++)
{
s1[i] = 0x00;
s2[i] = 0x00;
}
for (size_t i = 0; i < 16; i++)
{
int exp_result;
for (int val = 0x01; val < 0x100; val++)
{
for (size_t j = 0; j < i; j++)
{
s1[j] = val;
s2[j] = val;
}
s2[i] = val;
exp_result = SIMPLE_STRCMP (s1, s2);
FOR_EACH_IMPL (impl, 0)
check_result (impl, s1, s2, exp_result);
}
}
}

View File

@@ -121,6 +121,37 @@ ENTRY(strcmp)
movleu %xcc, -1, %o0
srlx rTMP1, 7, rTMP1
/* In order not to be influenced by the bytes after the zero
   byte, we have to retain only the highest bit in the mask for
   the comparison with rSTRXOR to work properly.  */
mov 0, rTMP2
andcc rTMP1, 0x0100, %g0
movne %xcc, 8, rTMP2
sllx rTMP1, 63 - 16, %o1
movrlz %o1, 16, rTMP2
sllx rTMP1, 63 - 24, %o1
movrlz %o1, 24, rTMP2
sllx rTMP1, 63 - 32, %o1
movrlz %o1, 32, rTMP2
sllx rTMP1, 63 - 40, %o1
movrlz %o1, 40, rTMP2
sllx rTMP1, 63 - 48, %o1
movrlz %o1, 48, rTMP2
sllx rTMP1, 63 - 56, %o1
movrlz %o1, 56, rTMP2
srlx rTMP1, rTMP2, rTMP1
sllx rTMP1, rTMP2, rTMP1
cmp rTMP1, rSTRXOR
retl
movgu %xcc, 0, %o0