x86-64: Skip zero length in __mem[pcpy|move|set]_erms

This patch skips zero length in __mempcpy_erms, __memmove_erms and
__memset_erms.

Tested on x86-64.

	* sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S
	(__mempcpy_erms): Skip zero length.
	(__memmove_erms): Likewise.
	* sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S
	(__memset_erms): Likewise.
This commit is contained in:
H.J. Lu 2018-05-23 11:25:20 -07:00
parent 2834fb4610
commit 727b38df05
3 changed files with 19 additions and 0 deletions

View File

@ -1,3 +1,11 @@
2018-05-23 H.J. Lu <hongjiu.lu@intel.com>
* sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S
(__mempcpy_erms): Skip zero length.
(__memmove_erms): Likewise.
* sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S
(__memset_erms): Likewise.
2018-05-23 Joseph Myers <joseph@codesourcery.com>
* sysdeps/alpha/Implies: Remove alpha/soft-fp.

View File

@ -156,6 +156,9 @@ END (__mempcpy_chk_erms)
/* Only used to measure performance of REP MOVSB. */
ENTRY (__mempcpy_erms)
/* mempcpy returns dst + n, so start the return value at dst (rdi)
   and add the length below.  */
movq %rdi, %rax
/* Skip zero length.  rdx = n; with n == 0 REP MOVSB would be a no-op,
   so bail out early via the forward numeric label 2 (defined outside
   this hunk — presumably a bare ret; verify in the full file).  */
testq %rdx, %rdx
jz 2f
/* rax = dst + n: the mempcpy return value.  */
addq %rdx, %rax
/* Tail into the shared REP MOVSB path inside __memmove_erms
   (L(start_movsb) is defined there).  */
jmp L(start_movsb)
END (__mempcpy_erms)
@ -167,6 +170,9 @@ END (__memmove_chk_erms)
ENTRY (__memmove_erms)
/* memmove returns dst, so set the return value up front.  */
movq %rdi, %rax
/* Skip zero length.  rdx = n; with n == 0 REP MOVSB would be a no-op,
   so bail out early via the forward numeric label 2 (defined outside
   this hunk — presumably a bare ret; verify in the full file).  */
testq %rdx, %rdx
jz 2f
/* Shared entry point: __mempcpy_erms jumps here after computing its
   own return value in rax.  */
L(start_movsb):
/* rcx = byte count for REP MOVSB.  */
movq %rdx, %rcx
/* Compare src (rsi) against dst (rdi) — NOTE(review): the rest of the
   function is outside this diff hunk; this presumably selects between
   forward and backward copy for overlap handling.  */
cmpq %rsi, %rdi

View File

@ -128,6 +128,11 @@ END (__memset_chk_erms)
/* Only used to measure performance of REP STOSB. */
ENTRY (__memset_erms)
/* Skip zero length.  rdx = n; nonzero lengths take the shared
   L(stosb) path (defined outside this hunk).  */
testq %rdx, %rdx
jnz L(stosb)
/* n == 0: memset returns dst (rdi) without touching memory.  */
movq %rdi, %rax
ret
# else
/* Provide a hidden symbol to debugger. */
.hidden MEMSET_SYMBOL (__memset, erms)