* sysdeps/x86_64/cacheinfo.c: Comment out code added in support of
new memset.
* sysdeps/x86_64/memset.S: Revert to old version for now.  The cost is
	too high for the improvements.  Implement bzero unconditionally for
	use in libc.
This commit is contained in:
Ulrich Drepper 2007-10-17 15:58:16 +00:00
parent ac1cb5da08
commit 406f28dbe5
3 changed files with 24 additions and 21 deletions

View File

@@ -1,7 +1,10 @@
2007-10-17 Ulrich Drepper <drepper@redhat.com>
* sysdeps/x86_64/cacheinfo.c: Comment out code added in support of
new memset.
* sysdeps/x86_64/memset.S: Revert to old version for now. The cost is
too high for the improvements.
too high for the improvements. Implement bzero unconditionally for
use in libc.
2007-10-17 Ulrich Drepper <drepper@redhat.com>
Jakub Jelinek <jakub@redhat.com>

View File

@@ -404,10 +404,13 @@ long int __x86_64_data_cache_size_half attribute_hidden = 32 * 1024 / 2;
/* Shared cache size for use in memory and string routines, typically
L2 or L3 size. */
long int __x86_64_shared_cache_size_half attribute_hidden = 1024 * 1024 / 2;
#ifdef NOT_USED_RIGHT_NOW
long int __x86_64_shared_cache_size attribute_hidden = 1024 * 1024;
#endif
/* PREFETCHW support flag for use in memory and string routines. */
int __x86_64_prefetchw attribute_hidden;
#ifdef NOT_USED_RIGHT_NOW
/* Instructions preferred for memory and string routines.
0: Regular instructions
@@ -417,6 +420,7 @@ int __x86_64_prefetchw attribute_hidden;
*/
int __x86_64_preferred_memory_instruction attribute_hidden;
#endif
static void
@@ -459,12 +463,14 @@ init_cacheinfo (void)
: "=a" (eax), "=b" (ebx), "=c" (ecx), "=d" (edx)
: "0" (1));
#ifdef NOT_USED_RIGHT_NOW
/* Intel prefers SSSE3 instructions for memory/string rountines
if they are avaiable. */
if ((ecx & 0x200))
__x86_64_preferred_memory_instruction = 3;
else
__x86_64_preferred_memory_instruction = 2;
#endif
/* Figure out the number of logical threads that share the
highest cache level. */
@@ -570,6 +576,8 @@ init_cacheinfo (void)
if (shared > 0)
{
__x86_64_shared_cache_size_half = shared / 2;
#ifdef NOT_USED_RIGHT_NOW
__x86_64_shared_cache_size = shared;
#endif
}
}

View File

@@ -1,6 +1,6 @@
/* memset/bzero -- set memory area to CH/0
Optimized version for x86-64.
Copyright (C) 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
Copyright (C) 2002, 2003, 2004, 2005, 2007 Free Software Foundation, Inc.
This file is part of the GNU C Library.
Contributed by Andreas Jaeger <aj@suse.de>.
@@ -24,37 +24,35 @@
#include "bp-sym.h"
#include "bp-asm.h"
/* BEWARE: `#ifdef memset' means that memset is redefined as `bzero' */
#define BZERO_P (defined memset)
/* This is somehow experimental and could made dependend on the cache
size. */
#define LARGE $120000
.text
#if !BZERO_P && defined PIC && !defined NOT_IN_libc
#ifndef NOT_IN_libc
ENTRY(bzero)
mov %rsi,%rdx /* Adjust parameter. */
xorl %esi,%esi /* Fill with 0s. */
jmp L(memset_entry)
END(bzero)
#endif
#if defined PIC && !defined NOT_IN_libc
ENTRY (__memset_chk)
cmpq %rdx, %rcx
jb HIDDEN_JUMPTARGET (__chk_fail)
END (__memset_chk)
#endif
ENTRY (memset)
#if BZERO_P
mov %rsi,%rdx /* Adjust parameter. */
xorl %esi,%esi /* Fill with 0s. */
#endif
L(memset_entry):
cmp $0x7,%rdx /* Check for small length. */
mov %rdi,%rcx /* Save ptr as return value. */
jbe 7f
#if BZERO_P
mov %rsi,%r8 /* Just copy 0. */
#else
/* Populate 8 bit data to full 64-bit. */
movabs $0x0101010101010101,%r8
movzbl %sil,%eax
imul %rax,%r8
#endif
test $0x7,%edi /* Check for alignment. */
je 2f
@@ -109,12 +107,8 @@ ENTRY (memset)
dec %rdx
jne 8b
9:
#if BZERO_P
nop
#else
/* Load result (only if used as memset). */
mov %rdi,%rax /* start address of destination is result */
#endif
retq
.p2align 4
@@ -135,11 +129,9 @@ ENTRY (memset)
jmp 4b
END (memset)
#if !BZERO_P
libc_hidden_builtin_def (memset)
#endif
#if !BZERO_P && defined PIC && !defined NOT_IN_libc
#if defined PIC && !defined NOT_IN_libc
strong_alias (__memset_chk, __memset_zero_constant_len_parameter)
.section .gnu.warning.__memset_zero_constant_len_parameter
.string "memset used with constant zero length parameter; this could be due to transposed parameters"