Change __x86_64 prefix in cache size to __x86

This commit is contained in:
H.J. Lu 2013-01-05 16:00:38 -08:00
parent 0b3986d0dc
commit afec409af9
8 changed files with 79 additions and 55 deletions

View File

@@ -1,3 +1,36 @@
2013-01-04 H.J. Lu <hongjiu.lu@intel.com>
* sysdeps/i386/i686/cacheinfo.c (__x86_64_data_cache_size): Removed.
(__x86_64_raw_data_cache_size): Likewise.
(__x86_64_data_cache_size_half): Likewise.
(__x86_64_raw_data_cache_size_half): Likewise.
(__x86_64_shared_cache_size): Likewise.
(__x86_64_raw_shared_cache_size): Likewise.
(__x86_64_shared_cache_size_half): Likewise.
(__x86_64_raw_shared_cache_size_half): Likewise.
* sysdeps/x86_64/cacheinfo.c (__x86_64_data_cache_size): Renamed
to ...
(__x86_data_cache_size): This.
(__x86_64_raw_data_cache_size): Renamed to ...
(__x86_raw_data_cache_size): This.
(__x86_64_data_cache_size_half): Renamed to ...
(__x86_data_cache_size_half): This.
(__x86_64_raw_data_cache_size_half): Renamed to ...
(__x86_raw_data_cache_size_half): This.
(__x86_64_shared_cache_size): Renamed to ...
(__x86_shared_cache_size): This.
(__x86_64_raw_shared_cache_size): Renamed to ...
(__x86_raw_shared_cache_size): This.
(__x86_64_shared_cache_size_half): Renamed to ...
(__x86_shared_cache_size_half): This.
(__x86_64_raw_shared_cache_size_half): Renamed to ...
(__x86_raw_shared_cache_size_half): This.
* sysdeps/x86_64/memcpy.S: Updated.
* sysdeps/x86_64/memset.S: Likewise.
* sysdeps/x86_64/multiarch/memcmp-sse4.S: Likewise.
* sysdeps/x86_64/multiarch/memcpy-ssse3-back.S: Likewise.
* sysdeps/x86_64/multiarch/memcpy-ssse3.S: Likewise.
2013-01-04 David S. Miller <davem@davemloft.net>
* sysdeps/sparc/fpu/libm-test-ulps: Update.

View File

@@ -1,12 +1,3 @@
#define __x86_64_data_cache_size __x86_data_cache_size
#define __x86_64_raw_data_cache_size __x86_raw_data_cache_size
#define __x86_64_data_cache_size_half __x86_data_cache_size_half
#define __x86_64_raw_data_cache_size_half __x86_raw_data_cache_size_half
#define __x86_64_shared_cache_size __x86_shared_cache_size
#define __x86_64_raw_shared_cache_size __x86_raw_shared_cache_size
#define __x86_64_shared_cache_size_half __x86_shared_cache_size_half
#define __x86_64_raw_shared_cache_size_half __x86_raw_shared_cache_size_half
#define DISABLE_PREFETCHW
#define DISABLE_PREFERRED_MEMORY_INSTRUCTION

View File

@@ -505,24 +505,24 @@ __cache_sysconf (int name)
/* Data cache size for use in memory and string routines, typically
L1 size, rounded to multiple of 256 bytes. */
long int __x86_64_data_cache_size_half attribute_hidden = 32 * 1024 / 2;
long int __x86_64_data_cache_size attribute_hidden = 32 * 1024;
/* Similar to __x86_64_data_cache_size_half, but not rounded. */
long int __x86_64_raw_data_cache_size_half attribute_hidden = 32 * 1024 / 2;
/* Similar to __x86_64_data_cache_size, but not rounded. */
long int __x86_64_raw_data_cache_size attribute_hidden = 32 * 1024;
long int __x86_data_cache_size_half attribute_hidden = 32 * 1024 / 2;
long int __x86_data_cache_size attribute_hidden = 32 * 1024;
/* Similar to __x86_data_cache_size_half, but not rounded. */
long int __x86_raw_data_cache_size_half attribute_hidden = 32 * 1024 / 2;
/* Similar to __x86_data_cache_size, but not rounded. */
long int __x86_raw_data_cache_size attribute_hidden = 32 * 1024;
/* Shared cache size for use in memory and string routines, typically
L2 or L3 size, rounded to multiple of 256 bytes. */
long int __x86_64_shared_cache_size_half attribute_hidden = 1024 * 1024 / 2;
long int __x86_64_shared_cache_size attribute_hidden = 1024 * 1024;
/* Similar to __x86_64_shared_cache_size_half, but not rounded. */
long int __x86_64_raw_shared_cache_size_half attribute_hidden = 1024 * 1024 / 2;
/* Similar to __x86_64_shared_cache_size, but not rounded. */
long int __x86_64_raw_shared_cache_size attribute_hidden = 1024 * 1024;
long int __x86_shared_cache_size_half attribute_hidden = 1024 * 1024 / 2;
long int __x86_shared_cache_size attribute_hidden = 1024 * 1024;
/* Similar to __x86_shared_cache_size_half, but not rounded. */
long int __x86_raw_shared_cache_size_half attribute_hidden = 1024 * 1024 / 2;
/* Similar to __x86_shared_cache_size, but not rounded. */
long int __x86_raw_shared_cache_size attribute_hidden = 1024 * 1024;
#ifndef DISABLE_PREFETCHW
/* PREFETCHW support flag for use in memory and string routines. */
int __x86_64_prefetchw attribute_hidden;
int __x86_prefetchw attribute_hidden;
#endif
#ifndef DISABLE_PREFERRED_MEMORY_INSTRUCTION
@@ -534,7 +534,7 @@ int __x86_64_prefetchw attribute_hidden;
3: SSSE3 instructions
*/
int __x86_64_preferred_memory_instruction attribute_hidden;
int __x86_preferred_memory_instruction attribute_hidden;
#endif
@@ -591,9 +591,9 @@ init_cacheinfo (void)
/* Intel prefers SSSE3 instructions for memory/string routines
if they are available. */
if ((ecx & 0x200))
__x86_64_preferred_memory_instruction = 3;
__x86_preferred_memory_instruction = 3;
else
__x86_64_preferred_memory_instruction = 2;
__x86_preferred_memory_instruction = 2;
#endif
/* Figure out the number of logical threads that share the
@@ -684,9 +684,9 @@ init_cacheinfo (void)
if they are avaiable, otherwise it prefers integer
instructions. */
if ((ecx & 0x200))
__x86_64_preferred_memory_instruction = 3;
__x86_preferred_memory_instruction = 3;
else
__x86_64_preferred_memory_instruction = 0;
__x86_preferred_memory_instruction = 0;
#endif
/* Get maximum extended function. */
@@ -730,28 +730,28 @@ init_cacheinfo (void)
__cpuid (0x80000001, eax, ebx, ecx, edx);
/* PREFETCHW || 3DNow! */
if ((ecx & 0x100) || (edx & 0x80000000))
__x86_64_prefetchw = -1;
__x86_prefetchw = -1;
}
#endif
}
if (data > 0)
{
__x86_64_raw_data_cache_size_half = data / 2;
__x86_64_raw_data_cache_size = data;
__x86_raw_data_cache_size_half = data / 2;
__x86_raw_data_cache_size = data;
/* Round data cache size to multiple of 256 bytes. */
data = data & ~255L;
__x86_64_data_cache_size_half = data / 2;
__x86_64_data_cache_size = data;
__x86_data_cache_size_half = data / 2;
__x86_data_cache_size = data;
}
if (shared > 0)
{
__x86_64_raw_shared_cache_size_half = shared / 2;
__x86_64_raw_shared_cache_size = shared;
__x86_raw_shared_cache_size_half = shared / 2;
__x86_raw_shared_cache_size = shared;
/* Round shared cache size to multiple of 256 bytes. */
shared = shared & ~255L;
__x86_64_shared_cache_size_half = shared / 2;
__x86_64_shared_cache_size = shared;
__x86_shared_cache_size_half = shared / 2;
__x86_shared_cache_size = shared;
}
}

View File

@@ -254,7 +254,7 @@ L(32after):
L(fasttry): /* first 1/2 L1 */
#ifndef NOT_IN_libc /* only up to this algorithm outside of libc.so */
mov __x86_64_data_cache_size_half(%rip), %R11_LP
mov __x86_data_cache_size_half(%rip), %R11_LP
cmpq %rdx, %r11 /* calculate the smaller of */
cmovaq %rdx, %r11 /* remaining bytes and 1/2 L1 */
#endif
@@ -303,7 +303,7 @@ L(fastafter):
/* Handle large blocks smaller than 1/2 L2. */
L(pretry): /* first 1/2 L2 */
mov __x86_64_shared_cache_size_half (%rip), %R8_LP
mov __x86_shared_cache_size_half (%rip), %R8_LP
cmpq %rdx, %r8 /* calculate the lesser of */
cmovaq %rdx, %r8 /* remaining bytes and 1/2 L2 */
@@ -322,7 +322,7 @@ L(pre): /* 64-byte with prefetching */
movq %rbx, SAVE3(%rsp)
cfi_rel_offset (%rbx, SAVE3)
cmpl $0, __x86_64_prefetchw(%rip)
cmpl $0, __x86_prefetchw(%rip)
jz L(preloop) /* check if PREFETCHW OK */
.p2align 4

View File

@@ -862,7 +862,7 @@ L(SSE15Q0): mov %rdx,-0xf(%rdi)
.balign 16
L(byte32sse2_pre):
mov __x86_64_shared_cache_size(%rip),%r9d # The largest cache size
mov __x86_shared_cache_size(%rip),%r9d # The largest cache size
cmp %r9,%r8
ja L(sse2_nt_move_pre)
#jmp L(byte32sse2)
@@ -1205,7 +1205,7 @@ L(SSExDx):
#ifndef USE_MULTIARCH
L(aligned_now):
cmpl $0x1,__x86_64_preferred_memory_instruction(%rip)
cmpl $0x1,__x86_preferred_memory_instruction(%rip)
jg L(SSE_pre)
#endif /* USE_MULTIARCH */
@@ -1262,7 +1262,7 @@ L(8byte_move_skip):
.balign 16
L(8byte_stos_try):
mov __x86_64_shared_cache_size(%rip),%r9d // ck largest cache size
mov __x86_shared_cache_size(%rip),%r9d // ck largest cache size
cmpq %r8,%r9 // calculate the lesser of remaining
cmovaq %r8,%r9 // bytes and largest cache size
jbe L(8byte_stos)

View File

@@ -321,7 +321,7 @@ L(512bytesormore):
# ifdef DATA_CACHE_SIZE_HALF
mov $DATA_CACHE_SIZE_HALF, %R8_LP
# else
mov __x86_64_data_cache_size_half(%rip), %R8_LP
mov __x86_data_cache_size_half(%rip), %R8_LP
# endif
mov %r8, %r9
shr $1, %r8
@@ -637,7 +637,7 @@ L(512bytesormorein2aligned):
# ifdef DATA_CACHE_SIZE_HALF
mov $DATA_CACHE_SIZE_HALF, %R8_LP
# else
mov __x86_64_data_cache_size_half(%rip), %R8_LP
mov __x86_data_cache_size_half(%rip), %R8_LP
# endif
mov %r8, %r9
shr $1, %r8

View File

@@ -108,7 +108,7 @@ L(144bytesormore):
#ifdef DATA_CACHE_SIZE
mov $DATA_CACHE_SIZE, %RCX_LP
#else
mov __x86_64_data_cache_size(%rip), %RCX_LP
mov __x86_data_cache_size(%rip), %RCX_LP
#endif
cmp %rcx, %rdx
jae L(gobble_mem_fwd)
@@ -124,7 +124,7 @@ L(copy_backward):
#ifdef DATA_CACHE_SIZE
mov $DATA_CACHE_SIZE, %RCX_LP
#else
mov __x86_64_data_cache_size(%rip), %RCX_LP
mov __x86_data_cache_size(%rip), %RCX_LP
#endif
shl $1, %rcx
cmp %rcx, %rdx
@@ -158,7 +158,7 @@ L(shl_0):
#ifdef DATA_CACHE_SIZE
cmp $DATA_CACHE_SIZE_HALF, %R9_LP
#else
cmp __x86_64_data_cache_size_half(%rip), %R9_LP
cmp __x86_data_cache_size_half(%rip), %R9_LP
#endif
jae L(gobble_mem_fwd)
sub $0x80, %rdx
@@ -1480,7 +1480,7 @@ L(gobble_mem_fwd):
#ifdef SHARED_CACHE_SIZE_HALF
mov $SHARED_CACHE_SIZE_HALF, %RCX_LP
#else
mov __x86_64_shared_cache_size_half(%rip), %RCX_LP
mov __x86_shared_cache_size_half(%rip), %RCX_LP
#endif
#ifdef USE_AS_MEMMOVE
mov %rsi, %r9
@@ -1587,7 +1587,7 @@ L(gobble_mem_bwd):
#ifdef SHARED_CACHE_SIZE_HALF
mov $SHARED_CACHE_SIZE_HALF, %RCX_LP
#else
mov __x86_64_shared_cache_size_half(%rip), %RCX_LP
mov __x86_shared_cache_size_half(%rip), %RCX_LP
#endif
#ifdef USE_AS_MEMMOVE
mov %rdi, %r9

View File

@@ -99,7 +99,7 @@ L(80bytesormore):
#ifdef SHARED_CACHE_SIZE_HALF
mov $SHARED_CACHE_SIZE_HALF, %RCX_LP
#else
mov __x86_64_shared_cache_size_half(%rip), %RCX_LP
mov __x86_shared_cache_size_half(%rip), %RCX_LP
#endif
cmp %rcx, %rdx
mov %rsi, %r9
@@ -109,7 +109,7 @@ L(80bytesormore):
#ifdef DATA_CACHE_SIZE_HALF
mov $DATA_CACHE_SIZE_HALF, %RCX_LP
#else
mov __x86_64_data_cache_size_half(%rip), %RCX_LP
mov __x86_data_cache_size_half(%rip), %RCX_LP
#endif
BRANCH_TO_JMPTBL_ENTRY (L(shl_table), %r9, 4)
@@ -129,7 +129,7 @@ L(copy_backward):
#ifdef SHARED_CACHE_SIZE_HALF
mov $SHARED_CACHE_SIZE_HALF, %RCX_LP
#else
mov __x86_64_shared_cache_size_half(%rip), %RCX_LP
mov __x86_shared_cache_size_half(%rip), %RCX_LP
#endif
cmp %rcx, %rdx
@@ -140,7 +140,7 @@ L(copy_backward):
#ifdef DATA_CACHE_SIZE_HALF
mov $DATA_CACHE_SIZE_HALF, %RCX_LP
#else
mov __x86_64_data_cache_size_half(%rip), %RCX_LP
mov __x86_data_cache_size_half(%rip), %RCX_LP
#endif
BRANCH_TO_JMPTBL_ENTRY (L(shl_table_bwd), %r9, 4)
@@ -177,7 +177,7 @@ L(shl_0_gobble):
#ifdef DATA_CACHE_SIZE_HALF
cmp $DATA_CACHE_SIZE_HALF, %RDX_LP
#else
cmp __x86_64_data_cache_size_half(%rip), %RDX_LP
cmp __x86_data_cache_size_half(%rip), %RDX_LP
#endif
lea -128(%rdx), %rdx
jae L(shl_0_gobble_mem_loop)
@@ -318,7 +318,7 @@ L(shl_0_gobble_bwd):
#ifdef DATA_CACHE_SIZE_HALF
cmp $DATA_CACHE_SIZE_HALF, %RDX_LP
#else
cmp __x86_64_data_cache_size_half(%rip), %RDX_LP
cmp __x86_data_cache_size_half(%rip), %RDX_LP
#endif
lea -128(%rdx), %rdx
jae L(shl_0_gobble_mem_bwd_loop)