x86-64 memcpy: Properly handle the length parameter [BZ# 24097]
On x32, the size_t parameter may be passed in the lower 32 bits of a 64-bit register with the non-zero upper 32 bits. The string/memory functions written in assembly can only use the lower 32 bits of a 64-bit register as length or must clear the upper 32 bits before using the full 64-bit register for length. This patch fixes memcpy for x32. Tested on x86-64 and x32. On x86-64, libc.so is the same with and without the fix. [BZ# 24097] CVE-2019-6488 * sysdeps/x86_64/multiarch/memcpy-ssse3-back.S: Use RDX_LP for length. Clear the upper 32 bits of RDX register. * sysdeps/x86_64/multiarch/memcpy-ssse3.S: Likewise. * sysdeps/x86_64/multiarch/memmove-avx512-no-vzeroupper.S: Likewise. * sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S: Likewise. * sysdeps/x86_64/x32/Makefile (tests): Add tst-size_t-memcpy. tst-size_t-wmemchr. * sysdeps/x86_64/x32/tst-size_t-memcpy.c: New file.
This commit is contained in:
parent
b304fc201d
commit
231c56760c
15
ChangeLog
15
ChangeLog
|
@ -1,3 +1,18 @@
|
||||||
|
2019-01-21 H.J. Lu <hongjiu.lu@intel.com>
|
||||||
|
|
||||||
|
[BZ# 24097]
|
||||||
|
CVE-2019-6488
|
||||||
|
* sysdeps/x86_64/multiarch/memcpy-ssse3-back.S: Use RDX_LP for
|
||||||
|
length. Clear the upper 32 bits of RDX register.
|
||||||
|
* sysdeps/x86_64/multiarch/memcpy-ssse3.S: Likewise.
|
||||||
|
* sysdeps/x86_64/multiarch/memmove-avx512-no-vzeroupper.S:
|
||||||
|
Likewise.
|
||||||
|
* sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S:
|
||||||
|
Likewise.
|
||||||
|
* sysdeps/x86_64/x32/Makefile (tests): Add tst-size_t-memcpy.
|
||||||
|
tst-size_t-wmemchr.
|
||||||
|
* sysdeps/x86_64/x32/tst-size_t-memcpy.c: New file.
|
||||||
|
|
||||||
2019-01-21 H.J. Lu <hongjiu.lu@intel.com>
|
2019-01-21 H.J. Lu <hongjiu.lu@intel.com>
|
||||||
|
|
||||||
[BZ# 24097]
|
[BZ# 24097]
|
||||||
|
|
|
@ -45,28 +45,33 @@
|
||||||
.section .text.ssse3,"ax",@progbits
|
.section .text.ssse3,"ax",@progbits
|
||||||
#if !defined USE_AS_MEMPCPY && !defined USE_AS_MEMMOVE
|
#if !defined USE_AS_MEMPCPY && !defined USE_AS_MEMMOVE
|
||||||
ENTRY (MEMPCPY_CHK)
|
ENTRY (MEMPCPY_CHK)
|
||||||
cmpq %rdx, %rcx
|
cmp %RDX_LP, %RCX_LP
|
||||||
jb HIDDEN_JUMPTARGET (__chk_fail)
|
jb HIDDEN_JUMPTARGET (__chk_fail)
|
||||||
END (MEMPCPY_CHK)
|
END (MEMPCPY_CHK)
|
||||||
|
|
||||||
ENTRY (MEMPCPY)
|
ENTRY (MEMPCPY)
|
||||||
movq %rdi, %rax
|
mov %RDI_LP, %RAX_LP
|
||||||
addq %rdx, %rax
|
add %RDX_LP, %RAX_LP
|
||||||
jmp L(start)
|
jmp L(start)
|
||||||
END (MEMPCPY)
|
END (MEMPCPY)
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
#if !defined USE_AS_BCOPY
|
#if !defined USE_AS_BCOPY
|
||||||
ENTRY (MEMCPY_CHK)
|
ENTRY (MEMCPY_CHK)
|
||||||
cmpq %rdx, %rcx
|
cmp %RDX_LP, %RCX_LP
|
||||||
jb HIDDEN_JUMPTARGET (__chk_fail)
|
jb HIDDEN_JUMPTARGET (__chk_fail)
|
||||||
END (MEMCPY_CHK)
|
END (MEMCPY_CHK)
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
ENTRY (MEMCPY)
|
ENTRY (MEMCPY)
|
||||||
mov %rdi, %rax
|
mov %RDI_LP, %RAX_LP
|
||||||
#ifdef USE_AS_MEMPCPY
|
#ifdef USE_AS_MEMPCPY
|
||||||
add %rdx, %rax
|
add %RDX_LP, %RAX_LP
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#ifdef __ILP32__
|
||||||
|
/* Clear the upper 32 bits. */
|
||||||
|
mov %edx, %edx
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
#ifdef USE_AS_MEMMOVE
|
#ifdef USE_AS_MEMMOVE
|
||||||
|
|
|
@ -45,28 +45,33 @@
|
||||||
.section .text.ssse3,"ax",@progbits
|
.section .text.ssse3,"ax",@progbits
|
||||||
#if !defined USE_AS_MEMPCPY && !defined USE_AS_MEMMOVE
|
#if !defined USE_AS_MEMPCPY && !defined USE_AS_MEMMOVE
|
||||||
ENTRY (MEMPCPY_CHK)
|
ENTRY (MEMPCPY_CHK)
|
||||||
cmpq %rdx, %rcx
|
cmp %RDX_LP, %RCX_LP
|
||||||
jb HIDDEN_JUMPTARGET (__chk_fail)
|
jb HIDDEN_JUMPTARGET (__chk_fail)
|
||||||
END (MEMPCPY_CHK)
|
END (MEMPCPY_CHK)
|
||||||
|
|
||||||
ENTRY (MEMPCPY)
|
ENTRY (MEMPCPY)
|
||||||
movq %rdi, %rax
|
mov %RDI_LP, %RAX_LP
|
||||||
addq %rdx, %rax
|
add %RDX_LP, %RAX_LP
|
||||||
jmp L(start)
|
jmp L(start)
|
||||||
END (MEMPCPY)
|
END (MEMPCPY)
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
#if !defined USE_AS_BCOPY
|
#if !defined USE_AS_BCOPY
|
||||||
ENTRY (MEMCPY_CHK)
|
ENTRY (MEMCPY_CHK)
|
||||||
cmpq %rdx, %rcx
|
cmp %RDX_LP, %RCX_LP
|
||||||
jb HIDDEN_JUMPTARGET (__chk_fail)
|
jb HIDDEN_JUMPTARGET (__chk_fail)
|
||||||
END (MEMCPY_CHK)
|
END (MEMCPY_CHK)
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
ENTRY (MEMCPY)
|
ENTRY (MEMCPY)
|
||||||
mov %rdi, %rax
|
mov %RDI_LP, %RAX_LP
|
||||||
#ifdef USE_AS_MEMPCPY
|
#ifdef USE_AS_MEMPCPY
|
||||||
add %rdx, %rax
|
add %RDX_LP, %RAX_LP
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#ifdef __ILP32__
|
||||||
|
/* Clear the upper 32 bits. */
|
||||||
|
mov %edx, %edx
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
#ifdef USE_AS_MEMMOVE
|
#ifdef USE_AS_MEMMOVE
|
||||||
|
|
|
@ -24,27 +24,31 @@
|
||||||
|
|
||||||
.section .text.avx512,"ax",@progbits
|
.section .text.avx512,"ax",@progbits
|
||||||
ENTRY (__mempcpy_chk_avx512_no_vzeroupper)
|
ENTRY (__mempcpy_chk_avx512_no_vzeroupper)
|
||||||
cmpq %rdx, %rcx
|
cmp %RDX_LP, %RCX_LP
|
||||||
jb HIDDEN_JUMPTARGET (__chk_fail)
|
jb HIDDEN_JUMPTARGET (__chk_fail)
|
||||||
END (__mempcpy_chk_avx512_no_vzeroupper)
|
END (__mempcpy_chk_avx512_no_vzeroupper)
|
||||||
|
|
||||||
ENTRY (__mempcpy_avx512_no_vzeroupper)
|
ENTRY (__mempcpy_avx512_no_vzeroupper)
|
||||||
movq %rdi, %rax
|
mov %RDI_LP, %RAX_LP
|
||||||
addq %rdx, %rax
|
add %RDX_LP, %RAX_LP
|
||||||
jmp L(start)
|
jmp L(start)
|
||||||
END (__mempcpy_avx512_no_vzeroupper)
|
END (__mempcpy_avx512_no_vzeroupper)
|
||||||
|
|
||||||
ENTRY (__memmove_chk_avx512_no_vzeroupper)
|
ENTRY (__memmove_chk_avx512_no_vzeroupper)
|
||||||
cmpq %rdx, %rcx
|
cmp %RDX_LP, %RCX_LP
|
||||||
jb HIDDEN_JUMPTARGET (__chk_fail)
|
jb HIDDEN_JUMPTARGET (__chk_fail)
|
||||||
END (__memmove_chk_avx512_no_vzeroupper)
|
END (__memmove_chk_avx512_no_vzeroupper)
|
||||||
|
|
||||||
ENTRY (__memmove_avx512_no_vzeroupper)
|
ENTRY (__memmove_avx512_no_vzeroupper)
|
||||||
mov %rdi, %rax
|
mov %RDI_LP, %RAX_LP
|
||||||
# ifdef USE_AS_MEMPCPY
|
# ifdef USE_AS_MEMPCPY
|
||||||
add %rdx, %rax
|
add %RDX_LP, %RAX_LP
|
||||||
# endif
|
# endif
|
||||||
L(start):
|
L(start):
|
||||||
|
# ifdef __ILP32__
|
||||||
|
/* Clear the upper 32 bits. */
|
||||||
|
mov %edx, %edx
|
||||||
|
# endif
|
||||||
lea (%rsi, %rdx), %rcx
|
lea (%rsi, %rdx), %rcx
|
||||||
lea (%rdi, %rdx), %r9
|
lea (%rdi, %rdx), %r9
|
||||||
cmp $512, %rdx
|
cmp $512, %rdx
|
||||||
|
|
|
@ -106,20 +106,20 @@
|
||||||
.section SECTION(.text),"ax",@progbits
|
.section SECTION(.text),"ax",@progbits
|
||||||
#if defined SHARED && IS_IN (libc)
|
#if defined SHARED && IS_IN (libc)
|
||||||
ENTRY (MEMMOVE_CHK_SYMBOL (__mempcpy_chk, unaligned))
|
ENTRY (MEMMOVE_CHK_SYMBOL (__mempcpy_chk, unaligned))
|
||||||
cmpq %rdx, %rcx
|
cmp %RDX_LP, %RCX_LP
|
||||||
jb HIDDEN_JUMPTARGET (__chk_fail)
|
jb HIDDEN_JUMPTARGET (__chk_fail)
|
||||||
END (MEMMOVE_CHK_SYMBOL (__mempcpy_chk, unaligned))
|
END (MEMMOVE_CHK_SYMBOL (__mempcpy_chk, unaligned))
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
ENTRY (MEMPCPY_SYMBOL (__mempcpy, unaligned))
|
ENTRY (MEMPCPY_SYMBOL (__mempcpy, unaligned))
|
||||||
movq %rdi, %rax
|
mov %RDI_LP, %RAX_LP
|
||||||
addq %rdx, %rax
|
add %RDX_LP, %RAX_LP
|
||||||
jmp L(start)
|
jmp L(start)
|
||||||
END (MEMPCPY_SYMBOL (__mempcpy, unaligned))
|
END (MEMPCPY_SYMBOL (__mempcpy, unaligned))
|
||||||
|
|
||||||
#if defined SHARED && IS_IN (libc)
|
#if defined SHARED && IS_IN (libc)
|
||||||
ENTRY (MEMMOVE_CHK_SYMBOL (__memmove_chk, unaligned))
|
ENTRY (MEMMOVE_CHK_SYMBOL (__memmove_chk, unaligned))
|
||||||
cmpq %rdx, %rcx
|
cmp %RDX_LP, %RCX_LP
|
||||||
jb HIDDEN_JUMPTARGET (__chk_fail)
|
jb HIDDEN_JUMPTARGET (__chk_fail)
|
||||||
END (MEMMOVE_CHK_SYMBOL (__memmove_chk, unaligned))
|
END (MEMMOVE_CHK_SYMBOL (__memmove_chk, unaligned))
|
||||||
#endif
|
#endif
|
||||||
|
@ -127,9 +127,13 @@ END (MEMMOVE_CHK_SYMBOL (__memmove_chk, unaligned))
|
||||||
ENTRY (MEMMOVE_SYMBOL (__memmove, unaligned))
|
ENTRY (MEMMOVE_SYMBOL (__memmove, unaligned))
|
||||||
movq %rdi, %rax
|
movq %rdi, %rax
|
||||||
L(start):
|
L(start):
|
||||||
cmpq $VEC_SIZE, %rdx
|
# ifdef __ILP32__
|
||||||
|
/* Clear the upper 32 bits. */
|
||||||
|
movl %edx, %edx
|
||||||
|
# endif
|
||||||
|
cmp $VEC_SIZE, %RDX_LP
|
||||||
jb L(less_vec)
|
jb L(less_vec)
|
||||||
cmpq $(VEC_SIZE * 2), %rdx
|
cmp $(VEC_SIZE * 2), %RDX_LP
|
||||||
ja L(more_2x_vec)
|
ja L(more_2x_vec)
|
||||||
#if !defined USE_MULTIARCH || !IS_IN (libc)
|
#if !defined USE_MULTIARCH || !IS_IN (libc)
|
||||||
L(last_2x_vec):
|
L(last_2x_vec):
|
||||||
|
@ -149,38 +153,38 @@ END (MEMMOVE_SYMBOL (__memmove, unaligned))
|
||||||
|
|
||||||
# if VEC_SIZE == 16
|
# if VEC_SIZE == 16
|
||||||
ENTRY (__mempcpy_chk_erms)
|
ENTRY (__mempcpy_chk_erms)
|
||||||
cmpq %rdx, %rcx
|
cmp %RDX_LP, %RCX_LP
|
||||||
jb HIDDEN_JUMPTARGET (__chk_fail)
|
jb HIDDEN_JUMPTARGET (__chk_fail)
|
||||||
END (__mempcpy_chk_erms)
|
END (__mempcpy_chk_erms)
|
||||||
|
|
||||||
/* Only used to measure performance of REP MOVSB. */
|
/* Only used to measure performance of REP MOVSB. */
|
||||||
ENTRY (__mempcpy_erms)
|
ENTRY (__mempcpy_erms)
|
||||||
movq %rdi, %rax
|
mov %RDI_LP, %RAX_LP
|
||||||
/* Skip zero length. */
|
/* Skip zero length. */
|
||||||
testq %rdx, %rdx
|
test %RDX_LP, %RDX_LP
|
||||||
jz 2f
|
jz 2f
|
||||||
addq %rdx, %rax
|
add %RDX_LP, %RAX_LP
|
||||||
jmp L(start_movsb)
|
jmp L(start_movsb)
|
||||||
END (__mempcpy_erms)
|
END (__mempcpy_erms)
|
||||||
|
|
||||||
ENTRY (__memmove_chk_erms)
|
ENTRY (__memmove_chk_erms)
|
||||||
cmpq %rdx, %rcx
|
cmp %RDX_LP, %RCX_LP
|
||||||
jb HIDDEN_JUMPTARGET (__chk_fail)
|
jb HIDDEN_JUMPTARGET (__chk_fail)
|
||||||
END (__memmove_chk_erms)
|
END (__memmove_chk_erms)
|
||||||
|
|
||||||
ENTRY (__memmove_erms)
|
ENTRY (__memmove_erms)
|
||||||
movq %rdi, %rax
|
movq %rdi, %rax
|
||||||
/* Skip zero length. */
|
/* Skip zero length. */
|
||||||
testq %rdx, %rdx
|
test %RDX_LP, %RDX_LP
|
||||||
jz 2f
|
jz 2f
|
||||||
L(start_movsb):
|
L(start_movsb):
|
||||||
movq %rdx, %rcx
|
mov %RDX_LP, %RCX_LP
|
||||||
cmpq %rsi, %rdi
|
cmp %RSI_LP, %RDI_LP
|
||||||
jb 1f
|
jb 1f
|
||||||
/* Source == destination is less common. */
|
/* Source == destination is less common. */
|
||||||
je 2f
|
je 2f
|
||||||
leaq (%rsi,%rcx), %rdx
|
lea (%rsi,%rcx), %RDX_LP
|
||||||
cmpq %rdx, %rdi
|
cmp %RDX_LP, %RDI_LP
|
||||||
jb L(movsb_backward)
|
jb L(movsb_backward)
|
||||||
1:
|
1:
|
||||||
rep movsb
|
rep movsb
|
||||||
|
@ -200,20 +204,20 @@ strong_alias (__memmove_chk_erms, __memcpy_chk_erms)
|
||||||
|
|
||||||
# ifdef SHARED
|
# ifdef SHARED
|
||||||
ENTRY (MEMMOVE_CHK_SYMBOL (__mempcpy_chk, unaligned_erms))
|
ENTRY (MEMMOVE_CHK_SYMBOL (__mempcpy_chk, unaligned_erms))
|
||||||
cmpq %rdx, %rcx
|
cmp %RDX_LP, %RCX_LP
|
||||||
jb HIDDEN_JUMPTARGET (__chk_fail)
|
jb HIDDEN_JUMPTARGET (__chk_fail)
|
||||||
END (MEMMOVE_CHK_SYMBOL (__mempcpy_chk, unaligned_erms))
|
END (MEMMOVE_CHK_SYMBOL (__mempcpy_chk, unaligned_erms))
|
||||||
# endif
|
# endif
|
||||||
|
|
||||||
ENTRY (MEMMOVE_SYMBOL (__mempcpy, unaligned_erms))
|
ENTRY (MEMMOVE_SYMBOL (__mempcpy, unaligned_erms))
|
||||||
movq %rdi, %rax
|
mov %RDI_LP, %RAX_LP
|
||||||
addq %rdx, %rax
|
add %RDX_LP, %RAX_LP
|
||||||
jmp L(start_erms)
|
jmp L(start_erms)
|
||||||
END (MEMMOVE_SYMBOL (__mempcpy, unaligned_erms))
|
END (MEMMOVE_SYMBOL (__mempcpy, unaligned_erms))
|
||||||
|
|
||||||
# ifdef SHARED
|
# ifdef SHARED
|
||||||
ENTRY (MEMMOVE_CHK_SYMBOL (__memmove_chk, unaligned_erms))
|
ENTRY (MEMMOVE_CHK_SYMBOL (__memmove_chk, unaligned_erms))
|
||||||
cmpq %rdx, %rcx
|
cmp %RDX_LP, %RCX_LP
|
||||||
jb HIDDEN_JUMPTARGET (__chk_fail)
|
jb HIDDEN_JUMPTARGET (__chk_fail)
|
||||||
END (MEMMOVE_CHK_SYMBOL (__memmove_chk, unaligned_erms))
|
END (MEMMOVE_CHK_SYMBOL (__memmove_chk, unaligned_erms))
|
||||||
# endif
|
# endif
|
||||||
|
@ -221,9 +225,13 @@ END (MEMMOVE_CHK_SYMBOL (__memmove_chk, unaligned_erms))
|
||||||
ENTRY (MEMMOVE_SYMBOL (__memmove, unaligned_erms))
|
ENTRY (MEMMOVE_SYMBOL (__memmove, unaligned_erms))
|
||||||
movq %rdi, %rax
|
movq %rdi, %rax
|
||||||
L(start_erms):
|
L(start_erms):
|
||||||
cmpq $VEC_SIZE, %rdx
|
# ifdef __ILP32__
|
||||||
|
/* Clear the upper 32 bits. */
|
||||||
|
movl %edx, %edx
|
||||||
|
# endif
|
||||||
|
cmp $VEC_SIZE, %RDX_LP
|
||||||
jb L(less_vec)
|
jb L(less_vec)
|
||||||
cmpq $(VEC_SIZE * 2), %rdx
|
cmp $(VEC_SIZE * 2), %RDX_LP
|
||||||
ja L(movsb_more_2x_vec)
|
ja L(movsb_more_2x_vec)
|
||||||
L(last_2x_vec):
|
L(last_2x_vec):
|
||||||
/* From VEC and to 2 * VEC. No branch when size == VEC_SIZE. */
|
/* From VEC and to 2 * VEC. No branch when size == VEC_SIZE. */
|
||||||
|
@ -250,7 +258,7 @@ L(movsb):
|
||||||
# endif
|
# endif
|
||||||
jb L(more_8x_vec_backward)
|
jb L(more_8x_vec_backward)
|
||||||
1:
|
1:
|
||||||
movq %rdx, %rcx
|
mov %RDX_LP, %RCX_LP
|
||||||
rep movsb
|
rep movsb
|
||||||
L(nop):
|
L(nop):
|
||||||
ret
|
ret
|
||||||
|
|
|
@ -6,7 +6,7 @@ CFLAGS-s_llround.c += -fno-builtin-lround
|
||||||
endif
|
endif
|
||||||
|
|
||||||
ifeq ($(subdir),string)
|
ifeq ($(subdir),string)
|
||||||
tests += tst-size_t-memchr tst-size_t-memcmp
|
tests += tst-size_t-memchr tst-size_t-memcmp tst-size_t-memcpy
|
||||||
endif
|
endif
|
||||||
|
|
||||||
ifeq ($(subdir),wcsmbs)
|
ifeq ($(subdir),wcsmbs)
|
||||||
|
|
|
@ -0,0 +1,58 @@
|
||||||
|
/* Test memcpy with size_t in the lower 32 bits of 64-bit register.
|
||||||
|
Copyright (C) 2019 Free Software Foundation, Inc.
|
||||||
|
This file is part of the GNU C Library.
|
||||||
|
|
||||||
|
The GNU C Library is free software; you can redistribute it and/or
|
||||||
|
modify it under the terms of the GNU Lesser General Public
|
||||||
|
License as published by the Free Software Foundation; either
|
||||||
|
version 2.1 of the License, or (at your option) any later version.
|
||||||
|
|
||||||
|
The GNU C Library is distributed in the hope that it will be useful,
|
||||||
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||||
|
Lesser General Public License for more details.
|
||||||
|
|
||||||
|
You should have received a copy of the GNU Lesser General Public
|
||||||
|
License along with the GNU C Library; if not, see
|
||||||
|
<http://www.gnu.org/licenses/>. */
|
||||||
|
|
||||||
|
#define TEST_NAME "memcpy"
|
||||||
|
#include "test-size_t.h"
|
||||||
|
|
||||||
|
IMPL (memcpy, 1)
|
||||||
|
|
||||||
|
typedef void *(*proto_t) (void *, const void *, size_t);
|
||||||
|
|
||||||
|
static void *
|
||||||
|
__attribute__ ((noinline, noclone))
|
||||||
|
do_memcpy (parameter_t a, parameter_t b)
|
||||||
|
{
|
||||||
|
return CALL (&b, a.p, b.p, a.len);
|
||||||
|
}
|
||||||
|
|
||||||
|
static int
|
||||||
|
test_main (void)
|
||||||
|
{
|
||||||
|
test_init ();
|
||||||
|
|
||||||
|
parameter_t dest = { { page_size }, buf1 };
|
||||||
|
parameter_t src = { { 0 }, buf2 };
|
||||||
|
|
||||||
|
int ret = 0;
|
||||||
|
FOR_EACH_IMPL (impl, 0)
|
||||||
|
{
|
||||||
|
src.fn = impl->fn;
|
||||||
|
do_memcpy (dest, src);
|
||||||
|
int res = memcmp (dest.p, src.p, dest.len);
|
||||||
|
if (res)
|
||||||
|
{
|
||||||
|
error (0, 0, "Wrong result in function %s: %i != 0",
|
||||||
|
impl->name, res);
|
||||||
|
ret = 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return ret ? EXIT_FAILURE : EXIT_SUCCESS;
|
||||||
|
}
|
||||||
|
|
||||||
|
#include <support/test-driver.c>
|
Loading…
Reference in New Issue