libstdc++: Add mem_order_hle_acquire/release to atomic.h v2

The underlying compiler supports additional __ATOMIC_HLE_ACQUIRE/RELEASE
memmodel flags for TSX, but these were not exposed to the C++ wrapper.
Handle them there.
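
For illustration only (this sketch is not part of the patch; it mirrors
the ACQ/REL macros in the new test case below), an elided spin lock
passes the HLE hint OR-ed onto a standard memory order:

    #include <atomic>

    std::atomic_flag lock = ATOMIC_FLAG_INIT;

    void locked_region()
    {
      // Emits XACQUIRE on TSX-capable x86; a plain locked op elsewhere.
      while (lock.test_and_set(std::memory_order_acquire
                               | std::__memory_order_hle_acquire))
        ; // spin until the flag is clear
      // ... critical section, possibly executed transactionally ...
      lock.clear(std::memory_order_release
                 | std::__memory_order_hle_release); // XRELEASE
    }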

These are additional flags, so some of the assert checks need to mask
off the flags before checking the memory model type.
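
Concretely, the base order occupies the low 16 bits and the HLE hints
the bits above them, so one AND with __memory_order_mask recovers the
plain order. A small sketch (not from the patch) of the arithmetic the
asserts rely on, using the enumerators added in atomic_base.h below:

    #include <atomic>
    #include <cassert>

    int main()
    {
      using namespace std;
      memory_order m = memory_order_acquire | __memory_order_hle_acquire;
      // __memory_order_mask (0x0ffff) strips the modifier bits ...
      assert((m & __memory_order_mask) == memory_order_acquire);
      // ... and __memory_order_modifier_mask (0xffff0000) keeps only them.
      assert(int(m & __memory_order_modifier_mask)
             == __memory_order_hle_acquire);
      return 0;
    }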

libstdc++-v3/:
2013-01-12  Andi Kleen  <ak@linux.intel.com>
	    Jonathan Wakely  <jwakely.gcc@gmail.com>

	PR libstdc++/55223
	* include/bits/atomic_base.h (__memory_order_modifier): Add
	__memory_order_mask, __memory_order_modifier_mask,
	__memory_order_hle_acquire, __memory_order_hle_release.
	(operator|, operator&): Add.
	(__cmpexch_failure_order): Rename to __cmpexch_failure_order2.
	(__cmpexch_failure_order): Add.
	(clear, store, load, compare_exchange_weak, compare_exchange_strong):
	Handle flags.
	* testsuite/29_atomics/atomic_flag/test_and_set/explicit-hle.cc:
	Add.

Co-Authored-By: Jonathan Wakely <jwakely.gcc@gmail.com>

From-SVN: r195321

--- a/libstdc++-v3/ChangeLog
+++ b/libstdc++-v3/ChangeLog

@@ -1,3 +1,18 @@
+2013-01-12  Andi Kleen  <ak@linux.intel.com>
+	    Jonathan Wakely  <jwakely.gcc@gmail.com>
+
+	PR libstdc++/55223
+	* include/bits/atomic_base.h (__memory_order_modifier): Add
+	__memory_order_mask, __memory_order_modifier_mask,
+	__memory_order_hle_acquire, __memory_order_hle_release.
+	(operator|, operator&): Add.
+	(__cmpexch_failure_order): Rename to __cmpexch_failure_order2.
+	(__cmpexch_failure_order): Add.
+	(clear, store, load, compare_exchange_weak, compare_exchange_strong):
+	Handle flags.
+	* testsuite/29_atomics/atomic_flag/test_and_set/explicit-hle.cc:
+	Add.
+
 2013-01-19  Jonathan Wakely  <jwakely.gcc@gmail.com>
 
 	PR libstdc++/55861

--- a/libstdc++-v3/include/bits/atomic_base.h
+++ b/libstdc++-v3/include/bits/atomic_base.h

@@ -59,14 +59,41 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
       memory_order_seq_cst
     } memory_order;
 
+  enum __memory_order_modifier
+    {
+      __memory_order_mask          = 0x0ffff,
+      __memory_order_modifier_mask = 0xffff0000,
+      __memory_order_hle_acquire   = 0x10000,
+      __memory_order_hle_release   = 0x20000
+    };
+
+  constexpr memory_order
+  operator|(memory_order __m, __memory_order_modifier __mod)
+  {
+    return memory_order(__m | int(__mod));
+  }
+
+  constexpr memory_order
+  operator&(memory_order __m, __memory_order_modifier __mod)
+  {
+    return memory_order(__m & int(__mod));
+  }
+
   // Drop release ordering as per [atomics.types.operations.req]/21
   constexpr memory_order
-  __cmpexch_failure_order(memory_order __m) noexcept
+  __cmpexch_failure_order2(memory_order __m) noexcept
   {
     return __m == memory_order_acq_rel ? memory_order_acquire
       : __m == memory_order_release ? memory_order_relaxed : __m;
   }
 
+  constexpr memory_order
+  __cmpexch_failure_order(memory_order __m) noexcept
+  {
+    return memory_order(__cmpexch_failure_order2(__m & __memory_order_mask)
+			| (__m & __memory_order_modifier_mask));
+  }
+
   inline void
   atomic_thread_fence(memory_order __m) noexcept
   { __atomic_thread_fence(__m); }
@@ -268,9 +295,10 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
     void
     clear(memory_order __m = memory_order_seq_cst) noexcept
     {
-      __glibcxx_assert(__m != memory_order_consume);
-      __glibcxx_assert(__m != memory_order_acquire);
-      __glibcxx_assert(__m != memory_order_acq_rel);
+      memory_order __b = __m & __memory_order_mask;
+      __glibcxx_assert(__b != memory_order_consume);
+      __glibcxx_assert(__b != memory_order_acquire);
+      __glibcxx_assert(__b != memory_order_acq_rel);
 
       __atomic_clear (&_M_i, __m);
     }
@@ -278,9 +306,10 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
     void
     clear(memory_order __m = memory_order_seq_cst) volatile noexcept
     {
-      __glibcxx_assert(__m != memory_order_consume);
-      __glibcxx_assert(__m != memory_order_acquire);
-      __glibcxx_assert(__m != memory_order_acq_rel);
+      memory_order __b = __m & __memory_order_mask;
+      __glibcxx_assert(__b != memory_order_consume);
+      __glibcxx_assert(__b != memory_order_acquire);
+      __glibcxx_assert(__b != memory_order_acq_rel);
 
       __atomic_clear (&_M_i, __m);
     }
@@ -431,9 +460,10 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
       void
       store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept
      {
-        __glibcxx_assert(__m != memory_order_acquire);
-        __glibcxx_assert(__m != memory_order_acq_rel);
-        __glibcxx_assert(__m != memory_order_consume);
+        memory_order __b = __m & __memory_order_mask;
+        __glibcxx_assert(__b != memory_order_acquire);
+        __glibcxx_assert(__b != memory_order_acq_rel);
+        __glibcxx_assert(__b != memory_order_consume);
 
         __atomic_store_n(&_M_i, __i, __m);
      }
@@ -442,9 +472,10 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
       store(__int_type __i,
             memory_order __m = memory_order_seq_cst) volatile noexcept
      {
-        __glibcxx_assert(__m != memory_order_acquire);
-        __glibcxx_assert(__m != memory_order_acq_rel);
-        __glibcxx_assert(__m != memory_order_consume);
+        memory_order __b = __m & __memory_order_mask;
+        __glibcxx_assert(__b != memory_order_acquire);
+        __glibcxx_assert(__b != memory_order_acq_rel);
+        __glibcxx_assert(__b != memory_order_consume);
 
         __atomic_store_n(&_M_i, __i, __m);
      }
@@ -452,8 +483,9 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
       __int_type
       load(memory_order __m = memory_order_seq_cst) const noexcept
      {
-        __glibcxx_assert(__m != memory_order_release);
-        __glibcxx_assert(__m != memory_order_acq_rel);
+        memory_order __b = __m & __memory_order_mask;
+        __glibcxx_assert(__b != memory_order_release);
+        __glibcxx_assert(__b != memory_order_acq_rel);
 
         return __atomic_load_n(&_M_i, __m);
      }
@@ -461,8 +493,9 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
       __int_type
       load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
-        __glibcxx_assert(__m != memory_order_release);
-        __glibcxx_assert(__m != memory_order_acq_rel);
+        memory_order __b = __m & __memory_order_mask;
+        __glibcxx_assert(__b != memory_order_release);
+        __glibcxx_assert(__b != memory_order_acq_rel);
 
         return __atomic_load_n(&_M_i, __m);
      }
@@ -486,9 +519,11 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
       compare_exchange_weak(__int_type& __i1, __int_type __i2,
                             memory_order __m1, memory_order __m2) noexcept
      {
-        __glibcxx_assert(__m2 != memory_order_release);
-        __glibcxx_assert(__m2 != memory_order_acq_rel);
-        __glibcxx_assert(__m2 <= __m1);
+        memory_order __b2 = __m2 & __memory_order_mask;
+        memory_order __b1 = __m1 & __memory_order_mask;
+        __glibcxx_assert(__b2 != memory_order_release);
+        __glibcxx_assert(__b2 != memory_order_acq_rel);
+        __glibcxx_assert(__b2 <= __b1);
 
         return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }
@@ -498,9 +533,11 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
                             memory_order __m1,
                             memory_order __m2) volatile noexcept
      {
-        __glibcxx_assert(__m2 != memory_order_release);
-        __glibcxx_assert(__m2 != memory_order_acq_rel);
-        __glibcxx_assert(__m2 <= __m1);
+        memory_order __b2 = __m2 & __memory_order_mask;
+        memory_order __b1 = __m1 & __memory_order_mask;
+        __glibcxx_assert(__b2 != memory_order_release);
+        __glibcxx_assert(__b2 != memory_order_acq_rel);
+        __glibcxx_assert(__b2 <= __b1);
 
         return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }
@@ -525,9 +562,11 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
       compare_exchange_strong(__int_type& __i1, __int_type __i2,
                               memory_order __m1, memory_order __m2) noexcept
      {
-        __glibcxx_assert(__m2 != memory_order_release);
-        __glibcxx_assert(__m2 != memory_order_acq_rel);
-        __glibcxx_assert(__m2 <= __m1);
+        memory_order __b2 = __m2 & __memory_order_mask;
+        memory_order __b1 = __m1 & __memory_order_mask;
+        __glibcxx_assert(__b2 != memory_order_release);
+        __glibcxx_assert(__b2 != memory_order_acq_rel);
+        __glibcxx_assert(__b2 <= __b1);
 
         return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }
@@ -537,9 +576,12 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
                               memory_order __m1,
                               memory_order __m2) volatile noexcept
      {
-        __glibcxx_assert(__m2 != memory_order_release);
-        __glibcxx_assert(__m2 != memory_order_acq_rel);
-        __glibcxx_assert(__m2 <= __m1);
+        memory_order __b2 = __m2 & __memory_order_mask;
+        memory_order __b1 = __m1 & __memory_order_mask;
+        __glibcxx_assert(__b2 != memory_order_release);
+        __glibcxx_assert(__b2 != memory_order_acq_rel);
+        __glibcxx_assert(__b2 <= __b1);
 
         return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }
@@ -726,9 +768,11 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
       store(__pointer_type __p,
             memory_order __m = memory_order_seq_cst) noexcept
      {
-        __glibcxx_assert(__m != memory_order_acquire);
-        __glibcxx_assert(__m != memory_order_acq_rel);
-        __glibcxx_assert(__m != memory_order_consume);
+        memory_order __b = __m & __memory_order_mask;
+        __glibcxx_assert(__b != memory_order_acquire);
+        __glibcxx_assert(__b != memory_order_acq_rel);
+        __glibcxx_assert(__b != memory_order_consume);
 
         __atomic_store_n(&_M_p, __p, __m);
      }
@@ -737,9 +781,10 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
       store(__pointer_type __p,
             memory_order __m = memory_order_seq_cst) volatile noexcept
      {
-        __glibcxx_assert(__m != memory_order_acquire);
-        __glibcxx_assert(__m != memory_order_acq_rel);
-        __glibcxx_assert(__m != memory_order_consume);
+        memory_order __b = __m & __memory_order_mask;
+        __glibcxx_assert(__b != memory_order_acquire);
+        __glibcxx_assert(__b != memory_order_acq_rel);
+        __glibcxx_assert(__b != memory_order_consume);
 
         __atomic_store_n(&_M_p, __p, __m);
      }
@@ -747,8 +792,9 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
       __pointer_type
       load(memory_order __m = memory_order_seq_cst) const noexcept
      {
-        __glibcxx_assert(__m != memory_order_release);
-        __glibcxx_assert(__m != memory_order_acq_rel);
+        memory_order __b = __m & __memory_order_mask;
+        __glibcxx_assert(__b != memory_order_release);
+        __glibcxx_assert(__b != memory_order_acq_rel);
 
         return __atomic_load_n(&_M_p, __m);
      }
@@ -756,8 +802,9 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
       __pointer_type
       load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
-        __glibcxx_assert(__m != memory_order_release);
-        __glibcxx_assert(__m != memory_order_acq_rel);
+        memory_order __b = __m & __memory_order_mask;
+        __glibcxx_assert(__b != memory_order_release);
+        __glibcxx_assert(__b != memory_order_acq_rel);
 
         return __atomic_load_n(&_M_p, __m);
      }
@@ -782,9 +829,11 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
                               memory_order __m1,
                               memory_order __m2) noexcept
      {
-        __glibcxx_assert(__m2 != memory_order_release);
-        __glibcxx_assert(__m2 != memory_order_acq_rel);
-        __glibcxx_assert(__m2 <= __m1);
+        memory_order __b2 = __m2 & __memory_order_mask;
+        memory_order __b1 = __m1 & __memory_order_mask;
+        __glibcxx_assert(__b2 != memory_order_release);
+        __glibcxx_assert(__b2 != memory_order_acq_rel);
+        __glibcxx_assert(__b2 <= __b1);
 
         return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }
@@ -794,9 +843,12 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
                               memory_order __m1,
                               memory_order __m2) volatile noexcept
      {
-        __glibcxx_assert(__m2 != memory_order_release);
-        __glibcxx_assert(__m2 != memory_order_acq_rel);
-        __glibcxx_assert(__m2 <= __m1);
+        memory_order __b2 = __m2 & __memory_order_mask;
+        memory_order __b1 = __m1 & __memory_order_mask;
+        __glibcxx_assert(__b2 != memory_order_release);
+        __glibcxx_assert(__b2 != memory_order_acq_rel);
+        __glibcxx_assert(__b2 <= __b1);
 
         return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }

--- /dev/null
+++ b/libstdc++-v3/testsuite/29_atomics/atomic_flag/test_and_set/explicit-hle.cc

@@ -0,0 +1,120 @@
+// { dg-options "-std=gnu++0x" }
+// { dg-do compile { target i?86-*-* x86_64-*-* } }
+// { dg-final { scan-assembler-times "\(xacquire\|\.byte.*0xf2\)" 14 } }
+// { dg-final { scan-assembler-times "\(xrelease\|\.byte.*0xf3\)" 14 } }
+
+// Copyright (C) 2008, 2009, 2013 Free Software Foundation, Inc.
+//
+// This file is part of the GNU ISO C++ Library.  This library is free
+// software; you can redistribute it and/or modify it under the
+// terms of the GNU General Public License as published by the
+// Free Software Foundation; either version 3, or (at your option)
+// any later version.
+
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU General Public License for more details.
+
+// You should have received a copy of the GNU General Public License along
+// with this library; see the file COPYING3.  If not see
+// <http://www.gnu.org/licenses/>.
+
+#include <atomic>
+
+#define ACQ memory_order_acquire | __memory_order_hle_acquire
+#define REL memory_order_release | __memory_order_hle_release
+
+int main()
+{
+  unsigned zero, one;
+  using namespace std;
+  atomic_flag af = ATOMIC_FLAG_INIT;
+
+  if (!af.test_and_set(ACQ))
+    af.clear(REL);
+
+  atomic_uint au = ATOMIC_VAR_INIT(0);
+
+  if (au.exchange(1, ACQ))
+    au.store(0, REL);
+
+  if (au.exchange(1, ACQ))
+    au.exchange(0, REL);
+
+  zero = 0;
+  one = 1;
+  if (au.compare_exchange_weak(zero, 1, ACQ, memory_order_consume))
+    au.compare_exchange_weak(one, 0, REL, memory_order_consume);
+
+  zero = 0;
+  one = 1;
+  if (au.compare_exchange_strong(zero, 1, ACQ, memory_order_consume))
+    au.compare_exchange_strong(one, 0, REL, memory_order_consume);
+
+  if (!au.fetch_add(1, ACQ))
+    au.fetch_add(-1, REL);
+
+  if (!au.fetch_sub(1, ACQ))
+    au.fetch_sub(-1, REL);
+
+#if 0 /* broken in underlying target */
+  if (!au.fetch_and(1, ACQ))
+    au.fetch_and(-1, REL);
+
+  if (!au.fetch_or(1, ACQ))
+    au.fetch_or(-1, REL);
+
+  if (!au.fetch_xor(1, ACQ))
+    au.fetch_xor(-1, REL);
+
+  if (!au.fetch_nand(1, ACQ))
+    au.fetch_nand(-1, REL);
+#endif
+
+  volatile atomic_flag vaf = ATOMIC_FLAG_INIT;
+
+  if (!vaf.test_and_set(ACQ))
+    vaf.clear(REL);
+
+  volatile atomic_uint vau = ATOMIC_VAR_INIT(0);
+
+  if (!vau.exchange(1, ACQ))
+    vau.store(0, REL);
+
+  if (!vau.exchange(1, ACQ))
+    vau.exchange(0, REL);
+
+  zero = 0;
+  one = 1;
+  if (vau.compare_exchange_weak(zero, 1, ACQ, memory_order_consume))
+    vau.compare_exchange_weak(one, 0, REL, memory_order_consume);
+
+  zero = 0;
+  one = 1;
+  if (vau.compare_exchange_strong(zero, 1, ACQ, memory_order_consume))
+    vau.compare_exchange_strong(one, 0, REL, memory_order_consume);
+
+  if (!vau.fetch_add(1, ACQ))
+    vau.fetch_add(-1, REL);
+
+  if (!vau.fetch_sub(1, ACQ))
+    vau.fetch_sub(-1, REL);
+
+#if 0 /* broken in underlying target */
+  if (!vau.fetch_and(1, ACQ))
+    vau.fetch_and(-1, REL);
+
+  if (!vau.fetch_or(1, ACQ))
+    vau.fetch_or(-1, REL);
+
+  if (!vau.fetch_xor(1, ACQ))
+    vau.fetch_xor(-1, REL);
+
+  if (!vau.fetch_nand(1, ACQ))
+    vau.fetch_nand(-1, REL);
+#endif
+
+  return 0;
+}