b88907e62a
Commit e61061a088 added support for hardening options in the toolchain.
However, this breaks the gcc 5.5.0 build when FORTIFY_SOURCE is set to
anything other than FORTIFY_SOURCE_NONE, as reported in [1].
Fix this by backporting the upstream patch that fixes the issue in later
gcc versions.
[1] https://gcc.gnu.org/bugzilla/show_bug.cgi?format=multiple&id=61164
Signed-off-by: Hans Dedecker <dedeckeh@gmail.com>
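Background: the failure is a macro clash. glibc's <sys/cdefs.h> defines
__always_inline (as __inline plus the always_inline attribute), and the inline
wrappers enabled by FORTIFY_SOURCE expand that macro, so local_atomic's old
"#undef __always_inline / #define __always_inline ..." clobbered a definition
those wrappers depend on. The backported patch sidesteps the clash by giving
libitm a private macro name. The standalone sketch below is illustrative only
(it is not part of the patch, and copy_tag() is a made-up helper); it shows the
fixed pattern coexisting with a fortified glibc call:

// Build e.g. with: g++ -O2 -D_FORTIFY_SOURCE=2 -c sketch.cc
#include <cstring>   // with fortification this pulls in glibc's inline wrappers

// Same private macro the backported patch introduces; glibc's __always_inline
// is left untouched, so the fortified wrappers keep their expected definition.
#define __libitm_always_inline __attribute__((always_inline))

inline __libitm_always_inline void
copy_tag(char (&dst)[8], const char *src)
{
  std::memcpy(dst, src, 4);   // fortified memcpy wrapper still expands correctly
}

int main()
{
  char tag[8] = {};
  copy_tag(tag, "itm!");
  return tag[0] == 'i' ? 0 : 1;
}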
From 55f12fce4ccf77513644a247f9c401a5b1fa2402 Mon Sep 17 00:00:00 2001
From: torvald <torvald@138bc75d-0d04-0410-961f-82ee72b054a4>
Date: Thu, 20 Aug 2015 17:55:24 +0000
Subject: [PATCH] libitm: Don't redefine __always_inline in local_atomic.

git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@227040 138bc75d-0d04-0410-961f-82ee72b054a4
---
libitm/ChangeLog | 6 +
libitm/local_atomic | 299 ++++++++++++++++++++++----------------------
2 files changed, 155 insertions(+), 150 deletions(-)

diff --git a/libitm/ChangeLog b/libitm/ChangeLog
index 569d5bbbf14..6285c85fd44 100644
--- a/libitm/ChangeLog
+++ b/libitm/ChangeLog
@@ -1,3 +1,9 @@
+2015-08-20 Gleb Fotengauer-Malinovskiy <glebfm@altlinux.org> (tiny change)
+
+ PR libitm/61164
+ * local_atomic (__always_inline): Rename to...
+ (__libitm_always_inline): ... this.
+
2017-10-10 Release Manager

PR target/52482
diff --git a/libitm/local_atomic b/libitm/local_atomic
index 3119be40d09..e536275dc9f 100644
--- a/libitm/local_atomic
+++ b/libitm/local_atomic
@@ -41,8 +41,7 @@
#ifndef _GLIBCXX_ATOMIC
#define _GLIBCXX_ATOMIC 1

-#undef __always_inline
-#define __always_inline __attribute__((always_inline))
+#define __libitm_always_inline __attribute__((always_inline))

// #pragma GCC system_header

@@ -74,7 +73,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
memory_order_seq_cst
} memory_order;

- inline __always_inline memory_order
+ inline __libitm_always_inline memory_order
__calculate_memory_order(memory_order __m) noexcept
{
const bool __cond1 = __m == memory_order_release;
@@ -84,13 +83,13 @@ namespace std // _GLIBCXX_VISIBILITY(default)
return __mo2;
}

- inline __always_inline void
+ inline __libitm_always_inline void
atomic_thread_fence(memory_order __m) noexcept
{
__atomic_thread_fence (__m);
}

- inline __always_inline void
+ inline __libitm_always_inline void
atomic_signal_fence(memory_order __m) noexcept
{
__atomic_thread_fence (__m);
@@ -280,19 +279,19 @@ namespace std // _GLIBCXX_VISIBILITY(default)
// Conversion to ATOMIC_FLAG_INIT.
atomic_flag(bool __i) noexcept : __atomic_flag_base({ __i }) { }

- __always_inline bool
+ __libitm_always_inline bool
test_and_set(memory_order __m = memory_order_seq_cst) noexcept
{
return __atomic_test_and_set (&_M_i, __m);
}

- __always_inline bool
+ __libitm_always_inline bool
test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
{
return __atomic_test_and_set (&_M_i, __m);
}

- __always_inline void
+ __libitm_always_inline void
clear(memory_order __m = memory_order_seq_cst) noexcept
{
// __glibcxx_assert(__m != memory_order_consume);
@@ -302,7 +301,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
__atomic_clear (&_M_i, __m);
}

- __always_inline void
+ __libitm_always_inline void
clear(memory_order __m = memory_order_seq_cst) volatile noexcept
{
// __glibcxx_assert(__m != memory_order_consume);
@@ -455,7 +454,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
is_lock_free() const volatile noexcept
{ return __atomic_is_lock_free (sizeof (_M_i), &_M_i); }

- __always_inline void
+ __libitm_always_inline void
store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept
{
// __glibcxx_assert(__m != memory_order_acquire);
@@ -465,7 +464,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
__atomic_store_n(&_M_i, __i, __m);
}

- __always_inline void
+ __libitm_always_inline void
store(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{
@@ -476,7 +475,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
__atomic_store_n(&_M_i, __i, __m);
}

- __always_inline __int_type
+ __libitm_always_inline __int_type
load(memory_order __m = memory_order_seq_cst) const noexcept
{
// __glibcxx_assert(__m != memory_order_release);
@@ -485,7 +484,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
return __atomic_load_n(&_M_i, __m);
}

- __always_inline __int_type
+ __libitm_always_inline __int_type
load(memory_order __m = memory_order_seq_cst) const volatile noexcept
{
// __glibcxx_assert(__m != memory_order_release);
@@ -494,21 +493,21 @@ namespace std // _GLIBCXX_VISIBILITY(default)
return __atomic_load_n(&_M_i, __m);
}

- __always_inline __int_type
+ __libitm_always_inline __int_type
exchange(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{
return __atomic_exchange_n(&_M_i, __i, __m);
}

- __always_inline __int_type
+ __libitm_always_inline __int_type
exchange(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{
return __atomic_exchange_n(&_M_i, __i, __m);
}

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(__int_type& __i1, __int_type __i2,
memory_order __m1, memory_order __m2) noexcept
{
@@ -519,7 +518,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
}

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(__int_type& __i1, __int_type __i2,
memory_order __m1,
memory_order __m2) volatile noexcept
@@ -531,7 +530,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
}

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(__int_type& __i1, __int_type __i2,
memory_order __m = memory_order_seq_cst) noexcept
{
@@ -539,7 +538,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
__calculate_memory_order(__m));
}

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(__int_type& __i1, __int_type __i2,
memory_order __m = memory_order_seq_cst) volatile noexcept
{
@@ -547,7 +546,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
__calculate_memory_order(__m));
}

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(__int_type& __i1, __int_type __i2,
memory_order __m1, memory_order __m2) noexcept
{
@@ -558,7 +557,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
}

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(__int_type& __i1, __int_type __i2,
memory_order __m1,
memory_order __m2) volatile noexcept
@@ -570,7 +569,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
}

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(__int_type& __i1, __int_type __i2,
memory_order __m = memory_order_seq_cst) noexcept
{
@@ -578,7 +577,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
__calculate_memory_order(__m));
}

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(__int_type& __i1, __int_type __i2,
memory_order __m = memory_order_seq_cst) volatile noexcept
{
@@ -586,52 +585,52 @@ namespace std // _GLIBCXX_VISIBILITY(default)
__calculate_memory_order(__m));
}

- __always_inline __int_type
+ __libitm_always_inline __int_type
fetch_add(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return __atomic_fetch_add(&_M_i, __i, __m); }

- __always_inline __int_type
+ __libitm_always_inline __int_type
fetch_add(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return __atomic_fetch_add(&_M_i, __i, __m); }

- __always_inline __int_type
+ __libitm_always_inline __int_type
fetch_sub(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return __atomic_fetch_sub(&_M_i, __i, __m); }

- __always_inline __int_type
+ __libitm_always_inline __int_type
fetch_sub(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return __atomic_fetch_sub(&_M_i, __i, __m); }

- __always_inline __int_type
+ __libitm_always_inline __int_type
fetch_and(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return __atomic_fetch_and(&_M_i, __i, __m); }

- __always_inline __int_type
+ __libitm_always_inline __int_type
fetch_and(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return __atomic_fetch_and(&_M_i, __i, __m); }

- __always_inline __int_type
+ __libitm_always_inline __int_type
fetch_or(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return __atomic_fetch_or(&_M_i, __i, __m); }

- __always_inline __int_type
+ __libitm_always_inline __int_type
fetch_or(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return __atomic_fetch_or(&_M_i, __i, __m); }

- __always_inline __int_type
+ __libitm_always_inline __int_type
fetch_xor(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return __atomic_fetch_xor(&_M_i, __i, __m); }

- __always_inline __int_type
+ __libitm_always_inline __int_type
fetch_xor(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return __atomic_fetch_xor(&_M_i, __i, __m); }
@@ -733,7 +732,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
is_lock_free() const volatile noexcept
{ return __atomic_is_lock_free (sizeof (_M_p), &_M_p); }

- __always_inline void
+ __libitm_always_inline void
store(__pointer_type __p,
memory_order __m = memory_order_seq_cst) noexcept
{
@@ -744,7 +743,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
__atomic_store_n(&_M_p, __p, __m);
}

- __always_inline void
+ __libitm_always_inline void
store(__pointer_type __p,
memory_order __m = memory_order_seq_cst) volatile noexcept
{
@@ -755,7 +754,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
__atomic_store_n(&_M_p, __p, __m);
}

- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
load(memory_order __m = memory_order_seq_cst) const noexcept
{
// __glibcxx_assert(__m != memory_order_release);
@@ -764,7 +763,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
return __atomic_load_n(&_M_p, __m);
}

- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
load(memory_order __m = memory_order_seq_cst) const volatile noexcept
{
// __glibcxx_assert(__m != memory_order_release);
@@ -773,21 +772,21 @@ namespace std // _GLIBCXX_VISIBILITY(default)
return __atomic_load_n(&_M_p, __m);
}

- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
exchange(__pointer_type __p,
memory_order __m = memory_order_seq_cst) noexcept
{
return __atomic_exchange_n(&_M_p, __p, __m);
}

- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
exchange(__pointer_type __p,
memory_order __m = memory_order_seq_cst) volatile noexcept
{
return __atomic_exchange_n(&_M_p, __p, __m);
}

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
memory_order __m1,
memory_order __m2) noexcept
@@ -799,7 +798,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
}

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
memory_order __m1,
memory_order __m2) volatile noexcept
@@ -811,22 +810,22 @@ namespace std // _GLIBCXX_VISIBILITY(default)
return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
}

- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
fetch_add(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) noexcept
{ return __atomic_fetch_add(&_M_p, __d, __m); }

- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
fetch_add(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return __atomic_fetch_add(&_M_p, __d, __m); }

- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
fetch_sub(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) noexcept
{ return __atomic_fetch_sub(&_M_p, __d, __m); }

- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
fetch_sub(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return __atomic_fetch_sub(&_M_p, __d, __m); }
@@ -870,67 +869,67 @@ namespace std // _GLIBCXX_VISIBILITY(default)
bool
is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }

- __always_inline void
+ __libitm_always_inline void
store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
{ _M_base.store(__i, __m); }

- __always_inline void
+ __libitm_always_inline void
store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
{ _M_base.store(__i, __m); }

- __always_inline bool
+ __libitm_always_inline bool
load(memory_order __m = memory_order_seq_cst) const noexcept
{ return _M_base.load(__m); }

- __always_inline bool
+ __libitm_always_inline bool
load(memory_order __m = memory_order_seq_cst) const volatile noexcept
{ return _M_base.load(__m); }

- __always_inline bool
+ __libitm_always_inline bool
exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
{ return _M_base.exchange(__i, __m); }

- __always_inline bool
+ __libitm_always_inline bool
exchange(bool __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return _M_base.exchange(__i, __m); }

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
memory_order __m2) noexcept
{ return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
memory_order __m2) volatile noexcept
{ return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(bool& __i1, bool __i2,
memory_order __m = memory_order_seq_cst) noexcept
{ return _M_base.compare_exchange_weak(__i1, __i2, __m); }

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(bool& __i1, bool __i2,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return _M_base.compare_exchange_weak(__i1, __i2, __m); }

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
memory_order __m2) noexcept
{ return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
memory_order __m2) volatile noexcept
{ return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(bool& __i1, bool __i2,
memory_order __m = memory_order_seq_cst) noexcept
{ return _M_base.compare_exchange_strong(__i1, __i2, __m); }

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(bool& __i1, bool __i2,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return _M_base.compare_exchange_strong(__i1, __i2, __m); }
@@ -980,11 +979,11 @@ namespace std // _GLIBCXX_VISIBILITY(default)
store(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept
{ __atomic_store(&_M_i, &__i, _m); }

- __always_inline void
+ __libitm_always_inline void
store(_Tp __i, memory_order _m = memory_order_seq_cst) volatile noexcept
{ __atomic_store(&_M_i, &__i, _m); }

- __always_inline _Tp
+ __libitm_always_inline _Tp
load(memory_order _m = memory_order_seq_cst) const noexcept
{
_Tp tmp;
@@ -992,7 +991,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
return tmp;
}

- __always_inline _Tp
+ __libitm_always_inline _Tp
load(memory_order _m = memory_order_seq_cst) const volatile noexcept
{
_Tp tmp;
@@ -1000,7 +999,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
return tmp;
}

- __always_inline _Tp
+ __libitm_always_inline _Tp
exchange(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept
{
_Tp tmp;
@@ -1008,7 +1007,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
return tmp;
}

- __always_inline _Tp
+ __libitm_always_inline _Tp
exchange(_Tp __i,
memory_order _m = memory_order_seq_cst) volatile noexcept
{
@@ -1017,50 +1016,50 @@ namespace std // _GLIBCXX_VISIBILITY(default)
return tmp;
}

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
memory_order __f) noexcept
{
return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
}

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
memory_order __f) volatile noexcept
{
return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
}

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(_Tp& __e, _Tp __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return compare_exchange_weak(__e, __i, __m, __m); }

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(_Tp& __e, _Tp __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return compare_exchange_weak(__e, __i, __m, __m); }

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
memory_order __f) noexcept
{
return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
}

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
memory_order __f) volatile noexcept
{
return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
}

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(_Tp& __e, _Tp __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return compare_exchange_strong(__e, __i, __m, __m); }

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(_Tp& __e, _Tp __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return compare_exchange_strong(__e, __i, __m, __m); }
@@ -1153,46 +1152,46 @@ namespace std // _GLIBCXX_VISIBILITY(default)
is_lock_free() const volatile noexcept
{ return _M_b.is_lock_free(); }

- __always_inline void
+ __libitm_always_inline void
store(__pointer_type __p,
memory_order __m = memory_order_seq_cst) noexcept
{ return _M_b.store(__p, __m); }

- __always_inline void
+ __libitm_always_inline void
store(__pointer_type __p,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return _M_b.store(__p, __m); }

- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
load(memory_order __m = memory_order_seq_cst) const noexcept
{ return _M_b.load(__m); }

- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
load(memory_order __m = memory_order_seq_cst) const volatile noexcept
{ return _M_b.load(__m); }

- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
exchange(__pointer_type __p,
memory_order __m = memory_order_seq_cst) noexcept
{ return _M_b.exchange(__p, __m); }

- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
exchange(__pointer_type __p,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return _M_b.exchange(__p, __m); }

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
memory_order __m1, memory_order __m2) noexcept
{ return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
memory_order __m1,
memory_order __m2) volatile noexcept
{ return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
memory_order __m = memory_order_seq_cst) noexcept
{
@@ -1200,7 +1199,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
__calculate_memory_order(__m));
}

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
memory_order __m = memory_order_seq_cst) volatile noexcept
{
@@ -1208,18 +1207,18 @@ namespace std // _GLIBCXX_VISIBILITY(default)
__calculate_memory_order(__m));
}

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
memory_order __m1, memory_order __m2) noexcept
{ return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
memory_order __m1,
memory_order __m2) volatile noexcept
{ return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
memory_order __m = memory_order_seq_cst) noexcept
{
@@ -1227,7 +1226,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
__calculate_memory_order(__m));
}

- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
memory_order __m = memory_order_seq_cst) volatile noexcept
{
@@ -1235,22 +1234,22 @@ namespace std // _GLIBCXX_VISIBILITY(default)
__calculate_memory_order(__m));
}

- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
fetch_add(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) noexcept
{ return _M_b.fetch_add(__d, __m); }

- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
fetch_add(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return _M_b.fetch_add(__d, __m); }

- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
fetch_sub(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) noexcept
{ return _M_b.fetch_sub(__d, __m); }

- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
fetch_sub(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return _M_b.fetch_sub(__d, __m); }
@@ -1544,98 +1543,98 @@ namespace std // _GLIBCXX_VISIBILITY(default)


// Function definitions, atomic_flag operations.
- inline __always_inline bool
+ inline __libitm_always_inline bool
atomic_flag_test_and_set_explicit(atomic_flag* __a,
memory_order __m) noexcept
{ return __a->test_and_set(__m); }

- inline __always_inline bool
+ inline __libitm_always_inline bool
atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
memory_order __m) noexcept
{ return __a->test_and_set(__m); }

- inline __always_inline void
+ inline __libitm_always_inline void
atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
{ __a->clear(__m); }

- inline __always_inline void
+ inline __libitm_always_inline void
atomic_flag_clear_explicit(volatile atomic_flag* __a,
memory_order __m) noexcept
{ __a->clear(__m); }

- inline __always_inline bool
+ inline __libitm_always_inline bool
atomic_flag_test_and_set(atomic_flag* __a) noexcept
{ return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

- inline __always_inline bool
+ inline __libitm_always_inline bool
atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
{ return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

- inline __always_inline void
+ inline __libitm_always_inline void
atomic_flag_clear(atomic_flag* __a) noexcept
{ atomic_flag_clear_explicit(__a, memory_order_seq_cst); }

- inline __always_inline void
+ inline __libitm_always_inline void
atomic_flag_clear(volatile atomic_flag* __a) noexcept
{ atomic_flag_clear_explicit(__a, memory_order_seq_cst); }


// Function templates generally applicable to atomic types.
template<typename _ITp>
- __always_inline bool
+ __libitm_always_inline bool
atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
{ return __a->is_lock_free(); }

template<typename _ITp>
- __always_inline bool
+ __libitm_always_inline bool
atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
{ return __a->is_lock_free(); }

template<typename _ITp>
- __always_inline void
+ __libitm_always_inline void
atomic_init(atomic<_ITp>* __a, _ITp __i) noexcept;

template<typename _ITp>
- __always_inline void
+ __libitm_always_inline void
atomic_init(volatile atomic<_ITp>* __a, _ITp __i) noexcept;

template<typename _ITp>
- __always_inline void
+ __libitm_always_inline void
atomic_store_explicit(atomic<_ITp>* __a, _ITp __i,
memory_order __m) noexcept
{ __a->store(__i, __m); }

template<typename _ITp>
- __always_inline void
+ __libitm_always_inline void
atomic_store_explicit(volatile atomic<_ITp>* __a, _ITp __i,
memory_order __m) noexcept
{ __a->store(__i, __m); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
{ return __a->load(__m); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_load_explicit(const volatile atomic<_ITp>* __a,
memory_order __m) noexcept
{ return __a->load(__m); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_exchange_explicit(atomic<_ITp>* __a, _ITp __i,
memory_order __m) noexcept
{ return __a->exchange(__i, __m); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_exchange_explicit(volatile atomic<_ITp>* __a, _ITp __i,
memory_order __m) noexcept
{ return __a->exchange(__i, __m); }

template<typename _ITp>
- __always_inline bool
+ __libitm_always_inline bool
atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
_ITp* __i1, _ITp __i2,
memory_order __m1,
@@ -1643,7 +1642,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
{ return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

template<typename _ITp>
- __always_inline bool
+ __libitm_always_inline bool
atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
_ITp* __i1, _ITp __i2,
memory_order __m1,
@@ -1651,7 +1650,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
{ return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

template<typename _ITp>
- __always_inline bool
+ __libitm_always_inline bool
atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
_ITp* __i1, _ITp __i2,
memory_order __m1,
@@ -1659,7 +1658,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
{ return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

template<typename _ITp>
- __always_inline bool
+ __libitm_always_inline bool
atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
_ITp* __i1, _ITp __i2,
memory_order __m1,
@@ -1668,37 +1667,37 @@ namespace std // _GLIBCXX_VISIBILITY(default)


template<typename _ITp>
- __always_inline void
+ __libitm_always_inline void
atomic_store(atomic<_ITp>* __a, _ITp __i) noexcept
{ atomic_store_explicit(__a, __i, memory_order_seq_cst); }

template<typename _ITp>
- __always_inline void
+ __libitm_always_inline void
atomic_store(volatile atomic<_ITp>* __a, _ITp __i) noexcept
{ atomic_store_explicit(__a, __i, memory_order_seq_cst); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_load(const atomic<_ITp>* __a) noexcept
{ return atomic_load_explicit(__a, memory_order_seq_cst); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_load(const volatile atomic<_ITp>* __a) noexcept
{ return atomic_load_explicit(__a, memory_order_seq_cst); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_exchange(atomic<_ITp>* __a, _ITp __i) noexcept
{ return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_exchange(volatile atomic<_ITp>* __a, _ITp __i) noexcept
{ return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

template<typename _ITp>
- __always_inline bool
+ __libitm_always_inline bool
atomic_compare_exchange_weak(atomic<_ITp>* __a,
_ITp* __i1, _ITp __i2) noexcept
{
@@ -1708,7 +1707,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
}

template<typename _ITp>
- __always_inline bool
+ __libitm_always_inline bool
atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
_ITp* __i1, _ITp __i2) noexcept
{
@@ -1718,7 +1717,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
}

template<typename _ITp>
- __always_inline bool
+ __libitm_always_inline bool
atomic_compare_exchange_strong(atomic<_ITp>* __a,
_ITp* __i1, _ITp __i2) noexcept
{
@@ -1728,7 +1727,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
}

template<typename _ITp>
- __always_inline bool
+ __libitm_always_inline bool
atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
_ITp* __i1, _ITp __i2) noexcept
{
@@ -1742,158 +1741,158 @@ namespace std // _GLIBCXX_VISIBILITY(default)
// intergral types as specified in the standard, excluding address
// types.
template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_add_explicit(__atomic_base<_ITp>* __a, _ITp __i,
memory_order __m) noexcept
{ return __a->fetch_add(__i, __m); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_add_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
memory_order __m) noexcept
{ return __a->fetch_add(__i, __m); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_sub_explicit(__atomic_base<_ITp>* __a, _ITp __i,
memory_order __m) noexcept
{ return __a->fetch_sub(__i, __m); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_sub_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
memory_order __m) noexcept
{ return __a->fetch_sub(__i, __m); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_and_explicit(__atomic_base<_ITp>* __a, _ITp __i,
memory_order __m) noexcept
{ return __a->fetch_and(__i, __m); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
memory_order __m) noexcept
{ return __a->fetch_and(__i, __m); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_or_explicit(__atomic_base<_ITp>* __a, _ITp __i,
memory_order __m) noexcept
{ return __a->fetch_or(__i, __m); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
memory_order __m) noexcept
{ return __a->fetch_or(__i, __m); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a, _ITp __i,
memory_order __m) noexcept
{ return __a->fetch_xor(__i, __m); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
memory_order __m) noexcept
{ return __a->fetch_xor(__i, __m); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_add(__atomic_base<_ITp>* __a, _ITp __i) noexcept
{ return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_add(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
{ return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_sub(__atomic_base<_ITp>* __a, _ITp __i) noexcept
{ return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_sub(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
{ return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_and(__atomic_base<_ITp>* __a, _ITp __i) noexcept
{ return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_and(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
{ return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_or(__atomic_base<_ITp>* __a, _ITp __i) noexcept
{ return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_or(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
{ return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_xor(__atomic_base<_ITp>* __a, _ITp __i) noexcept
{ return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }

template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_xor(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
{ return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }


// Partial specializations for pointers.
template<typename _ITp>
- __always_inline _ITp*
+ __libitm_always_inline _ITp*
atomic_fetch_add_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
memory_order __m) noexcept
{ return __a->fetch_add(__d, __m); }

template<typename _ITp>
- __always_inline _ITp*
+ __libitm_always_inline _ITp*
atomic_fetch_add_explicit(volatile atomic<_ITp*>* __a, ptrdiff_t __d,
memory_order __m) noexcept
{ return __a->fetch_add(__d, __m); }

template<typename _ITp>
- __always_inline _ITp*
+ __libitm_always_inline _ITp*
atomic_fetch_add(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
{ return __a->fetch_add(__d); }

template<typename _ITp>
- __always_inline _ITp*
+ __libitm_always_inline _ITp*
atomic_fetch_add(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
{ return __a->fetch_add(__d); }

template<typename _ITp>
- __always_inline _ITp*
+ __libitm_always_inline _ITp*
atomic_fetch_sub_explicit(volatile atomic<_ITp*>* __a,
ptrdiff_t __d, memory_order __m) noexcept
{ return __a->fetch_sub(__d, __m); }

template<typename _ITp>
- __always_inline _ITp*
+ __libitm_always_inline _ITp*
atomic_fetch_sub_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
memory_order __m) noexcept
{ return __a->fetch_sub(__d, __m); }

template<typename _ITp>
- __always_inline _ITp*
+ __libitm_always_inline _ITp*
atomic_fetch_sub(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
{ return __a->fetch_sub(__d); }

template<typename _ITp>
- __always_inline _ITp*
+ __libitm_always_inline _ITp*
atomic_fetch_sub(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
{ return __a->fetch_sub(__d); }
// @} group atomics
--
2.19.2