atomic.cc: Use noexcept.

2011-08-04  Paolo Carlini  <paolo.carlini@oracle.com>

	* src/atomic.cc: Use noexcept.
	* include/std/atomic: Likewise.
	* include/bits/atomic_0.h: Likewise.
	* include/bits/atomic_2.h: Likewise.
	* include/bits/atomic_base.h: Likewise.

From-SVN: r177413
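
For context, a minimal sketch (not part of the commit; the type and names are illustrative) of what the added specifier does: a `noexcept` function promises not to throw, the promise is queryable with the `noexcept` operator, and a throw escaping such a function calls `std::terminate`.

```cpp
#include <iostream>

struct flag_like
{
  flag_like() noexcept = default;   // defaulted special members may carry noexcept
  bool test_and_set() noexcept      // mirrors the style of the patched signatures
  {
    bool old = value;
    value = true;
    return old;                     // a throw escaping here would std::terminate
  }
  bool value = false;
};

int main()
{
  flag_like f;
  std::cout << std::boolalpha
            << noexcept(f.test_and_set()) << '\n';  // prints: true
}
```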
Paolo Carlini <paolo.carlini@oracle.com> 2011-08-04 19:57:48 +00:00, committed by Paolo Carlini
commit bdc05efbc8 (parent c1ea7f07d9)
6 changed files with 437 additions and 380 deletions

libstdc++-v3/ChangeLog

@@ -1,3 +1,11 @@
2011-08-04  Paolo Carlini  <paolo.carlini@oracle.com>

	* src/atomic.cc: Use noexcept.
	* include/std/atomic: Likewise.
	* include/bits/atomic_0.h: Likewise.
	* include/bits/atomic_2.h: Likewise.
	* include/bits/atomic_base.h: Likewise.

2011-08-03  Benjamin Kosnik  <bkoz@redhat.com>

	* testsuite/performance/27_io/filebuf_sputn_unbuf.cc: Include

libstdc++-v3/include/bits/atomic_0.h

@@ -111,26 +111,26 @@ namespace __atomic0
/// atomic_flag
struct atomic_flag : public __atomic_flag_base
{
atomic_flag() = default;
~atomic_flag() = default;
atomic_flag() noexcept = default;
~atomic_flag() noexcept = default;
atomic_flag(const atomic_flag&) = delete;
atomic_flag& operator=(const atomic_flag&) = delete;
atomic_flag& operator=(const atomic_flag&) volatile = delete;
// Conversion to ATOMIC_FLAG_INIT.
atomic_flag(bool __i): __atomic_flag_base({ __i }) { }
atomic_flag(bool __i) noexcept : __atomic_flag_base({ __i }) { }
bool
test_and_set(memory_order __m = memory_order_seq_cst);
test_and_set(memory_order __m = memory_order_seq_cst) noexcept;
bool
test_and_set(memory_order __m = memory_order_seq_cst) volatile;
test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept;
void
clear(memory_order __m = memory_order_seq_cst);
clear(memory_order __m = memory_order_seq_cst) noexcept;
void
clear(memory_order __m = memory_order_seq_cst) volatile;
clear(memory_order __m = memory_order_seq_cst) volatile noexcept;
};
@@ -166,117 +166,117 @@ namespace __atomic0
__int_type _M_i;
public:
__atomic_base() = default;
~__atomic_base() = default;
__atomic_base() noexcept = default;
~__atomic_base() noexcept = default;
__atomic_base(const __atomic_base&) = delete;
__atomic_base& operator=(const __atomic_base&) = delete;
__atomic_base& operator=(const __atomic_base&) volatile = delete;
// Requires __int_type convertible to _M_base._M_i.
constexpr __atomic_base(__int_type __i): _M_i (__i) { }
constexpr __atomic_base(__int_type __i) noexcept : _M_i (__i) { }
operator __int_type() const
operator __int_type() const noexcept
{ return load(); }
operator __int_type() const volatile
operator __int_type() const volatile noexcept
{ return load(); }
__int_type
operator=(__int_type __i)
operator=(__int_type __i) noexcept
{
store(__i);
return __i;
}
__int_type
operator=(__int_type __i) volatile
operator=(__int_type __i) volatile noexcept
{
store(__i);
return __i;
}
__int_type
operator++(int)
operator++(int) noexcept
{ return fetch_add(1); }
__int_type
operator++(int) volatile
operator++(int) volatile noexcept
{ return fetch_add(1); }
__int_type
operator--(int)
operator--(int) noexcept
{ return fetch_sub(1); }
__int_type
operator--(int) volatile
operator--(int) volatile noexcept
{ return fetch_sub(1); }
__int_type
operator++()
operator++() noexcept
{ return fetch_add(1) + 1; }
__int_type
operator++() volatile
operator++() volatile noexcept
{ return fetch_add(1) + 1; }
__int_type
operator--()
operator--() noexcept
{ return fetch_sub(1) - 1; }
__int_type
operator--() volatile
operator--() volatile noexcept
{ return fetch_sub(1) - 1; }
__int_type
operator+=(__int_type __i)
operator+=(__int_type __i) noexcept
{ return fetch_add(__i) + __i; }
__int_type
operator+=(__int_type __i) volatile
operator+=(__int_type __i) volatile noexcept
{ return fetch_add(__i) + __i; }
__int_type
operator-=(__int_type __i)
operator-=(__int_type __i) noexcept
{ return fetch_sub(__i) - __i; }
__int_type
operator-=(__int_type __i) volatile
operator-=(__int_type __i) volatile noexcept
{ return fetch_sub(__i) - __i; }
__int_type
operator&=(__int_type __i)
operator&=(__int_type __i) noexcept
{ return fetch_and(__i) & __i; }
__int_type
operator&=(__int_type __i) volatile
operator&=(__int_type __i) volatile noexcept
{ return fetch_and(__i) & __i; }
__int_type
operator|=(__int_type __i)
operator|=(__int_type __i) noexcept
{ return fetch_or(__i) | __i; }
__int_type
operator|=(__int_type __i) volatile
operator|=(__int_type __i) volatile noexcept
{ return fetch_or(__i) | __i; }
__int_type
operator^=(__int_type __i)
operator^=(__int_type __i) noexcept
{ return fetch_xor(__i) ^ __i; }
__int_type
operator^=(__int_type __i) volatile
operator^=(__int_type __i) volatile noexcept
{ return fetch_xor(__i) ^ __i; }
bool
is_lock_free() const
is_lock_free() const noexcept
{ return false; }
bool
is_lock_free() const volatile
is_lock_free() const volatile noexcept
{ return false; }
void
store(__int_type __i, memory_order __m = memory_order_seq_cst)
store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept
{
__glibcxx_assert(__m != memory_order_acquire);
__glibcxx_assert(__m != memory_order_acq_rel);
@@ -285,7 +285,8 @@ namespace __atomic0
}
void
store(__int_type __i, memory_order __m = memory_order_seq_cst) volatile
store(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{
__glibcxx_assert(__m != memory_order_acquire);
__glibcxx_assert(__m != memory_order_acq_rel);
@@ -294,7 +295,7 @@ namespace __atomic0
}
__int_type
load(memory_order __m = memory_order_seq_cst) const
load(memory_order __m = memory_order_seq_cst) const noexcept
{
__glibcxx_assert(__m != memory_order_release);
__glibcxx_assert(__m != memory_order_acq_rel);
@@ -302,7 +303,7 @@ namespace __atomic0
}
__int_type
load(memory_order __m = memory_order_seq_cst) const volatile
load(memory_order __m = memory_order_seq_cst) const volatile noexcept
{
__glibcxx_assert(__m != memory_order_release);
__glibcxx_assert(__m != memory_order_acq_rel);
@@ -310,16 +311,18 @@ namespace __atomic0
}
__int_type
exchange(__int_type __i, memory_order __m = memory_order_seq_cst)
exchange(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return _ATOMIC_MODIFY_(this, =, __i, __m); }
__int_type
exchange(__int_type __i, memory_order __m = memory_order_seq_cst) volatile
exchange(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return _ATOMIC_MODIFY_(this, =, __i, __m); }
bool
compare_exchange_weak(__int_type& __i1, __int_type __i2,
memory_order __m1, memory_order __m2)
memory_order __m1, memory_order __m2) noexcept
{
__glibcxx_assert(__m2 != memory_order_release);
__glibcxx_assert(__m2 != memory_order_acq_rel);
@@ -329,7 +332,8 @@ namespace __atomic0
bool
compare_exchange_weak(__int_type& __i1, __int_type __i2,
memory_order __m1, memory_order __m2) volatile
memory_order __m1,
memory_order __m2) volatile noexcept
{
__glibcxx_assert(__m2 != memory_order_release);
__glibcxx_assert(__m2 != memory_order_acq_rel);
@@ -339,7 +343,7 @@ namespace __atomic0
bool
compare_exchange_weak(__int_type& __i1, __int_type __i2,
memory_order __m = memory_order_seq_cst)
memory_order __m = memory_order_seq_cst) noexcept
{
return compare_exchange_weak(__i1, __i2, __m,
__calculate_memory_order(__m));
@@ -347,7 +351,7 @@ namespace __atomic0
bool
compare_exchange_weak(__int_type& __i1, __int_type __i2,
memory_order __m = memory_order_seq_cst) volatile
memory_order __m = memory_order_seq_cst) volatile noexcept
{
return compare_exchange_weak(__i1, __i2, __m,
__calculate_memory_order(__m));
@@ -355,7 +359,7 @@ namespace __atomic0
bool
compare_exchange_strong(__int_type& __i1, __int_type __i2,
memory_order __m1, memory_order __m2)
memory_order __m1, memory_order __m2) noexcept
{
__glibcxx_assert(__m2 != memory_order_release);
__glibcxx_assert(__m2 != memory_order_acq_rel);
@@ -365,7 +369,8 @@ namespace __atomic0
bool
compare_exchange_strong(__int_type& __i1, __int_type __i2,
memory_order __m1, memory_order __m2) volatile
memory_order __m1,
memory_order __m2) volatile noexcept
{
__glibcxx_assert(__m2 != memory_order_release);
__glibcxx_assert(__m2 != memory_order_acq_rel);
@@ -375,7 +380,7 @@ namespace __atomic0
bool
compare_exchange_strong(__int_type& __i1, __int_type __i2,
memory_order __m = memory_order_seq_cst)
memory_order __m = memory_order_seq_cst) noexcept
{
return compare_exchange_strong(__i1, __i2, __m,
__calculate_memory_order(__m));
@@ -383,54 +388,60 @@ namespace __atomic0
bool
compare_exchange_strong(__int_type& __i1, __int_type __i2,
memory_order __m = memory_order_seq_cst) volatile
memory_order __m = memory_order_seq_cst) volatile noexcept
{
return compare_exchange_strong(__i1, __i2, __m,
__calculate_memory_order(__m));
}
__int_type
fetch_add(__int_type __i, memory_order __m = memory_order_seq_cst)
fetch_add(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return _ATOMIC_MODIFY_(this, +=, __i, __m); }
__int_type
fetch_add(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return _ATOMIC_MODIFY_(this, +=, __i, __m); }
__int_type
fetch_sub(__int_type __i, memory_order __m = memory_order_seq_cst)
fetch_sub(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return _ATOMIC_MODIFY_(this, -=, __i, __m); }
__int_type
fetch_sub(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return _ATOMIC_MODIFY_(this, -=, __i, __m); }
__int_type
fetch_and(__int_type __i, memory_order __m = memory_order_seq_cst)
fetch_and(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return _ATOMIC_MODIFY_(this, &=, __i, __m); }
__int_type
fetch_and(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return _ATOMIC_MODIFY_(this, &=, __i, __m); }
__int_type
fetch_or(__int_type __i, memory_order __m = memory_order_seq_cst)
fetch_or(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return _ATOMIC_MODIFY_(this, |=, __i, __m); }
__int_type
fetch_or(__int_type __i, memory_order __m = memory_order_seq_cst) volatile
fetch_or(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return _ATOMIC_MODIFY_(this, |=, __i, __m); }
__int_type
fetch_xor(__int_type __i, memory_order __m = memory_order_seq_cst)
fetch_xor(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return _ATOMIC_MODIFY_(this, ^=, __i, __m); }
__int_type
fetch_xor(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return _ATOMIC_MODIFY_(this, ^=, __i, __m); }
};
@@ -445,93 +456,95 @@ namespace __atomic0
__pointer_type _M_i;
public:
__atomic_base() = default;
~__atomic_base() = default;
__atomic_base() noexcept = default;
~__atomic_base() noexcept = default;
__atomic_base(const __atomic_base&) = delete;
__atomic_base& operator=(const __atomic_base&) = delete;
__atomic_base& operator=(const __atomic_base&) volatile = delete;
// Requires __pointer_type convertible to _M_i.
constexpr __atomic_base(__return_pointer_type __p): _M_i (__p) { }
constexpr __atomic_base(__return_pointer_type __p) noexcept
: _M_i (__p) { }
operator __return_pointer_type() const
operator __return_pointer_type() const noexcept
{ return reinterpret_cast<__return_pointer_type>(load()); }
operator __return_pointer_type() const volatile
operator __return_pointer_type() const volatile noexcept
{ return reinterpret_cast<__return_pointer_type>(load()); }
__return_pointer_type
operator=(__pointer_type __p)
operator=(__pointer_type __p) noexcept
{
store(__p);
return reinterpret_cast<__return_pointer_type>(__p);
}
__return_pointer_type
operator=(__pointer_type __p) volatile
operator=(__pointer_type __p) volatile noexcept
{
store(__p);
return reinterpret_cast<__return_pointer_type>(__p);
}
__return_pointer_type
operator++(int)
operator++(int) noexcept
{ return reinterpret_cast<__return_pointer_type>(fetch_add(1)); }
__return_pointer_type
operator++(int) volatile
operator++(int) volatile noexcept
{ return reinterpret_cast<__return_pointer_type>(fetch_add(1)); }
__return_pointer_type
operator--(int)
operator--(int) noexcept
{ return reinterpret_cast<__return_pointer_type>(fetch_sub(1)); }
__return_pointer_type
operator--(int) volatile
operator--(int) volatile noexcept
{ return reinterpret_cast<__return_pointer_type>(fetch_sub(1)); }
__return_pointer_type
operator++()
operator++() noexcept
{ return reinterpret_cast<__return_pointer_type>(fetch_add(1) + 1); }
__return_pointer_type
operator++() volatile
operator++() volatile noexcept
{ return reinterpret_cast<__return_pointer_type>(fetch_add(1) + 1); }
__return_pointer_type
operator--()
operator--() noexcept
{ return reinterpret_cast<__return_pointer_type>(fetch_sub(1) - 1); }
__return_pointer_type
operator--() volatile
operator--() volatile noexcept
{ return reinterpret_cast<__return_pointer_type>(fetch_sub(1) - 1); }
__return_pointer_type
operator+=(ptrdiff_t __d)
operator+=(ptrdiff_t __d) noexcept
{ return reinterpret_cast<__return_pointer_type>(fetch_add(__d) + __d); }
__return_pointer_type
operator+=(ptrdiff_t __d) volatile
operator+=(ptrdiff_t __d) volatile noexcept
{ return reinterpret_cast<__return_pointer_type>(fetch_add(__d) + __d); }
__return_pointer_type
operator-=(ptrdiff_t __d)
operator-=(ptrdiff_t __d) noexcept
{ return reinterpret_cast<__return_pointer_type>(fetch_sub(__d) - __d); }
__return_pointer_type
operator-=(ptrdiff_t __d) volatile
operator-=(ptrdiff_t __d) volatile noexcept
{ return reinterpret_cast<__return_pointer_type>(fetch_sub(__d) - __d); }
bool
is_lock_free() const
is_lock_free() const noexcept
{ return true; }
bool
is_lock_free() const volatile
is_lock_free() const volatile noexcept
{ return true; }
void
store(__pointer_type __p, memory_order __m = memory_order_seq_cst)
store(__pointer_type __p,
memory_order __m = memory_order_seq_cst) noexcept
{
__glibcxx_assert(__m != memory_order_acquire);
__glibcxx_assert(__m != memory_order_acq_rel);
@@ -541,7 +554,7 @@ namespace __atomic0
void
store(__pointer_type __p,
memory_order __m = memory_order_seq_cst) volatile
memory_order __m = memory_order_seq_cst) volatile noexcept
{
__glibcxx_assert(__m != memory_order_acquire);
__glibcxx_assert(__m != memory_order_acq_rel);
@@ -556,7 +569,7 @@ namespace __atomic0
}
__return_pointer_type
load(memory_order __m = memory_order_seq_cst) const
load(memory_order __m = memory_order_seq_cst) const noexcept
{
__glibcxx_assert(__m != memory_order_release);
__glibcxx_assert(__m != memory_order_acq_rel);
@@ -565,7 +578,7 @@ namespace __atomic0
}
__return_pointer_type
load(memory_order __m = memory_order_seq_cst) const volatile
load(memory_order __m = memory_order_seq_cst) const volatile noexcept
{
__glibcxx_assert(__m != memory_order_release);
__glibcxx_assert(__m != memory_order_acq_rel);
@@ -574,7 +587,8 @@ namespace __atomic0
}
__return_pointer_type
exchange(__pointer_type __p, memory_order __m = memory_order_seq_cst)
exchange(__pointer_type __p,
memory_order __m = memory_order_seq_cst) noexcept
{
void* __v = _ATOMIC_MODIFY_(this, =, __p, __m);
return reinterpret_cast<__return_pointer_type>(__v);
@@ -582,7 +596,7 @@ namespace __atomic0
__return_pointer_type
exchange(__pointer_type __p,
memory_order __m = memory_order_seq_cst) volatile
memory_order __m = memory_order_seq_cst) volatile noexcept
{
volatile __pointer_type* __p2 = &_M_i;
__typeof__(__p) __w = (__p);
@@ -597,7 +611,7 @@ namespace __atomic0
bool
compare_exchange_strong(__return_pointer_type& __rp1, __pointer_type __p2,
memory_order __m1, memory_order __m2)
memory_order __m1, memory_order __m2) noexcept
{
__glibcxx_assert(__m2 != memory_order_release);
__glibcxx_assert(__m2 != memory_order_acq_rel);
@@ -608,7 +622,8 @@ namespace __atomic0
bool
compare_exchange_strong(__return_pointer_type& __rp1, __pointer_type __p2,
memory_order __m1, memory_order __m2) volatile
memory_order __m1,
memory_order __m2) volatile noexcept
{
__glibcxx_assert(__m2 != memory_order_release);
__glibcxx_assert(__m2 != memory_order_acq_rel);
@@ -618,7 +633,8 @@ namespace __atomic0
}
__return_pointer_type
fetch_add(ptrdiff_t __d, memory_order __m = memory_order_seq_cst)
fetch_add(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) noexcept
{
void* __v = _ATOMIC_MODIFY_(this, +=, __d, __m);
return reinterpret_cast<__return_pointer_type>(__v);
@@ -626,14 +642,15 @@ namespace __atomic0
__return_pointer_type
fetch_add(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) volatile
memory_order __m = memory_order_seq_cst) volatile noexcept
{
void* __v = _ATOMIC_MODIFY_(this, +=, __d, __m);
return reinterpret_cast<__return_pointer_type>(__v);
}
__return_pointer_type
fetch_sub(ptrdiff_t __d, memory_order __m = memory_order_seq_cst)
fetch_sub(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) noexcept
{
void* __v = _ATOMIC_MODIFY_(this, -=, __d, __m);
return reinterpret_cast<__return_pointer_type>(__v);
@@ -641,7 +658,7 @@ namespace __atomic0
__return_pointer_type
fetch_sub(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) volatile
memory_order __m = memory_order_seq_cst) volatile noexcept
{
void* __v = _ATOMIC_MODIFY_(this, -=, __d, __m);
return reinterpret_cast<__return_pointer_type>(__v);

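For readers unfamiliar with the interface being annotated: the `test_and_set`/`clear` pair declared at the top of this file is exactly what a spin lock needs. A minimal sketch using the standard `std::atomic_flag` (not the internal `__atomic0` type):

```cpp
#include <atomic>
#include <thread>
#include <vector>

// Sketch of a spin lock built on the test_and_set/clear interface.
class spin_lock
{
  std::atomic_flag flag_ = ATOMIC_FLAG_INIT;

public:
  void lock() noexcept
  {
    // Acquire on success pairs with the release in unlock().
    while (flag_.test_and_set(std::memory_order_acquire))
      ;  // spin until the previous holder clears the flag
  }

  void unlock() noexcept
  { flag_.clear(std::memory_order_release); }
};

int main()
{
  spin_lock sl;
  long counter = 0;
  std::vector<std::thread> ts;
  for (int i = 0; i < 4; ++i)
    ts.emplace_back([&] {
      for (int j = 0; j < 10000; ++j)
        { sl.lock(); ++counter; sl.unlock(); }
    });
  for (auto& t : ts)
    t.join();
  return counter == 40000 ? 0 : 1;
}
```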
libstdc++-v3/include/bits/atomic_2.h

@@ -48,17 +48,17 @@ namespace __atomic2
/// atomic_flag
struct atomic_flag : public __atomic_flag_base
{
atomic_flag() = default;
~atomic_flag() = default;
atomic_flag() noexcept = default;
~atomic_flag() noexcept = default;
atomic_flag(const atomic_flag&) = delete;
atomic_flag& operator=(const atomic_flag&) = delete;
atomic_flag& operator=(const atomic_flag&) volatile = delete;
// Conversion to ATOMIC_FLAG_INIT.
atomic_flag(bool __i): __atomic_flag_base({ __i }) { }
atomic_flag(bool __i) noexcept : __atomic_flag_base({ __i }) { }
bool
test_and_set(memory_order __m = memory_order_seq_cst)
test_and_set(memory_order __m = memory_order_seq_cst) noexcept
{
// Redundant synchronize if built-in for lock is a full barrier.
if (__m != memory_order_acquire && __m != memory_order_acq_rel)
@@ -67,7 +67,7 @@ namespace __atomic2
}
bool
test_and_set(memory_order __m = memory_order_seq_cst) volatile
test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
{
// Redundant synchronize if built-in for lock is a full barrier.
if (__m != memory_order_acquire && __m != memory_order_acq_rel)
@@ -76,7 +76,7 @@ namespace __atomic2
}
void
clear(memory_order __m = memory_order_seq_cst)
clear(memory_order __m = memory_order_seq_cst) noexcept
{
__glibcxx_assert(__m != memory_order_consume);
__glibcxx_assert(__m != memory_order_acquire);
@@ -88,7 +88,7 @@ namespace __atomic2
}
void
clear(memory_order __m = memory_order_seq_cst) volatile
clear(memory_order __m = memory_order_seq_cst) volatile noexcept
{
__glibcxx_assert(__m != memory_order_consume);
__glibcxx_assert(__m != memory_order_acquire);
@@ -133,117 +133,117 @@ namespace __atomic2
__int_type _M_i;
public:
__atomic_base() = default;
~__atomic_base() = default;
__atomic_base() noexcept = default;
~__atomic_base() noexcept = default;
__atomic_base(const __atomic_base&) = delete;
__atomic_base& operator=(const __atomic_base&) = delete;
__atomic_base& operator=(const __atomic_base&) volatile = delete;
// Requires __int_type convertible to _M_i.
constexpr __atomic_base(__int_type __i): _M_i (__i) { }
constexpr __atomic_base(__int_type __i) noexcept : _M_i (__i) { }
operator __int_type() const
operator __int_type() const noexcept
{ return load(); }
operator __int_type() const volatile
operator __int_type() const volatile noexcept
{ return load(); }
__int_type
operator=(__int_type __i)
operator=(__int_type __i) noexcept
{
store(__i);
return __i;
}
__int_type
operator=(__int_type __i) volatile
operator=(__int_type __i) volatile noexcept
{
store(__i);
return __i;
}
__int_type
operator++(int)
operator++(int) noexcept
{ return fetch_add(1); }
__int_type
operator++(int) volatile
operator++(int) volatile noexcept
{ return fetch_add(1); }
__int_type
operator--(int)
operator--(int) noexcept
{ return fetch_sub(1); }
__int_type
operator--(int) volatile
operator--(int) volatile noexcept
{ return fetch_sub(1); }
__int_type
operator++()
operator++() noexcept
{ return __sync_add_and_fetch(&_M_i, 1); }
__int_type
operator++() volatile
operator++() volatile noexcept
{ return __sync_add_and_fetch(&_M_i, 1); }
__int_type
operator--()
operator--() noexcept
{ return __sync_sub_and_fetch(&_M_i, 1); }
__int_type
operator--() volatile
operator--() volatile noexcept
{ return __sync_sub_and_fetch(&_M_i, 1); }
__int_type
operator+=(__int_type __i)
operator+=(__int_type __i) noexcept
{ return __sync_add_and_fetch(&_M_i, __i); }
__int_type
operator+=(__int_type __i) volatile
operator+=(__int_type __i) volatile noexcept
{ return __sync_add_and_fetch(&_M_i, __i); }
__int_type
operator-=(__int_type __i)
operator-=(__int_type __i) noexcept
{ return __sync_sub_and_fetch(&_M_i, __i); }
__int_type
operator-=(__int_type __i) volatile
operator-=(__int_type __i) volatile noexcept
{ return __sync_sub_and_fetch(&_M_i, __i); }
__int_type
operator&=(__int_type __i)
operator&=(__int_type __i) noexcept
{ return __sync_and_and_fetch(&_M_i, __i); }
__int_type
operator&=(__int_type __i) volatile
operator&=(__int_type __i) volatile noexcept
{ return __sync_and_and_fetch(&_M_i, __i); }
__int_type
operator|=(__int_type __i)
operator|=(__int_type __i) noexcept
{ return __sync_or_and_fetch(&_M_i, __i); }
__int_type
operator|=(__int_type __i) volatile
operator|=(__int_type __i) volatile noexcept
{ return __sync_or_and_fetch(&_M_i, __i); }
__int_type
operator^=(__int_type __i)
operator^=(__int_type __i) noexcept
{ return __sync_xor_and_fetch(&_M_i, __i); }
__int_type
operator^=(__int_type __i) volatile
operator^=(__int_type __i) volatile noexcept
{ return __sync_xor_and_fetch(&_M_i, __i); }
bool
is_lock_free() const
is_lock_free() const noexcept
{ return true; }
bool
is_lock_free() const volatile
is_lock_free() const volatile noexcept
{ return true; }
void
store(__int_type __i, memory_order __m = memory_order_seq_cst)
store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept
{
__glibcxx_assert(__m != memory_order_acquire);
__glibcxx_assert(__m != memory_order_acq_rel);
@@ -261,7 +261,8 @@ namespace __atomic2
}
void
store(__int_type __i, memory_order __m = memory_order_seq_cst) volatile
store(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{
__glibcxx_assert(__m != memory_order_acquire);
__glibcxx_assert(__m != memory_order_acq_rel);
@@ -279,7 +280,7 @@ namespace __atomic2
}
__int_type
load(memory_order __m = memory_order_seq_cst) const
load(memory_order __m = memory_order_seq_cst) const noexcept
{
__glibcxx_assert(__m != memory_order_release);
__glibcxx_assert(__m != memory_order_acq_rel);
@@ -291,7 +292,7 @@ namespace __atomic2
}
__int_type
load(memory_order __m = memory_order_seq_cst) const volatile
load(memory_order __m = memory_order_seq_cst) const volatile noexcept
{
__glibcxx_assert(__m != memory_order_release);
__glibcxx_assert(__m != memory_order_acq_rel);
@@ -303,7 +304,8 @@ namespace __atomic2
}
__int_type
exchange(__int_type __i, memory_order __m = memory_order_seq_cst)
exchange(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{
// XXX built-in assumes memory_order_acquire.
return __sync_lock_test_and_set(&_M_i, __i);
@@ -311,7 +313,8 @@ namespace __atomic2
__int_type
exchange(__int_type __i, memory_order __m = memory_order_seq_cst) volatile
exchange(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{
// XXX built-in assumes memory_order_acquire.
return __sync_lock_test_and_set(&_M_i, __i);
@@ -319,17 +322,18 @@ namespace __atomic2
bool
compare_exchange_weak(__int_type& __i1, __int_type __i2,
memory_order __m1, memory_order __m2)
memory_order __m1, memory_order __m2) noexcept
{ return compare_exchange_strong(__i1, __i2, __m1, __m2); }
bool
compare_exchange_weak(__int_type& __i1, __int_type __i2,
memory_order __m1, memory_order __m2) volatile
memory_order __m1,
memory_order __m2) volatile noexcept
{ return compare_exchange_strong(__i1, __i2, __m1, __m2); }
bool
compare_exchange_weak(__int_type& __i1, __int_type __i2,
memory_order __m = memory_order_seq_cst)
memory_order __m = memory_order_seq_cst) noexcept
{
return compare_exchange_weak(__i1, __i2, __m,
__calculate_memory_order(__m));
@@ -337,7 +341,7 @@ namespace __atomic2
bool
compare_exchange_weak(__int_type& __i1, __int_type __i2,
memory_order __m = memory_order_seq_cst) volatile
memory_order __m = memory_order_seq_cst) volatile noexcept
{
return compare_exchange_weak(__i1, __i2, __m,
__calculate_memory_order(__m));
@@ -345,7 +349,7 @@ namespace __atomic2
bool
compare_exchange_strong(__int_type& __i1, __int_type __i2,
memory_order __m1, memory_order __m2)
memory_order __m1, memory_order __m2) noexcept
{
__glibcxx_assert(__m2 != memory_order_release);
__glibcxx_assert(__m2 != memory_order_acq_rel);
@@ -361,7 +365,8 @@ namespace __atomic2
bool
compare_exchange_strong(__int_type& __i1, __int_type __i2,
memory_order __m1, memory_order __m2) volatile
memory_order __m1,
memory_order __m2) volatile noexcept
{
__glibcxx_assert(__m2 != memory_order_release);
__glibcxx_assert(__m2 != memory_order_acq_rel);
@@ -377,7 +382,7 @@ namespace __atomic2
bool
compare_exchange_strong(__int_type& __i1, __int_type __i2,
memory_order __m = memory_order_seq_cst)
memory_order __m = memory_order_seq_cst) noexcept
{
return compare_exchange_strong(__i1, __i2, __m,
__calculate_memory_order(__m));
@@ -385,55 +390,60 @@ namespace __atomic2
bool
compare_exchange_strong(__int_type& __i1, __int_type __i2,
memory_order __m = memory_order_seq_cst) volatile
memory_order __m = memory_order_seq_cst) volatile noexcept
{
return compare_exchange_strong(__i1, __i2, __m,
__calculate_memory_order(__m));
}
__int_type
fetch_add(__int_type __i, memory_order __m = memory_order_seq_cst)
fetch_add(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return __sync_fetch_and_add(&_M_i, __i); }
__int_type
fetch_add(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return __sync_fetch_and_add(&_M_i, __i); }
__int_type
fetch_sub(__int_type __i, memory_order __m = memory_order_seq_cst)
fetch_sub(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return __sync_fetch_and_sub(&_M_i, __i); }
__int_type
fetch_sub(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return __sync_fetch_and_sub(&_M_i, __i); }
__int_type
fetch_and(__int_type __i, memory_order __m = memory_order_seq_cst)
fetch_and(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return __sync_fetch_and_and(&_M_i, __i); }
__int_type
fetch_and(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return __sync_fetch_and_and(&_M_i, __i); }
__int_type
fetch_or(__int_type __i, memory_order __m = memory_order_seq_cst)
fetch_or(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return __sync_fetch_and_or(&_M_i, __i); }
__int_type
fetch_or(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return __sync_fetch_and_or(&_M_i, __i); }
__int_type
fetch_xor(__int_type __i, memory_order __m = memory_order_seq_cst)
fetch_xor(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return __sync_fetch_and_xor(&_M_i, __i); }
__int_type
fetch_xor(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return __sync_fetch_and_xor(&_M_i, __i); }
};
@@ -448,93 +458,94 @@ namespace __atomic2
__pointer_type _M_p;
public:
__atomic_base() = default;
~__atomic_base() = default;
__atomic_base() noexcept = default;
~__atomic_base() noexcept = default;
__atomic_base(const __atomic_base&) = delete;
__atomic_base& operator=(const __atomic_base&) = delete;
__atomic_base& operator=(const __atomic_base&) volatile = delete;
// Requires __pointer_type convertible to _M_p.
constexpr __atomic_base(__pointer_type __p): _M_p (__p) { }
constexpr __atomic_base(__pointer_type __p) noexcept : _M_p (__p) { }
operator __pointer_type() const
operator __pointer_type() const noexcept
{ return load(); }
operator __pointer_type() const volatile
operator __pointer_type() const volatile noexcept
{ return load(); }
__pointer_type
operator=(__pointer_type __p)
operator=(__pointer_type __p) noexcept
{
store(__p);
return __p;
}
__pointer_type
operator=(__pointer_type __p) volatile
operator=(__pointer_type __p) volatile noexcept
{
store(__p);
return __p;
}
__pointer_type
operator++(int)
operator++(int) noexcept
{ return fetch_add(1); }
__pointer_type
operator++(int) volatile
operator++(int) volatile noexcept
{ return fetch_add(1); }
__pointer_type
operator--(int)
operator--(int) noexcept
{ return fetch_sub(1); }
__pointer_type
operator--(int) volatile
operator--(int) volatile noexcept
{ return fetch_sub(1); }
__pointer_type
operator++()
operator++() noexcept
{ return fetch_add(1) + 1; }
__pointer_type
operator++() volatile
operator++() volatile noexcept
{ return fetch_add(1) + 1; }
__pointer_type
operator--()
operator--() noexcept
{ return fetch_sub(1) -1; }
__pointer_type
operator--() volatile
operator--() volatile noexcept
{ return fetch_sub(1) -1; }
__pointer_type
operator+=(ptrdiff_t __d)
operator+=(ptrdiff_t __d) noexcept
{ return fetch_add(__d) + __d; }
__pointer_type
operator+=(ptrdiff_t __d) volatile
operator+=(ptrdiff_t __d) volatile noexcept
{ return fetch_add(__d) + __d; }
__pointer_type
operator-=(ptrdiff_t __d)
operator-=(ptrdiff_t __d) noexcept
{ return fetch_sub(__d) - __d; }
__pointer_type
operator-=(ptrdiff_t __d) volatile
operator-=(ptrdiff_t __d) volatile noexcept
{ return fetch_sub(__d) - __d; }
bool
is_lock_free() const
is_lock_free() const noexcept
{ return true; }
bool
is_lock_free() const volatile
is_lock_free() const volatile noexcept
{ return true; }
void
store(__pointer_type __p, memory_order __m = memory_order_seq_cst)
store(__pointer_type __p,
memory_order __m = memory_order_seq_cst) noexcept
{
__glibcxx_assert(__m != memory_order_acquire);
__glibcxx_assert(__m != memory_order_acq_rel);
@@ -553,7 +564,7 @@ namespace __atomic2
void
store(__pointer_type __p,
memory_order __m = memory_order_seq_cst) volatile
memory_order __m = memory_order_seq_cst) volatile noexcept
{
__glibcxx_assert(__m != memory_order_acquire);
__glibcxx_assert(__m != memory_order_acq_rel);
@@ -571,7 +582,7 @@ namespace __atomic2
}
__pointer_type
load(memory_order __m = memory_order_seq_cst) const
load(memory_order __m = memory_order_seq_cst) const noexcept
{
__glibcxx_assert(__m != memory_order_release);
__glibcxx_assert(__m != memory_order_acq_rel);
@@ -583,7 +594,7 @@ namespace __atomic2
}
__pointer_type
load(memory_order __m = memory_order_seq_cst) const volatile
load(memory_order __m = memory_order_seq_cst) const volatile noexcept
{
__glibcxx_assert(__m != memory_order_release);
__glibcxx_assert(__m != memory_order_acq_rel);
@@ -595,7 +606,8 @@ namespace __atomic2
}
__pointer_type
exchange(__pointer_type __p, memory_order __m = memory_order_seq_cst)
exchange(__pointer_type __p,
memory_order __m = memory_order_seq_cst) noexcept
{
// XXX built-in assumes memory_order_acquire.
return __sync_lock_test_and_set(&_M_p, __p);
@@ -604,7 +616,7 @@ namespace __atomic2
__pointer_type
exchange(__pointer_type __p,
memory_order __m = memory_order_seq_cst) volatile
memory_order __m = memory_order_seq_cst) volatile noexcept
{
// XXX built-in assumes memory_order_acquire.
return __sync_lock_test_and_set(&_M_p, __p);
@@ -612,7 +624,8 @@ namespace __atomic2
bool
compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
memory_order __m1, memory_order __m2)
memory_order __m1,
memory_order __m2) noexcept
{
__glibcxx_assert(__m2 != memory_order_release);
__glibcxx_assert(__m2 != memory_order_acq_rel);
@@ -628,7 +641,8 @@ namespace __atomic2
bool
compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
memory_order __m1, memory_order __m2) volatile
memory_order __m1,
memory_order __m2) volatile noexcept
{
__glibcxx_assert(__m2 != memory_order_release);
__glibcxx_assert(__m2 != memory_order_acq_rel);
@@ -643,21 +657,23 @@ namespace __atomic2
}
__pointer_type
fetch_add(ptrdiff_t __d, memory_order __m = memory_order_seq_cst)
fetch_add(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) noexcept
{ return __sync_fetch_and_add(&_M_p, __d); }
__pointer_type
fetch_add(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) volatile
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return __sync_fetch_and_add(&_M_p, __d); }
__pointer_type
fetch_sub(ptrdiff_t __d, memory_order __m = memory_order_seq_cst)
fetch_sub(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) noexcept
{ return __sync_fetch_and_sub(&_M_p, __d); }
__pointer_type
fetch_sub(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) volatile
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return __sync_fetch_and_sub(&_M_p, __d); }
};

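The `__atomic2` members above are thin wrappers over the GCC `__sync_*` builtins, which act as full memory barriers regardless of the requested ordering (hence the unused `memory_order` parameter). A minimal GCC-specific example of the underlying builtin:

```cpp
// Sketch: what fetch_add reduces to in the __atomic2 code path.
// __sync_fetch_and_add is a GCC builtin; it returns the previous
// value and is a full barrier whatever order the caller asked for.
int main()
{
  int counter = 0;
  int old = __sync_fetch_and_add(&counter, 5);
  return (old == 0 && counter == 5) ? 0 : 1;
}
```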
libstdc++-v3/include/bits/atomic_base.h

@@ -59,7 +59,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
} memory_order;
inline memory_order
__calculate_memory_order(memory_order __m)
__calculate_memory_order(memory_order __m) noexcept
{
const bool __cond1 = __m == memory_order_release;
const bool __cond2 = __m == memory_order_acq_rel;
@@ -69,15 +69,15 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
}
void
atomic_thread_fence(memory_order);
atomic_thread_fence(memory_order) noexcept;
void
atomic_signal_fence(memory_order);
atomic_signal_fence(memory_order) noexcept;
/// kill_dependency
template<typename _Tp>
inline _Tp
kill_dependency(_Tp __y)
kill_dependency(_Tp __y) noexcept
{
_Tp __ret(__y);
return __ret;

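The `__calculate_memory_order` helper annotated above derives the failure ordering for the single-order `compare_exchange_*` overloads: release and acq_rel are not valid failure orderings, so they are weakened to relaxed and acquire respectively. A standalone sketch of that mapping (names are illustrative, using the standard enumerators):

```cpp
#include <atomic>

// Sketch of the mapping __calculate_memory_order performs: the
// failure ordering of a compare-exchange may not be release or
// acq_rel, so those two are weakened; everything else passes through.
inline std::memory_order
calculate_failure_order(std::memory_order m) noexcept
{
  const bool is_release = (m == std::memory_order_release);
  const bool is_acq_rel = (m == std::memory_order_acq_rel);
  return is_release ? std::memory_order_relaxed
                    : (is_acq_rel ? std::memory_order_acquire : m);
}
```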
libstdc++-v3/include/std/atomic (diff suppressed because it is too large)

libstdc++-v3/src/atomic.cc

@@ -1,6 +1,6 @@
// Support for atomic operations -*- C++ -*-
// Copyright (C) 2008, 2009, 2010
// Copyright (C) 2008, 2009, 2010, 2011
// Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library. This library is free
@@ -56,7 +56,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
namespace __atomic0
{
bool
atomic_flag::test_and_set(memory_order)
atomic_flag::test_and_set(memory_order) noexcept
{
#if defined(_GLIBCXX_HAS_GTHREADS) && defined(_GLIBCXX_USE_C99_STDINT_TR1)
lock_guard<mutex> __lock(get_atomic_mutex());
@@ -67,7 +67,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
}
void
atomic_flag::clear(memory_order)
atomic_flag::clear(memory_order) noexcept
{
#if defined(_GLIBCXX_HAS_GTHREADS) && defined(_GLIBCXX_USE_C99_STDINT_TR1)
lock_guard<mutex> __lock(get_atomic_mutex());
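
The `__atomic0` definitions above guard every operation with a single global mutex when lock-free primitives are unavailable. A self-contained sketch of that pattern (the mutex accessor and flag type here are illustrative, not the library's internals):

```cpp
#include <mutex>

// One global mutex serializes every "atomic" operation -- the same
// strategy the __atomic0 fallback applies via get_atomic_mutex().
std::mutex& atomic_mutex()
{
  static std::mutex m;
  return m;
}

struct flag
{
  bool value = false;

  // noexcept matches the patched signatures; a lock failure
  // escaping here would call std::terminate.
  bool test_and_set() noexcept
  {
    std::lock_guard<std::mutex> lock(atomic_mutex());
    bool old = value;
    value = true;
    return old;
  }

  void clear() noexcept
  {
    std::lock_guard<std::mutex> lock(atomic_mutex());
    value = false;
  }
};
```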