libstdc++
atomic_base.h
// -*- C++ -*- header.

// Copyright (C) 2008, 2009, 2010, 2011, 2012 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.

/** @file bits/atomic_base.h
 *  This is an internal header file, included by other library headers.
 *  Do not attempt to use it directly. @headername{atomic}
 */

#ifndef _GLIBCXX_ATOMIC_BASE_H
#define _GLIBCXX_ATOMIC_BASE_H 1

#pragma GCC system_header

#include <bits/c++config.h>
#include <stdbool.h>
#include <stdint.h>
#include <bits/atomic_lockfree_defines.h>

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

  /**
   * @defgroup atomics Atomics
   *
   * Components for performing atomic operations.
   * @{
   */

  /// Enumeration for memory_order
  typedef enum memory_order
    {
      memory_order_relaxed,
      memory_order_consume,
      memory_order_acquire,
      memory_order_release,
      memory_order_acq_rel,
      memory_order_seq_cst
    } memory_order;

  // Drop release ordering as per [atomics.types.operations.req]/21
  constexpr memory_order
  __cmpexch_failure_order(memory_order __m) noexcept
  {
    return __m == memory_order_acq_rel ? memory_order_acquire
      : __m == memory_order_release ? memory_order_relaxed : __m;
  }

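  // For illustration (editorial note, not part of the header), the
  // mapping above yields
  //
  //   __cmpexch_failure_order(memory_order_acq_rel) == memory_order_acquire
  //   __cmpexch_failure_order(memory_order_release) == memory_order_relaxed
  //   __cmpexch_failure_order(memory_order_seq_cst) == memory_order_seq_cst
  //
  // so the failure ordering of a compare-exchange never requests a store
  // (release) component, which a failed exchange could not provide.
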
  inline void
  atomic_thread_fence(memory_order __m) noexcept
  { __atomic_thread_fence(__m); }

  inline void
  atomic_signal_fence(memory_order __m) noexcept
  { __atomic_signal_fence(__m); }

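  // A minimal sketch (editorial note, not part of the header) of the
  // release/acquire fence pattern these wrappers support.  The names
  // __data and __ready are hypothetical:
  //
  //   int __data;
  //   atomic_int __ready(0);
  //
  //   void __produce()                     // runs in thread 1
  //   {
  //     __data = 42;
  //     atomic_thread_fence(memory_order_release);
  //     __ready.store(1, memory_order_relaxed);
  //   }
  //
  //   void __consume()                     // runs in thread 2
  //   {
  //     while (!__ready.load(memory_order_relaxed))
  //       { }
  //     atomic_thread_fence(memory_order_acquire);
  //     // The fences pair up: observing __ready == 1 here guarantees
  //     // that __data reads 42.
  //   }
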
  /// kill_dependency
  template<typename _Tp>
    inline _Tp
    kill_dependency(_Tp __y) noexcept
    {
      _Tp __ret(__y);
      return __ret;
    }

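  // Editorial note: kill_dependency terminates a consume-ordering
  // dependency chain; values computed from its result need not be
  // ordered after the originating load.  A hypothetical sketch,
  // assuming __head is an atomic pointer into a linked structure:
  //
  //   // node* __n = __head.load(memory_order_consume);
  //   // int __i = kill_dependency(__n->__index);  // __i carries no
  //   //                                           // dependency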

  // Base types for atomics.
  template<typename _IntTp>
    struct __atomic_base;

  /// atomic_char
  typedef __atomic_base<char>                   atomic_char;

  /// atomic_schar
  typedef __atomic_base<signed char>            atomic_schar;

  /// atomic_uchar
  typedef __atomic_base<unsigned char>          atomic_uchar;

  /// atomic_short
  typedef __atomic_base<short>                  atomic_short;

  /// atomic_ushort
  typedef __atomic_base<unsigned short>         atomic_ushort;

  /// atomic_int
  typedef __atomic_base<int>                    atomic_int;

  /// atomic_uint
  typedef __atomic_base<unsigned int>           atomic_uint;

  /// atomic_long
  typedef __atomic_base<long>                   atomic_long;

  /// atomic_ulong
  typedef __atomic_base<unsigned long>          atomic_ulong;

  /// atomic_llong
  typedef __atomic_base<long long>              atomic_llong;

  /// atomic_ullong
  typedef __atomic_base<unsigned long long>     atomic_ullong;

  /// atomic_wchar_t
  typedef __atomic_base<wchar_t>                atomic_wchar_t;

  /// atomic_char16_t
  typedef __atomic_base<char16_t>               atomic_char16_t;

  /// atomic_char32_t
  typedef __atomic_base<char32_t>               atomic_char32_t;


  /// atomic_int_least8_t
  typedef __atomic_base<int_least8_t>           atomic_int_least8_t;

  /// atomic_uint_least8_t
  typedef __atomic_base<uint_least8_t>          atomic_uint_least8_t;

  /// atomic_int_least16_t
  typedef __atomic_base<int_least16_t>          atomic_int_least16_t;

  /// atomic_uint_least16_t
  typedef __atomic_base<uint_least16_t>         atomic_uint_least16_t;

  /// atomic_int_least32_t
  typedef __atomic_base<int_least32_t>          atomic_int_least32_t;

  /// atomic_uint_least32_t
  typedef __atomic_base<uint_least32_t>         atomic_uint_least32_t;

  /// atomic_int_least64_t
  typedef __atomic_base<int_least64_t>          atomic_int_least64_t;

  /// atomic_uint_least64_t
  typedef __atomic_base<uint_least64_t>         atomic_uint_least64_t;


  /// atomic_int_fast8_t
  typedef __atomic_base<int_fast8_t>            atomic_int_fast8_t;

  /// atomic_uint_fast8_t
  typedef __atomic_base<uint_fast8_t>           atomic_uint_fast8_t;

  /// atomic_int_fast16_t
  typedef __atomic_base<int_fast16_t>           atomic_int_fast16_t;

  /// atomic_uint_fast16_t
  typedef __atomic_base<uint_fast16_t>          atomic_uint_fast16_t;

  /// atomic_int_fast32_t
  typedef __atomic_base<int_fast32_t>           atomic_int_fast32_t;

  /// atomic_uint_fast32_t
  typedef __atomic_base<uint_fast32_t>          atomic_uint_fast32_t;

  /// atomic_int_fast64_t
  typedef __atomic_base<int_fast64_t>           atomic_int_fast64_t;

  /// atomic_uint_fast64_t
  typedef __atomic_base<uint_fast64_t>          atomic_uint_fast64_t;


  /// atomic_intptr_t
  typedef __atomic_base<intptr_t>               atomic_intptr_t;

  /// atomic_uintptr_t
  typedef __atomic_base<uintptr_t>              atomic_uintptr_t;

  /// atomic_size_t
  typedef __atomic_base<size_t>                 atomic_size_t;

  /// atomic_intmax_t
  typedef __atomic_base<intmax_t>               atomic_intmax_t;

  /// atomic_uintmax_t
  typedef __atomic_base<uintmax_t>              atomic_uintmax_t;

  /// atomic_ptrdiff_t
  typedef __atomic_base<ptrdiff_t>              atomic_ptrdiff_t;

#define ATOMIC_VAR_INIT(_VI) { _VI }

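  // Editorial usage note: ATOMIC_VAR_INIT provides C-compatible static
  // initialization of an atomic object, e.g.
  //
  //   atomic_int __counter = ATOMIC_VAR_INIT(0);
  //
  // which expands to aggregate-style list-initialization and so invokes
  // the constexpr constructor without any copy.
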
  template<typename _Tp>
    struct atomic;

  template<typename _Tp>
    struct atomic<_Tp*>;


  /**
   *  @brief Base type for atomic_flag.
   *
   *  The base type is a POD struct holding only the data member, which
   *  lets atomic_flag derive from it while remaining a standard-layout
   *  type. Besides compatibility with a C interface, this allows
   *  different implementations of atomic_flag to share the same atomic
   *  operation functions, via a standard conversion to the
   *  __atomic_flag_base argument.
   */
  _GLIBCXX_BEGIN_EXTERN_C

  struct __atomic_flag_base
  {
    /* The target's "set" value for test-and-set may not be exactly 1.  */
#if __GCC_ATOMIC_TEST_AND_SET_TRUEVAL == 1
    bool _M_i;
#else
    unsigned char _M_i;
#endif
  };

  _GLIBCXX_END_EXTERN_C

#define ATOMIC_FLAG_INIT { 0 }

  /// atomic_flag
  struct atomic_flag : public __atomic_flag_base
  {
    atomic_flag() noexcept = default;
    ~atomic_flag() noexcept = default;
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;

    // Constructor used for initialization from ATOMIC_FLAG_INIT.
    constexpr atomic_flag(bool __i) noexcept
      : __atomic_flag_base({ __i ? __GCC_ATOMIC_TEST_AND_SET_TRUEVAL : 0 })
    { }

    bool
    test_and_set(memory_order __m = memory_order_seq_cst) noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }

    bool
    test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }

    void
    clear(memory_order __m = memory_order_seq_cst) noexcept
    {
      // A clear is a store, so acquire-flavored orderings are invalid.
      __glibcxx_assert(__m != memory_order_consume);
      __glibcxx_assert(__m != memory_order_acquire);
      __glibcxx_assert(__m != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }

    void
    clear(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      __glibcxx_assert(__m != memory_order_consume);
      __glibcxx_assert(__m != memory_order_acquire);
      __glibcxx_assert(__m != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }
  };

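  // Editorial sketch (not part of the header): atomic_flag is the
  // building block for a minimal spin lock.  ATOMIC_FLAG_INIT gives the
  // clear state; the names below are hypothetical:
  //
  //   atomic_flag __lock = ATOMIC_FLAG_INIT;
  //
  //   void __acquire_lock()
  //   {
  //     // Spin until the previous value was "clear".
  //     while (__lock.test_and_set(memory_order_acquire))
  //       { }
  //   }
  //
  //   void __release_lock()
  //   { __lock.clear(memory_order_release); }
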

  /// Base class for atomic integrals.
  //
  // For each of the integral types, define atomic_[integral type] struct
  //
  // atomic_bool     bool
  // atomic_char     char
  // atomic_schar    signed char
  // atomic_uchar    unsigned char
  // atomic_short    short
  // atomic_ushort   unsigned short
  // atomic_int      int
  // atomic_uint     unsigned int
  // atomic_long     long
  // atomic_ulong    unsigned long
  // atomic_llong    long long
  // atomic_ullong   unsigned long long
  // atomic_char16_t char16_t
  // atomic_char32_t char32_t
  // atomic_wchar_t  wchar_t
  //
  // NB: Assuming _ITp is an integral scalar type that is 1, 2, 4, or
  // 8 bytes, since that is what GCC built-in functions for atomic
  // memory access expect.
  template<typename _ITp>
    struct __atomic_base
    {
    private:
      typedef _ITp      __int_type;

      __int_type        _M_i;

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __int_type convertible to _M_i.
      constexpr __atomic_base(__int_type __i) noexcept : _M_i (__i) { }

      operator __int_type() const noexcept
      { return load(); }

      operator __int_type() const volatile noexcept
      { return load(); }

      __int_type
      operator=(__int_type __i) noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator=(__int_type __i) volatile noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __int_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __int_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __int_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __int_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) volatile noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) volatile noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) volatile noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) volatile noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) volatile noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }

      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free (sizeof (_M_i), &_M_i); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free (sizeof (_M_i), &_M_i); }

      void
      store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept
      {
        __glibcxx_assert(__m != memory_order_acquire);
        __glibcxx_assert(__m != memory_order_acq_rel);
        __glibcxx_assert(__m != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }

      void
      store(__int_type __i,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        __glibcxx_assert(__m != memory_order_acquire);
        __glibcxx_assert(__m != memory_order_acq_rel);
        __glibcxx_assert(__m != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }

      __int_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        __glibcxx_assert(__m != memory_order_release);
        __glibcxx_assert(__m != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }

      __int_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        __glibcxx_assert(__m != memory_order_release);
        __glibcxx_assert(__m != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }

      __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, __m);
      }

      __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, __m);
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1, memory_order __m2) noexcept
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                   memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1, memory_order __m2) noexcept
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                 memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }

      __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }

      __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }
    };

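  // Editorial sketch: typical use of the integral operations above,
  // through the atomic_int typedef.  All names are hypothetical:
  //
  //   atomic_int __refs(1);
  //
  //   void __add_ref()
  //   { __refs.fetch_add(1, memory_order_relaxed); }
  //
  //   bool __release_ref()               // true when the object may die
  //   { return __refs.fetch_sub(1, memory_order_acq_rel) == 1; }
  //
  //   void __set_min(atomic_int& __a, int __val)
  //   {
  //     int __cur = __a.load(memory_order_relaxed);
  //     // compare_exchange_weak reloads __cur on failure, so the loop
  //     // terminates once no smaller value is observed.
  //     while (__val < __cur
  //            && !__a.compare_exchange_weak(__cur, __val))
  //       { }
  //   }
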

  /// Partial specialization for pointer types.
  template<typename _PTp>
    struct __atomic_base<_PTp*>
    {
    private:
      typedef _PTp*     __pointer_type;

      __pointer_type    _M_p;

      // Factored out to facilitate explicit specialization.
      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) { return __d * sizeof(_PTp); }

      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) volatile { return __d * sizeof(_PTp); }

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __pointer_type convertible to _M_p.
      constexpr __atomic_base(__pointer_type __p) noexcept : _M_p (__p) { }

      operator __pointer_type() const noexcept
      { return load(); }

      operator __pointer_type() const volatile noexcept
      { return load(); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __pointer_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __pointer_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free(_M_type_size(1), &_M_p); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(_M_type_size(1), &_M_p); }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      {
        __glibcxx_assert(__m != memory_order_acquire);
        __glibcxx_assert(__m != memory_order_acq_rel);
        __glibcxx_assert(__m != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        __glibcxx_assert(__m != memory_order_acquire);
        __glibcxx_assert(__m != memory_order_acq_rel);
        __glibcxx_assert(__m != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        __glibcxx_assert(__m != memory_order_release);
        __glibcxx_assert(__m != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        __glibcxx_assert(__m != memory_order_release);
        __glibcxx_assert(__m != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, __m);
      }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, __m);
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) noexcept
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }
    };

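  // Editorial sketch: arithmetic on atomic pointers is scaled by
  // sizeof(_PTp) through _M_type_size, matching ordinary pointer
  // arithmetic.  Hypothetical example:
  //
  //   int __buf[16];
  //   __atomic_base<int*> __cursor(__buf);
  //
  //   int* __prev = __cursor.fetch_add(4);  // returns __buf and
  //                                         // advances the stored
  //                                         // pointer by
  //                                         // 4 * sizeof(int) bytes,
  //                                         // i.e. to __buf + 4
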
  // @} group atomics

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std

#endif