LCOV - code coverage report
Current view: top level - src/include/port/atomics - generic.h (source / functions) Hit Total Coverage
Test: PostgreSQL Lines: 65 69 94.2 %
Date: 2017-09-29 15:12:54 Functions: 19 19 100.0 %
Legend: Lines: hit not hit

          Line data    Source code
       1             : /*-------------------------------------------------------------------------
       2             :  *
       3             :  * generic.h
       4             :  *    Implement higher level operations based on some lower level atomic
       5             :  *    operations.
       6             :  *
       7             :  * Portions Copyright (c) 1996-2017, PostgreSQL Global Development Group
       8             :  * Portions Copyright (c) 1994, Regents of the University of California
       9             :  *
      10             :  * src/include/port/atomics/generic.h
      11             :  *
      12             :  *-------------------------------------------------------------------------
      13             :  */
      14             : 
      15             : /* intentionally no include guards, should only be included by atomics.h */
      16             : #ifndef INSIDE_ATOMICS_H
      17             : #   error "should be included via atomics.h"
      18             : #endif
      19             : 
      20             : /*
      21             :  * If read or write barriers are undefined, we upgrade them to full memory
      22             :  * barriers.
      23             :  */
      24             : #if !defined(pg_read_barrier_impl)
      25             : #   define pg_read_barrier_impl pg_memory_barrier_impl
      26             : #endif
      27             : #if !defined(pg_write_barrier_impl)
      28             : #   define pg_write_barrier_impl pg_memory_barrier_impl
      29             : #endif
      30             : 
      31             : #ifndef PG_HAVE_SPIN_DELAY
      32             : #define PG_HAVE_SPIN_DELAY
      33             : #define pg_spin_delay_impl()    ((void)0)
      34             : #endif
      35             : 
      36             : 
/*
 * provide fallback
 *
 * If the platform has no native flag type but does have 32 bit atomics,
 * represent a flag as a pg_atomic_uint32 (0 = clear, 1 = set; see the
 * flag operations below).
 */
#if !defined(PG_HAVE_ATOMIC_FLAG_SUPPORT) && defined(PG_HAVE_ATOMIC_U32_SUPPORT)
#define PG_HAVE_ATOMIC_FLAG_SUPPORT
typedef pg_atomic_uint32 pg_atomic_flag;
#endif
      42             : 
      43             : #ifndef PG_HAVE_ATOMIC_READ_U32
      44             : #define PG_HAVE_ATOMIC_READ_U32
static inline uint32
pg_atomic_read_u32_impl(volatile pg_atomic_uint32 *ptr)
{
    /*
     * Plain load through the volatile-qualified pointer; this fallback
     * provides no barrier semantics by itself.
     */
    return *(&ptr->value);
}
      50             : #endif
      51             : 
      52             : #ifndef PG_HAVE_ATOMIC_WRITE_U32
      53             : #define PG_HAVE_ATOMIC_WRITE_U32
static inline void
pg_atomic_write_u32_impl(volatile pg_atomic_uint32 *ptr, uint32 val)
{
    /*
     * Plain store through the volatile-qualified pointer; this fallback
     * provides no barrier semantics by itself.
     */
    ptr->value = val;
}
      59             : #endif
      60             : 
      61             : #ifndef PG_HAVE_ATOMIC_UNLOCKED_WRITE_U32
      62             : #define PG_HAVE_ATOMIC_UNLOCKED_WRITE_U32
static inline void
pg_atomic_unlocked_write_u32_impl(volatile pg_atomic_uint32 *ptr, uint32 val)
{
    /*
     * Non-atomic ("unlocked") write; in this generic fallback it is the
     * same plain volatile store as pg_atomic_write_u32_impl().
     */
    ptr->value = val;
}
      68             : #endif
      69             : 
      70             : /*
      71             :  * provide fallback for test_and_set using atomic_exchange if available
      72             :  */
      73             : #if !defined(PG_HAVE_ATOMIC_TEST_SET_FLAG) && defined(PG_HAVE_ATOMIC_EXCHANGE_U32)
      74             : 
      75             : #define PG_HAVE_ATOMIC_INIT_FLAG
static inline void
pg_atomic_init_flag_impl(volatile pg_atomic_flag *ptr)
{
    /* A flag starts out clear; 0 means "not set". */
    pg_atomic_write_u32_impl(ptr, 0);
}
      81             : 
      82             : #define PG_HAVE_ATOMIC_TEST_SET_FLAG
      83             : static inline bool
      84             : pg_atomic_test_set_flag_impl(volatile pg_atomic_flag *ptr)
      85             : {
      86             :     return pg_atomic_exchange_u32_impl(ptr, &value, 1) == 0;
      87             : }
      88             : 
      89             : #define PG_HAVE_ATOMIC_UNLOCKED_TEST_FLAG
static inline bool
pg_atomic_unlocked_test_flag_impl(volatile pg_atomic_flag *ptr)
{
    /*
     * Returns true iff the flag is currently clear.  This is a plain read
     * with no ordering guarantees and does not modify the flag.
     */
    return pg_atomic_read_u32_impl(ptr) == 0;
}
      95             : 
      96             : 
      97             : #define PG_HAVE_ATOMIC_CLEAR_FLAG
static inline void
pg_atomic_clear_flag_impl(volatile pg_atomic_flag *ptr)
{
    /*
     * Issue a full barrier before the store, so writes made while holding
     * the flag are visible before the flag appears clear.
     */
    /* XXX: release semantics suffice? */
    pg_memory_barrier_impl();
    pg_atomic_write_u32_impl(ptr, 0);
}
     105             : 
     106             : /*
     107             :  * provide fallback for test_and_set using atomic_compare_exchange if
     108             :  * available.
     109             :  */
     110             : #elif !defined(PG_HAVE_ATOMIC_TEST_SET_FLAG) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U32)
     111             : 
     112             : #define PG_HAVE_ATOMIC_INIT_FLAG
static inline void
pg_atomic_init_flag_impl(volatile pg_atomic_flag *ptr)
{
    /* A flag starts out clear; 0 means "not set". */
    pg_atomic_write_u32_impl(ptr, 0);
}
     118             : 
     119             : #define PG_HAVE_ATOMIC_TEST_SET_FLAG
     120             : static inline bool
     121             : pg_atomic_test_set_flag_impl(volatile pg_atomic_flag *ptr)
     122             : {
     123             :     uint32 value = 0;
     124             :     return pg_atomic_compare_exchange_u32_impl(ptr, &value, 1);
     125             : }
     126             : 
     127             : #define PG_HAVE_ATOMIC_UNLOCKED_TEST_FLAG
static inline bool
pg_atomic_unlocked_test_flag_impl(volatile pg_atomic_flag *ptr)
{
    /*
     * Returns true iff the flag is currently clear.  This is a plain read
     * with no ordering guarantees and does not modify the flag.
     */
    return pg_atomic_read_u32_impl(ptr) == 0;
}
     133             : 
     134             : #define PG_HAVE_ATOMIC_CLEAR_FLAG
static inline void
pg_atomic_clear_flag_impl(volatile pg_atomic_flag *ptr)
{
    /*
     * Use a memory barrier + plain write if we have a native memory
     * barrier. But don't do so if memory barriers use spinlocks - that'd lead
     * to circularity if flags are used to implement spinlocks.
     */
#ifndef PG_HAVE_MEMORY_BARRIER_EMULATION
    /* XXX: release semantics suffice? */
    pg_memory_barrier_impl();
    pg_atomic_write_u32_impl(ptr, 0);
#else
    /*
     * With emulated barriers, clear via compare-exchange from 1 to 0 to
     * avoid the spinlock circularity described above.
     */
    uint32 value = 1;
    pg_atomic_compare_exchange_u32_impl(ptr, &value, 0);
#endif
}
     152             : 
     153             : #elif !defined(PG_HAVE_ATOMIC_TEST_SET_FLAG)
     154             : #   error "No pg_atomic_test_and_set provided"
     155             : #endif /* !defined(PG_HAVE_ATOMIC_TEST_SET_FLAG) */
     156             : 
     157             : 
     158             : #ifndef PG_HAVE_ATOMIC_INIT_U32
     159             : #define PG_HAVE_ATOMIC_INIT_U32
static inline void
pg_atomic_init_u32_impl(volatile pg_atomic_uint32 *ptr, uint32 val_)
{
    /* Initialization is just a plain write of the starting value. */
    pg_atomic_write_u32_impl(ptr, val_);
}
     165             : #endif
     166             : 
     167             : #if !defined(PG_HAVE_ATOMIC_EXCHANGE_U32) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U32)
     168             : #define PG_HAVE_ATOMIC_EXCHANGE_U32
     169             : static inline uint32
     170         397 : pg_atomic_exchange_u32_impl(volatile pg_atomic_uint32 *ptr, uint32 xchg_)
     171             : {
     172             :     uint32 old;
     173             :     while (true)
     174             :     {
     175         397 :         old = pg_atomic_read_u32_impl(ptr);
     176         397 :         if (pg_atomic_compare_exchange_u32_impl(ptr, &old, xchg_))
     177         397 :             break;
     178           0 :     }
     179         397 :     return old;
     180             : }
     181             : #endif
     182             : 
     183             : #if !defined(PG_HAVE_ATOMIC_FETCH_ADD_U32) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U32)
     184             : #define PG_HAVE_ATOMIC_FETCH_ADD_U32
     185             : static inline uint32
     186             : pg_atomic_fetch_add_u32_impl(volatile pg_atomic_uint32 *ptr, int32 add_)
     187             : {
     188             :     uint32 old;
     189             :     while (true)
     190             :     {
     191             :         old = pg_atomic_read_u32_impl(ptr);
     192             :         if (pg_atomic_compare_exchange_u32_impl(ptr, &old, old + add_))
     193             :             break;
     194             :     }
     195             :     return old;
     196             : }
     197             : #endif
     198             : 
     199             : #if !defined(PG_HAVE_ATOMIC_FETCH_SUB_U32) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U32)
     200             : #define PG_HAVE_ATOMIC_FETCH_SUB_U32
     201             : static inline uint32
     202    14180332 : pg_atomic_fetch_sub_u32_impl(volatile pg_atomic_uint32 *ptr, int32 sub_)
     203             : {
     204    14180332 :     return pg_atomic_fetch_add_u32_impl(ptr, -sub_);
     205             : }
     206             : #endif
     207             : 
     208             : #if !defined(PG_HAVE_ATOMIC_FETCH_AND_U32) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U32)
     209             : #define PG_HAVE_ATOMIC_FETCH_AND_U32
     210             : static inline uint32
     211     1394667 : pg_atomic_fetch_and_u32_impl(volatile pg_atomic_uint32 *ptr, uint32 and_)
     212             : {
     213             :     uint32 old;
     214             :     while (true)
     215             :     {
     216     1394667 :         old = pg_atomic_read_u32_impl(ptr);
     217     1394667 :         if (pg_atomic_compare_exchange_u32_impl(ptr, &old, old & and_))
     218     1394658 :             break;
     219           9 :     }
     220     1394658 :     return old;
     221             : }
     222             : #endif
     223             : 
     224             : #if !defined(PG_HAVE_ATOMIC_FETCH_OR_U32) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U32)
     225             : #define PG_HAVE_ATOMIC_FETCH_OR_U32
     226             : static inline uint32
     227     1720607 : pg_atomic_fetch_or_u32_impl(volatile pg_atomic_uint32 *ptr, uint32 or_)
     228             : {
     229             :     uint32 old;
     230             :     while (true)
     231             :     {
     232     1720607 :         old = pg_atomic_read_u32_impl(ptr);
     233     1720607 :         if (pg_atomic_compare_exchange_u32_impl(ptr, &old, old | or_))
     234     1720462 :             break;
     235         145 :     }
     236     1720462 :     return old;
     237             : }
     238             : #endif
     239             : 
     240             : #if !defined(PG_HAVE_ATOMIC_ADD_FETCH_U32) && defined(PG_HAVE_ATOMIC_FETCH_ADD_U32)
     241             : #define PG_HAVE_ATOMIC_ADD_FETCH_U32
     242             : static inline uint32
     243          47 : pg_atomic_add_fetch_u32_impl(volatile pg_atomic_uint32 *ptr, int32 add_)
     244             : {
     245          47 :     return pg_atomic_fetch_add_u32_impl(ptr, add_) + add_;
     246             : }
     247             : #endif
     248             : 
     249             : #if !defined(PG_HAVE_ATOMIC_SUB_FETCH_U32) && defined(PG_HAVE_ATOMIC_FETCH_SUB_U32)
     250             : #define PG_HAVE_ATOMIC_SUB_FETCH_U32
     251             : static inline uint32
     252    14180330 : pg_atomic_sub_fetch_u32_impl(volatile pg_atomic_uint32 *ptr, int32 sub_)
     253             : {
     254    14180330 :     return pg_atomic_fetch_sub_u32_impl(ptr, sub_) - sub_;
     255             : }
     256             : #endif
     257             : 
     258             : #if !defined(PG_HAVE_ATOMIC_EXCHANGE_U64) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U64)
     259             : #define PG_HAVE_ATOMIC_EXCHANGE_U64
     260             : static inline uint64
     261          20 : pg_atomic_exchange_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 xchg_)
     262             : {
     263             :     uint64 old;
     264             :     while (true)
     265             :     {
     266          20 :         old = ptr->value;
     267          20 :         if (pg_atomic_compare_exchange_u64_impl(ptr, &old, xchg_))
     268          20 :             break;
     269           0 :     }
     270          20 :     return old;
     271             : }
     272             : #endif
     273             : 
     274             : #ifndef PG_HAVE_ATOMIC_WRITE_U64
     275             : #define PG_HAVE_ATOMIC_WRITE_U64
     276             : 
     277             : #if defined(PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY) && \
     278             :     !defined(PG_HAVE_ATOMIC_U64_SIMULATION)
     279             : 
static inline void
pg_atomic_write_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 val)
{
    /*
     * On this platform aligned 64bit writes are guaranteed to be atomic,
     * except if using the fallback implementation, where we can't guarantee
     * the required alignment.
     */
    AssertPointerAlignment(ptr, 8);
    ptr->value = val;
}
     291             : 
     292             : #else
     293             : 
static inline void
pg_atomic_write_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 val)
{
    /*
     * 64 bit writes aren't safe on all platforms. In the generic
     * implementation, implement them as an atomic exchange (the old value
     * is discarded).
     */
    pg_atomic_exchange_u64_impl(ptr, val);
}
     303             : 
     304             : #endif /* PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY && !PG_HAVE_ATOMIC_U64_SIMULATION */
     305             : #endif /* PG_HAVE_ATOMIC_WRITE_U64 */
     306             : 
     307             : #ifndef PG_HAVE_ATOMIC_READ_U64
     308             : #define PG_HAVE_ATOMIC_READ_U64
     309             : 
     310             : #if defined(PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY) && \
     311             :     !defined(PG_HAVE_ATOMIC_U64_SIMULATION)
     312             : 
static inline uint64
pg_atomic_read_u64_impl(volatile pg_atomic_uint64 *ptr)
{
    /*
     * On this platform aligned 64bit reads are guaranteed to be atomic,
     * except if using the fallback implementation, where we can't guarantee
     * the required alignment.
     */
    AssertPointerAlignment(ptr, 8);
    return *(&ptr->value);
}
     324             : 
     325             : #else
     326             : 
static inline uint64
pg_atomic_read_u64_impl(volatile pg_atomic_uint64 *ptr)
{
    uint64 old = 0;

    /*
     * 64 bit reads aren't safe on all platforms. In the generic
     * implementation implement them as a compare/exchange with 0. That'll
     * fail or succeed, but always return the old value. It's possible that
     * this stores a 0, but only if the previous value also was a 0 - i.e.
     * harmless.
     */
    pg_atomic_compare_exchange_u64_impl(ptr, &old, 0);

    return old;
}
     342             : #endif /* PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY && !PG_HAVE_ATOMIC_U64_SIMULATION */
     343             : #endif /* PG_HAVE_ATOMIC_READ_U64 */
     344             : 
     345             : #ifndef PG_HAVE_ATOMIC_INIT_U64
     346             : #define PG_HAVE_ATOMIC_INIT_U64
static inline void
pg_atomic_init_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 val_)
{
    /* Initialization is just a write of the starting value. */
    pg_atomic_write_u64_impl(ptr, val_);
}
     352             : #endif
     353             : 
     354             : #if !defined(PG_HAVE_ATOMIC_FETCH_ADD_U64) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U64)
     355             : #define PG_HAVE_ATOMIC_FETCH_ADD_U64
     356             : static inline uint64
     357             : pg_atomic_fetch_add_u64_impl(volatile pg_atomic_uint64 *ptr, int64 add_)
     358             : {
     359             :     uint64 old;
     360             :     while (true)
     361             :     {
     362             :         old = pg_atomic_read_u64_impl(ptr);
     363             :         if (pg_atomic_compare_exchange_u64_impl(ptr, &old, old + add_))
     364             :             break;
     365             :     }
     366             :     return old;
     367             : }
     368             : #endif
     369             : 
     370             : #if !defined(PG_HAVE_ATOMIC_FETCH_SUB_U64) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U64)
     371             : #define PG_HAVE_ATOMIC_FETCH_SUB_U64
     372             : static inline uint64
     373           2 : pg_atomic_fetch_sub_u64_impl(volatile pg_atomic_uint64 *ptr, int64 sub_)
     374             : {
     375           2 :     return pg_atomic_fetch_add_u64_impl(ptr, -sub_);
     376             : }
     377             : #endif
     378             : 
     379             : #if !defined(PG_HAVE_ATOMIC_FETCH_AND_U64) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U64)
     380             : #define PG_HAVE_ATOMIC_FETCH_AND_U64
     381             : static inline uint64
     382           3 : pg_atomic_fetch_and_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 and_)
     383             : {
     384             :     uint64 old;
     385             :     while (true)
     386             :     {
     387           3 :         old = pg_atomic_read_u64_impl(ptr);
     388           3 :         if (pg_atomic_compare_exchange_u64_impl(ptr, &old, old & and_))
     389           3 :             break;
     390           0 :     }
     391           3 :     return old;
     392             : }
     393             : #endif
     394             : 
     395             : #if !defined(PG_HAVE_ATOMIC_FETCH_OR_U64) && defined(PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U64)
     396             : #define PG_HAVE_ATOMIC_FETCH_OR_U64
     397             : static inline uint64
     398           2 : pg_atomic_fetch_or_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 or_)
     399             : {
     400             :     uint64 old;
     401             :     while (true)
     402             :     {
     403           2 :         old = pg_atomic_read_u64_impl(ptr);
     404           2 :         if (pg_atomic_compare_exchange_u64_impl(ptr, &old, old | or_))
     405           2 :             break;
     406           0 :     }
     407           2 :     return old;
     408             : }
     409             : #endif
     410             : 
     411             : #if !defined(PG_HAVE_ATOMIC_ADD_FETCH_U64) && defined(PG_HAVE_ATOMIC_FETCH_ADD_U64)
     412             : #define PG_HAVE_ATOMIC_ADD_FETCH_U64
     413             : static inline uint64
     414           1 : pg_atomic_add_fetch_u64_impl(volatile pg_atomic_uint64 *ptr, int64 add_)
     415             : {
     416           1 :     return pg_atomic_fetch_add_u64_impl(ptr, add_) + add_;
     417             : }
     418             : #endif
     419             : 
     420             : #if !defined(PG_HAVE_ATOMIC_SUB_FETCH_U64) && defined(PG_HAVE_ATOMIC_FETCH_SUB_U64)
     421             : #define PG_HAVE_ATOMIC_SUB_FETCH_U64
     422             : static inline uint64
     423           1 : pg_atomic_sub_fetch_u64_impl(volatile pg_atomic_uint64 *ptr, int64 sub_)
     424             : {
     425           1 :     return pg_atomic_fetch_sub_u64_impl(ptr, sub_) - sub_;
     426             : }
     427             : #endif

Generated by: LCOV version 1.11