diff options
author | KJ Tsanaktsidis <[email protected]> | 2023-11-19 22:54:01 +1100 |
---|---|---|
committer | Koichi Sasada <[email protected]> | 2023-12-10 15:00:37 +0900 |
commit | aecbd66742f43ccfcac04ca4143fcc68ad834320 (patch) | |
tree | fe3eecf53d903ae62fd071cfa4820a654a56cf34 /include/ruby | |
parent | 6e8ad7497e459a1e64a3fdb4ab92d52546cff58c (diff) |
Add RUBY_ATOMIC_{PTR_,}FETCH macros for atomic loads
This can already be emulated by doing an atomic fetch_add of zero, but
this is more explicit.
[Bug #19994]
Diffstat (limited to 'include/ruby')
-rw-r--r-- | include/ruby/atomic.h | 51 |
1 files changed, 51 insertions, 0 deletions
diff --git a/include/ruby/atomic.h b/include/ruby/atomic.h index 3eb80fbf7d..eb106631f6 100644 --- a/include/ruby/atomic.h +++ b/include/ruby/atomic.h @@ -139,6 +139,15 @@ typedef unsigned int rb_atomic_t; rbimpl_atomic_cas(&(var), (oldval), (newval)) /** + * Atomic load. This loads `var` with an atomic intrinsic and returns + * its value. + * + * @param var A variable of ::rb_atomic_t + * @return What was stored in `var` + */ +#define RUBY_ATOMIC_LOAD(var) rbimpl_atomic_load(&(var)) + +/** * Identical to #RUBY_ATOMIC_EXCHANGE, except for the return type. * * @param var A variable of ::rb_atomic_t. @@ -280,6 +289,17 @@ typedef unsigned int rb_atomic_t; RBIMPL_CAST(rbimpl_atomic_ptr_exchange((void **)&(var), (void *)val)) /** + * Identical to #RUBY_ATOMIC_LOAD, except it expects its arguments are `void*`. + * There are cases where ::rb_atomic_t is 32bit while `void*` is 64bit. This + * should be used for size related operations to support such platforms. + * + * @param var A variable of `void*` + * @return The value of `var` (without tearing) + */ +#define RUBY_ATOMIC_PTR_LOAD(var) \ + RBIMPL_CAST(rbimpl_atomic_ptr_load((void **)&var)) + +/** * Identical to #RUBY_ATOMIC_CAS, except it expects its arguments are `void*`. * There are cases where ::rb_atomic_t is 32bit while `void*` is 64bit. This * should be used for size related operations to support such platforms. 
@@ -749,6 +769,21 @@ rbimpl_atomic_value_exchange(volatile VALUE *ptr, VALUE val) RBIMPL_ATTR_ARTIFICIAL() RBIMPL_ATTR_NOALIAS() RBIMPL_ATTR_NONNULL((1)) +static inline rb_atomic_t +rbimpl_atomic_load(volatile rb_atomic_t *ptr) +{ +#if 0 + +#elif defined(HAVE_GCC_ATOMIC_BUILTINS) + return __atomic_load_n(ptr, __ATOMIC_SEQ_CST); +#else + return rbimpl_atomic_fetch_add(ptr, 0); +#endif +} + +RBIMPL_ATTR_ARTIFICIAL() +RBIMPL_ATTR_NOALIAS() +RBIMPL_ATTR_NONNULL((1)) static inline void rbimpl_atomic_set(volatile rb_atomic_t *ptr, rb_atomic_t val) { @@ -875,6 +910,22 @@ rbimpl_atomic_ptr_cas(void **ptr, const void *oldval, const void *newval) RBIMPL_ATTR_ARTIFICIAL() RBIMPL_ATTR_NOALIAS() RBIMPL_ATTR_NONNULL((1)) +static inline void * +rbimpl_atomic_ptr_load(void **ptr) +{ +#if 0 + +#elif defined(HAVE_GCC_ATOMIC_BUILTINS) + return __atomic_load_n(ptr, __ATOMIC_SEQ_CST); +#else + void *val = *ptr; + return rbimpl_atomic_ptr_cas(ptr, val, val); +#endif +} + +RBIMPL_ATTR_ARTIFICIAL() +RBIMPL_ATTR_NOALIAS() +RBIMPL_ATTR_NONNULL((1)) static inline VALUE rbimpl_atomic_value_cas(volatile VALUE *ptr, VALUE oldval, VALUE newval) { |