From 2bc82a651428769e0b4bac33078f9e6a0c596971 Mon Sep 17 00:00:00 2001 From: Sam Gross Date: Thu, 17 Oct 2019 11:52:35 -0400 Subject: [PATCH 01/24] gh-108337: Add pyatomic.h header This adds a new header that provides atomic operations on common data types. The intention is that this will be exposed through Python.h, although that is not the case yet. The only immediate use is in the test file. --- Include/internal/pycore_atomic.h | 6 +- Include/pyatomic.h | 372 ++++++++ Include/pyatomic_gcc.h | 679 ++++++++++++++ Include/pyatomic_msc.h | 861 ++++++++++++++++++ Include/pyatomic_std.h | 799 ++++++++++++++++ Lib/test/test_capi/test_pyatomic.py | 15 + Makefile.pre.in | 3 + .../2023-08-22-13-00-54.bpo-108337.wceHZm.rst | 1 + Modules/Setup.stdlib.in | 2 +- Modules/_testcapi/parts.h | 1 + Modules/_testcapi/pyatomic.c | 165 ++++ Modules/_testcapimodule.c | 3 + PCbuild/_testcapi.vcxproj | 1 + PCbuild/_testcapi.vcxproj.filters | 3 + PCbuild/pythoncore.vcxproj | 2 + PCbuild/pythoncore.vcxproj.filters | 6 + 16 files changed, 2915 insertions(+), 4 deletions(-) create mode 100644 Include/pyatomic.h create mode 100644 Include/pyatomic_gcc.h create mode 100644 Include/pyatomic_msc.h create mode 100644 Include/pyatomic_std.h create mode 100644 Lib/test/test_capi/test_pyatomic.py create mode 100644 Misc/NEWS.d/next/C API/2023-08-22-13-00-54.bpo-108337.wceHZm.rst create mode 100644 Modules/_testcapi/pyatomic.c diff --git a/Include/internal/pycore_atomic.h b/Include/internal/pycore_atomic.h index 48d246ea08f3d9..22ce971a64f3df 100644 --- a/Include/internal/pycore_atomic.h +++ b/Include/internal/pycore_atomic.h @@ -1,5 +1,5 @@ -#ifndef Py_ATOMIC_H -#define Py_ATOMIC_H +#ifndef Py_INTERNAL_ATOMIC_H +#define Py_INTERNAL_ATOMIC_H #ifdef __cplusplus extern "C" { #endif @@ -554,4 +554,4 @@ typedef struct _Py_atomic_int { #ifdef __cplusplus } #endif -#endif /* Py_ATOMIC_H */ +#endif /* Py_INTERNAL_ATOMIC_H */ diff --git a/Include/pyatomic.h b/Include/pyatomic.h new file mode 100644 index 00000000000000..b8ad93562eb024 --- /dev/null +++ b/Include/pyatomic.h @@ -0,0 +1,372 @@ +#ifndef Py_ATOMIC_H +#define Py_ATOMIC_H + +static inline int +_Py_atomic_add_int(volatile int *address, int value); + +static inline int8_t +_Py_atomic_add_int8(volatile int8_t *address, int8_t value); + +static inline int16_t +_Py_atomic_add_int16(volatile int16_t *address, int16_t value); + +static inline int32_t +_Py_atomic_add_int32(volatile int32_t *address, int32_t value); + +static inline int64_t +_Py_atomic_add_int64(volatile int64_t *address, int64_t value); + +static inline intptr_t +_Py_atomic_add_intptr(volatile intptr_t *address, intptr_t value); + +static inline unsigned int +_Py_atomic_add_uint(volatile unsigned int *address, unsigned int value); + +static inline uint8_t +_Py_atomic_add_uint8(volatile uint8_t *address, uint8_t value); + +static inline uint16_t +_Py_atomic_add_uint16(volatile uint16_t *address, uint16_t value); + +static inline uint32_t +_Py_atomic_add_uint32(volatile uint32_t *address, uint32_t value); + +static inline uint64_t +_Py_atomic_add_uint64(volatile uint64_t *address, uint64_t value); + +static inline uintptr_t +_Py_atomic_add_uintptr(volatile uintptr_t *address, uintptr_t value); + +static inline Py_ssize_t +_Py_atomic_add_ssize(volatile Py_ssize_t *address, Py_ssize_t value); + + +static inline int +_Py_atomic_compare_exchange_int(volatile int *address, int expected, int value); + +static inline int +_Py_atomic_compare_exchange_int8(volatile int8_t *address, int8_t expected, int8_t value); + +static 
inline int +_Py_atomic_compare_exchange_int16(volatile int16_t *address, int16_t expected, int16_t value); + +static inline int +_Py_atomic_compare_exchange_int32(volatile int32_t *address, int32_t expected, int32_t value); + +static inline int +_Py_atomic_compare_exchange_int64(volatile int64_t *address, int64_t expected, int64_t value); + +static inline int +_Py_atomic_compare_exchange_intptr(volatile intptr_t *address, intptr_t expected, intptr_t value); + +static inline int +_Py_atomic_compare_exchange_uint(volatile unsigned int *address, unsigned int expected, unsigned int value); + +static inline int +_Py_atomic_compare_exchange_uint8(volatile uint8_t *address, uint8_t expected, uint8_t value); + +static inline int +_Py_atomic_compare_exchange_uint16(volatile uint16_t *address, uint16_t expected, uint16_t value); + +static inline int +_Py_atomic_compare_exchange_uint32(volatile uint32_t *address, uint32_t expected, uint32_t value); + +static inline int +_Py_atomic_compare_exchange_uint64(volatile uint64_t *address, uint64_t expected, uint64_t value); + +static inline int +_Py_atomic_compare_exchange_uintptr(volatile uintptr_t *address, uintptr_t expected, uintptr_t value); + +static inline int +_Py_atomic_compare_exchange_ssize(volatile Py_ssize_t *address, Py_ssize_t expected, Py_ssize_t value); + +static inline int +_Py_atomic_compare_exchange_ptr(volatile void *address, void *expected, void *value); + + +static inline int +_Py_atomic_exchange_int(volatile int *address, int value); + +static inline int8_t +_Py_atomic_exchange_int8(volatile int8_t *address, int8_t value); + +static inline int16_t +_Py_atomic_exchange_int16(volatile int16_t *address, int16_t value); + +static inline int32_t +_Py_atomic_exchange_int32(volatile int32_t *address, int32_t value); + +static inline int64_t +_Py_atomic_exchange_int64(volatile int64_t *address, int64_t value); + +static inline intptr_t +_Py_atomic_exchange_intptr(volatile intptr_t *address, intptr_t value); + +static inline unsigned int +_Py_atomic_exchange_uint(volatile unsigned int *address, unsigned int value); + +static inline uint8_t +_Py_atomic_exchange_uint8(volatile uint8_t *address, uint8_t value); + +static inline uint16_t +_Py_atomic_exchange_uint16(volatile uint16_t *address, uint16_t value); + +static inline uint32_t +_Py_atomic_exchange_uint32(volatile uint32_t *address, uint32_t value); + +static inline uint64_t +_Py_atomic_exchange_uint64(volatile uint64_t *address, uint64_t value); + +static inline uintptr_t +_Py_atomic_exchange_uintptr(volatile uintptr_t *address, uintptr_t value); + +static inline Py_ssize_t +_Py_atomic_exchange_ssize(volatile Py_ssize_t *address, Py_ssize_t value); + +static inline void * +_Py_atomic_exchange_ptr(volatile void *address, void *value); + + +static inline uint8_t +_Py_atomic_and_uint8(volatile uint8_t *address, uint8_t value); + +static inline uint16_t +_Py_atomic_and_uint16(volatile uint16_t *address, uint16_t value); + +static inline uint32_t +_Py_atomic_and_uint32(volatile uint32_t *address, uint32_t value); + +static inline uint64_t +_Py_atomic_and_uint64(volatile uint64_t *address, uint64_t value); + +static inline uintptr_t +_Py_atomic_and_uintptr(volatile uintptr_t *address, uintptr_t value); + + +static inline uint8_t +_Py_atomic_or_uint8(volatile uint8_t *address, uint8_t value); + +static inline uint16_t +_Py_atomic_or_uint16(volatile uint16_t *address, uint16_t value); + +static inline uint32_t +_Py_atomic_or_uint32(volatile uint32_t *address, uint32_t value); + +static inline 
uint64_t +_Py_atomic_or_uint64(volatile uint64_t *address, uint64_t value); + +static inline uintptr_t +_Py_atomic_or_uintptr(volatile uintptr_t *address, uintptr_t value); + + +static inline int +_Py_atomic_load_int(const volatile int *address); + +static inline int8_t +_Py_atomic_load_int8(const volatile int8_t *address); + +static inline int16_t +_Py_atomic_load_int16(const volatile int16_t *address); + +static inline int32_t +_Py_atomic_load_int32(const volatile int32_t *address); + +static inline int64_t +_Py_atomic_load_int64(const volatile int64_t *address); + +static inline intptr_t +_Py_atomic_load_intptr(const volatile intptr_t *address); + +static inline uint8_t +_Py_atomic_load_uint8(const volatile uint8_t *address); + +static inline uint16_t +_Py_atomic_load_uint16(const volatile uint16_t *address); + +static inline uint32_t +_Py_atomic_load_uint32(const volatile uint32_t *address); + +static inline uint64_t +_Py_atomic_load_uint64(const volatile uint64_t *address); + +static inline uintptr_t +_Py_atomic_load_uintptr(const volatile uintptr_t *address); + +static inline unsigned int +_Py_atomic_load_uint(const volatile unsigned int *address); + +static inline Py_ssize_t +_Py_atomic_load_ssize(const volatile Py_ssize_t *address); + +static inline void * +_Py_atomic_load_ptr(const volatile void *address); + + +static inline int +_Py_atomic_load_int_relaxed(const volatile int *address); + +static inline int8_t +_Py_atomic_load_int8_relaxed(const volatile int8_t *address); + +static inline int16_t +_Py_atomic_load_int16_relaxed(const volatile int16_t *address); + +static inline int32_t +_Py_atomic_load_int32_relaxed(const volatile int32_t *address); + +static inline int64_t +_Py_atomic_load_int64_relaxed(const volatile int64_t *address); + +static inline intptr_t +_Py_atomic_load_intptr_relaxed(const volatile intptr_t *address); + +static inline uint8_t +_Py_atomic_load_uint8_relaxed(const volatile uint8_t *address); + +static inline uint16_t +_Py_atomic_load_uint16_relaxed(const volatile uint16_t *address); + +static inline uint32_t +_Py_atomic_load_uint32_relaxed(const volatile uint32_t *address); + +static inline uint64_t +_Py_atomic_load_uint64_relaxed(const volatile uint64_t *address); + +static inline uintptr_t +_Py_atomic_load_uintptr_relaxed(const volatile uintptr_t *address); + +static inline unsigned int +_Py_atomic_load_uint_relaxed(const volatile unsigned int *address); + +static inline Py_ssize_t +_Py_atomic_load_ssize_relaxed(const volatile Py_ssize_t *address); + +static inline void * +_Py_atomic_load_ptr_relaxed(const volatile void *address); + + +static inline void +_Py_atomic_store_int(volatile int *address, int value); + +static inline void +_Py_atomic_store_int8(volatile int8_t *address, int8_t value); + +static inline void +_Py_atomic_store_int16(volatile int16_t *address, int16_t value); + +static inline void +_Py_atomic_store_int32(volatile int32_t *address, int32_t value); + +static inline void +_Py_atomic_store_int64(volatile int64_t *address, int64_t value); + +static inline void +_Py_atomic_store_intptr(volatile intptr_t *address, intptr_t value); + +static inline void +_Py_atomic_store_uint8(volatile uint8_t *address, uint8_t value); + +static inline void +_Py_atomic_store_uint16(volatile uint16_t *address, uint16_t value); + +static inline void +_Py_atomic_store_uint32(volatile uint32_t *address, uint32_t value); + +static inline void +_Py_atomic_store_uint64(volatile uint64_t *address, uint64_t value); + +static inline void 
+_Py_atomic_store_uintptr(volatile uintptr_t *address, uintptr_t value); + +static inline void +_Py_atomic_store_uint(volatile unsigned int *address, unsigned int value); + +static inline void +_Py_atomic_store_ptr(volatile void *address, void *value); + +static inline void +_Py_atomic_store_ssize(volatile Py_ssize_t* address, Py_ssize_t value); + + +static inline void +_Py_atomic_store_int_relaxed(volatile int *address, int value); + +static inline void +_Py_atomic_store_int8_relaxed(volatile int8_t *address, int8_t value); + +static inline void +_Py_atomic_store_int16_relaxed(volatile int16_t *address, int16_t value); + +static inline void +_Py_atomic_store_int32_relaxed(volatile int32_t *address, int32_t value); + +static inline void +_Py_atomic_store_int64_relaxed(volatile int64_t *address, int64_t value); + +static inline void +_Py_atomic_store_intptr_relaxed(volatile intptr_t *address, intptr_t value); + +static inline void +_Py_atomic_store_uint8_relaxed(volatile uint8_t* address, uint8_t value); + +static inline void +_Py_atomic_store_uint16_relaxed(volatile uint16_t *address, uint16_t value); + +static inline void +_Py_atomic_store_uint32_relaxed(volatile uint32_t *address, uint32_t value); + +static inline void +_Py_atomic_store_uint64_relaxed(volatile uint64_t *address, uint64_t value); + +static inline void +_Py_atomic_store_uintptr_relaxed(volatile uintptr_t *address, uintptr_t value); + +static inline void +_Py_atomic_store_uint_relaxed(volatile unsigned int *address, unsigned int value); + +static inline void +_Py_atomic_store_ptr_relaxed(volatile void *address, void *value); + +static inline void +_Py_atomic_store_ssize_relaxed(volatile Py_ssize_t *address, Py_ssize_t value); + + +static inline void +_Py_atomic_store_uint64_release(volatile uint64_t *address, uint64_t value); + +static inline void +_Py_atomic_store_ptr_release(volatile void *address, void *value); + + + static inline void +_Py_atomic_fence_seq_cst(void); + + static inline void +_Py_atomic_fence_release(void); + + +#ifndef _Py_USE_GCC_BUILTIN_ATOMICS +#if defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)) +#define _Py_USE_GCC_BUILTIN_ATOMICS 1 +#elif defined(__clang__) +#if __has_builtin(__atomic_load) +#define _Py_USE_GCC_BUILTIN_ATOMICS 1 +#endif +#endif +#endif + +#if _Py_USE_GCC_BUILTIN_ATOMICS +#define Py_ATOMIC_GCC_H +#include "pyatomic_gcc.h" +#elif __STDC_VERSION__ >= 201112L && !defined(__STDC_NO_ATOMICS__) +#define Py_ATOMIC_STD_H +#include "pyatomic_std.h" +#elif defined(_MSC_VER) +#define Py_ATOMIC_MSC_H +#include "pyatomic_msc.h" +#else +#error "define pyatomic for this platform" +#endif + +#endif /* Py_ATOMIC_H */ + diff --git a/Include/pyatomic_gcc.h b/Include/pyatomic_gcc.h new file mode 100644 index 00000000000000..64d917933d12da --- /dev/null +++ b/Include/pyatomic_gcc.h @@ -0,0 +1,679 @@ +#ifndef Py_ATOMIC_GCC_H +# error "this header file must not be included directly" +#endif + +// This is the implementation of Python atomic operations using GCC's built-in +// functions that match the C+11 memory model. This implementation is preferred +// for GCC compatible compilers, such as Clang. These functions are available in +// GCC 4.8+ without needing to compile with --std=c11 or --std=gnu11. 
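As a caller-side sketch of the API these implementations back (not part of the patch; `pending`, `incr_pending` and `add_if_below_limit` are hypothetical, and pyatomic.h is assumed to be in scope), the two most common patterns look like this. Note that in this version of the API `_Py_atomic_compare_exchange_*` takes `expected` by value and returns non-zero on success, so a failed exchange re-reads the current value before retrying:

static Py_ssize_t pending;                 /* hypothetical shared counter */

static void
incr_pending(void)
{
    /* sequentially consistent fetch-add; the old value is ignored here */
    _Py_atomic_add_ssize(&pending, 1);
}

static int
add_if_below_limit(volatile int *counter, int limit)
{
    int old = _Py_atomic_load_int(counter);
    while (old < limit) {
        if (_Py_atomic_compare_exchange_int(counter, old, old + 1)) {
            return 1;                         /* this thread installed old + 1 */
        }
        old = _Py_atomic_load_int(counter);   /* lost a race; re-read and retry */
    }
    return 0;                                 /* counter already at the limit */
}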
+ +static inline int +_Py_atomic_add_int(volatile int *address, int value) +{ + return __atomic_fetch_add(address, value, __ATOMIC_SEQ_CST); +} + +static inline unsigned int +_Py_atomic_add_uint(volatile unsigned int *address, unsigned int value) +{ + return __atomic_fetch_add(address, value, __ATOMIC_SEQ_CST); +} + +static inline int8_t +_Py_atomic_add_int8(volatile int8_t *address, int8_t value) +{ + return __atomic_fetch_add(address, value, __ATOMIC_SEQ_CST); +} + +static inline int16_t +_Py_atomic_add_int16(volatile int16_t *address, int16_t value) +{ + return __atomic_fetch_add(address, value, __ATOMIC_SEQ_CST); +} + +static inline int32_t +_Py_atomic_add_int32(volatile int32_t *address, int32_t value) +{ + return __atomic_fetch_add(address, value, __ATOMIC_SEQ_CST); +} + +static inline int64_t +_Py_atomic_add_int64(volatile int64_t *address, int64_t value) +{ + return __atomic_fetch_add(address, value, __ATOMIC_SEQ_CST); +} + +static inline intptr_t +_Py_atomic_add_intptr(volatile intptr_t *address, intptr_t value) +{ + return __atomic_fetch_add(address, value, __ATOMIC_SEQ_CST); +} + +static inline uint8_t +_Py_atomic_add_uint8(volatile uint8_t *address, uint8_t value) +{ + return __atomic_fetch_add(address, value, __ATOMIC_SEQ_CST); +} + +static inline uint16_t +_Py_atomic_add_uint16(volatile uint16_t *address, uint16_t value) +{ + return __atomic_fetch_add(address, value, __ATOMIC_SEQ_CST); +} + +static inline uint32_t +_Py_atomic_add_uint32(volatile uint32_t *address, uint32_t value) +{ + return __atomic_fetch_add(address, value, __ATOMIC_SEQ_CST); +} + +static inline uint64_t +_Py_atomic_add_uint64(volatile uint64_t *address, uint64_t value) +{ + return __atomic_fetch_add(address, value, __ATOMIC_SEQ_CST); +} + +static inline uintptr_t +_Py_atomic_add_uintptr(volatile uintptr_t *address, uintptr_t value) +{ + return __atomic_fetch_add(address, value, __ATOMIC_SEQ_CST); +} + +static inline Py_ssize_t +_Py_atomic_add_ssize(volatile Py_ssize_t *address, Py_ssize_t value) +{ + return __atomic_fetch_add(address, value, __ATOMIC_SEQ_CST); +} + + +static inline int +_Py_atomic_compare_exchange_int(volatile int *address, int expected, int value) +{ + return __atomic_compare_exchange_n(address, &expected, value, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); +} + +static inline int +_Py_atomic_compare_exchange_int8(volatile int8_t *address, int8_t expected, int8_t value) +{ + return __atomic_compare_exchange_n(address, &expected, value, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); +} + +static inline int +_Py_atomic_compare_exchange_int16(volatile int16_t *address, int16_t expected, int16_t value) +{ + return __atomic_compare_exchange_n(address, &expected, value, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); +} + +static inline int +_Py_atomic_compare_exchange_int32(volatile int32_t *address, int32_t expected, int32_t value) +{ + return __atomic_compare_exchange_n(address, &expected, value, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); +} + +static inline int +_Py_atomic_compare_exchange_int64(volatile int64_t *address, int64_t expected, int64_t value) +{ + return __atomic_compare_exchange_n(address, &expected, value, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); +} + +static inline int +_Py_atomic_compare_exchange_intptr(volatile intptr_t *address, intptr_t expected, intptr_t value) +{ + return __atomic_compare_exchange_n(address, &expected, value, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); +} + +static inline int +_Py_atomic_compare_exchange_uint(volatile unsigned int *address, unsigned int expected, unsigned int 
value) +{ + return __atomic_compare_exchange_n(address, &expected, value, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); +} + +static inline int +_Py_atomic_compare_exchange_uint8(volatile uint8_t *address, uint8_t expected, uint8_t value) +{ + return __atomic_compare_exchange_n(address, &expected, value, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); +} + +static inline int +_Py_atomic_compare_exchange_uint16(volatile uint16_t *address, uint16_t expected, uint16_t value) +{ + return __atomic_compare_exchange_n(address, &expected, value, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); +} + +static inline int +_Py_atomic_compare_exchange_uint32(volatile uint32_t *address, uint32_t expected, uint32_t value) +{ + return __atomic_compare_exchange_n(address, &expected, value, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); +} + +static inline int +_Py_atomic_compare_exchange_uint64(volatile uint64_t *address, uint64_t expected, uint64_t value) +{ + return __atomic_compare_exchange_n(address, &expected, value, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); +} + +static inline int +_Py_atomic_compare_exchange_uintptr(volatile uintptr_t *address, uintptr_t expected, uintptr_t value) +{ + return __atomic_compare_exchange_n(address, &expected, value, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); +} + +static inline int +_Py_atomic_compare_exchange_ssize(volatile Py_ssize_t *address, Py_ssize_t expected, Py_ssize_t value) +{ + return __atomic_compare_exchange_n(address, &expected, value, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); +} + +static inline int +_Py_atomic_compare_exchange_ptr(volatile void *address, void *expected, void *value) +{ + volatile void *e = expected; + return __atomic_compare_exchange_n((volatile void **)address, &e, value, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); +} + + +static inline int +_Py_atomic_exchange_int(volatile int *address, int value) +{ + return __atomic_exchange_n(address, value, __ATOMIC_SEQ_CST); +} + +static inline int8_t +_Py_atomic_exchange_int8(volatile int8_t *address, int8_t value) +{ + return __atomic_exchange_n(address, value, __ATOMIC_SEQ_CST); +} + +static inline int16_t +_Py_atomic_exchange_int16(volatile int16_t *address, int16_t value) +{ + return __atomic_exchange_n(address, value, __ATOMIC_SEQ_CST); +} + +static inline int32_t +_Py_atomic_exchange_int32(volatile int32_t *address, int32_t value) +{ + return __atomic_exchange_n(address, value, __ATOMIC_SEQ_CST); +} + +static inline int64_t +_Py_atomic_exchange_int64(volatile int64_t *address, int64_t value) +{ + return __atomic_exchange_n(address, value, __ATOMIC_SEQ_CST); +} + +static inline intptr_t +_Py_atomic_exchange_intptr(volatile intptr_t *address, intptr_t value) +{ + return __atomic_exchange_n(address, value, __ATOMIC_SEQ_CST); +} + +static inline unsigned int +_Py_atomic_exchange_uint(volatile unsigned int *address, unsigned int value) +{ + return __atomic_exchange_n(address, value, __ATOMIC_SEQ_CST); +} + +static inline uint8_t +_Py_atomic_exchange_uint8(volatile uint8_t *address, uint8_t value) +{ + return __atomic_exchange_n(address, value, __ATOMIC_SEQ_CST); +} + +static inline uint16_t +_Py_atomic_exchange_uint16(volatile uint16_t *address, uint16_t value) +{ + return __atomic_exchange_n(address, value, __ATOMIC_SEQ_CST); +} + +static inline uint32_t +_Py_atomic_exchange_uint32(volatile uint32_t *address, uint32_t value) +{ + return __atomic_exchange_n(address, value, __ATOMIC_SEQ_CST); +} + +static inline uint64_t +_Py_atomic_exchange_uint64(volatile uint64_t *address, uint64_t value) +{ + return __atomic_exchange_n(address, 
value, __ATOMIC_SEQ_CST); +} + +static inline uintptr_t +_Py_atomic_exchange_uintptr(volatile uintptr_t *address, uintptr_t value) +{ + return __atomic_exchange_n(address, value, __ATOMIC_SEQ_CST); +} + +static inline Py_ssize_t +_Py_atomic_exchange_ssize(volatile Py_ssize_t *address, Py_ssize_t value) +{ + return __atomic_exchange_n(address, value, __ATOMIC_SEQ_CST); +} + +static inline void * +_Py_atomic_exchange_ptr(volatile void *address, void *value) +{ + return __atomic_exchange_n((void **)address, value, __ATOMIC_SEQ_CST); +} + +static inline uint8_t +_Py_atomic_and_uint8(volatile uint8_t *address, uint8_t value) +{ + return __atomic_fetch_and(address, value, __ATOMIC_SEQ_CST); +} + +static inline uint16_t +_Py_atomic_and_uint16(volatile uint16_t *address, uint16_t value) +{ + return __atomic_fetch_and(address, value, __ATOMIC_SEQ_CST); +} + +static inline uint32_t +_Py_atomic_and_uint32(volatile uint32_t *address, uint32_t value) +{ + return __atomic_fetch_and(address, value, __ATOMIC_SEQ_CST); +} + +static inline uint64_t +_Py_atomic_and_uint64(volatile uint64_t *address, uint64_t value) +{ + return __atomic_fetch_and(address, value, __ATOMIC_SEQ_CST); +} + +static inline uintptr_t +_Py_atomic_and_uintptr(volatile uintptr_t *address, uintptr_t value) +{ + return __atomic_fetch_and(address, value, __ATOMIC_SEQ_CST); +} + +static inline uint8_t +_Py_atomic_or_uint8(volatile uint8_t *address, uint8_t value) +{ + return __atomic_fetch_or(address, value, __ATOMIC_SEQ_CST); +} + +static inline uint16_t +_Py_atomic_or_uint16(volatile uint16_t *address, uint16_t value) +{ + return __atomic_fetch_or(address, value, __ATOMIC_SEQ_CST); +} + +static inline uint32_t +_Py_atomic_or_uint32(volatile uint32_t *address, uint32_t value) +{ + return __atomic_fetch_or(address, value, __ATOMIC_SEQ_CST); +} + +static inline uint64_t +_Py_atomic_or_uint64(volatile uint64_t *address, uint64_t value) +{ + return __atomic_fetch_or(address, value, __ATOMIC_SEQ_CST); +} + +static inline uintptr_t +_Py_atomic_or_uintptr(volatile uintptr_t *address, uintptr_t value) +{ + return __atomic_fetch_or(address, value, __ATOMIC_SEQ_CST); +} + +static inline int +_Py_atomic_load_int(const volatile int *address) +{ + return __atomic_load_n(address, __ATOMIC_SEQ_CST); +} + +static inline int8_t +_Py_atomic_load_int8(const volatile int8_t *address) +{ + return __atomic_load_n(address, __ATOMIC_SEQ_CST); +} + +static inline int16_t +_Py_atomic_load_int16(const volatile int16_t *address) +{ + return __atomic_load_n(address, __ATOMIC_SEQ_CST); +} + +static inline int32_t +_Py_atomic_load_int32(const volatile int32_t *address) +{ + return __atomic_load_n(address, __ATOMIC_SEQ_CST); +} + +static inline int64_t +_Py_atomic_load_int64(const volatile int64_t *address) +{ + return __atomic_load_n(address, __ATOMIC_SEQ_CST); +} + +static inline intptr_t +_Py_atomic_load_intptr(const volatile intptr_t *address) +{ + return __atomic_load_n(address, __ATOMIC_SEQ_CST); +} + +static inline uint8_t +_Py_atomic_load_uint8(const volatile uint8_t *address) +{ + return __atomic_load_n(address, __ATOMIC_SEQ_CST); +} + +static inline uint16_t +_Py_atomic_load_uint16(const volatile uint16_t *address) +{ + return __atomic_load_n(address, __ATOMIC_SEQ_CST); +} + +static inline uint32_t +_Py_atomic_load_uint32(const volatile uint32_t *address) +{ + return __atomic_load_n(address, __ATOMIC_SEQ_CST); +} + +static inline uint64_t +_Py_atomic_load_uint64(const volatile uint64_t *address) +{ + return __atomic_load_n(address, __ATOMIC_SEQ_CST); +} + 
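The release stores and (sequentially consistent) loads declared in pyatomic.h are enough for a simple pointer-publication pattern; a minimal sketch follows (`config_t`, `current_config`, `publish_config` and `get_config` are hypothetical and not part of this header; the patch declares no acquire load, but the default seq_cst load is at least as strong as one):

typedef struct { int verbose; } config_t;   /* hypothetical payload */
static void *current_config;                 /* hypothetical shared slot */

static void
publish_config(config_t *cfg)
{
    cfg->verbose = 1;                                    /* plain writes first */
    _Py_atomic_store_ptr_release(&current_config, cfg);  /* then publish */
}

static config_t *
get_config(void)
{
    /* a reader that sees the pointer also sees cfg's earlier plain writes */
    return (config_t *)_Py_atomic_load_ptr(&current_config);
}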
+static inline uintptr_t +_Py_atomic_load_uintptr(const volatile uintptr_t *address) +{ + return __atomic_load_n(address, __ATOMIC_SEQ_CST); +} + +static inline unsigned int +_Py_atomic_load_uint(const volatile unsigned int *address) +{ + return __atomic_load_n(address, __ATOMIC_SEQ_CST); +} + +static inline Py_ssize_t +_Py_atomic_load_ssize(const volatile Py_ssize_t *address) +{ + return __atomic_load_n(address, __ATOMIC_SEQ_CST); +} + +static inline void * +_Py_atomic_load_ptr(const volatile void *address) +{ + return (void *)__atomic_load_n((volatile void **)address, __ATOMIC_SEQ_CST); +} + + +static inline int +_Py_atomic_load_int_relaxed(const volatile int *address) +{ + return __atomic_load_n(address, __ATOMIC_RELAXED); +} + +static inline int8_t +_Py_atomic_load_int8_relaxed(const volatile int8_t *address) +{ + return __atomic_load_n(address, __ATOMIC_RELAXED); +} + +static inline int16_t +_Py_atomic_load_int16_relaxed(const volatile int16_t *address) +{ + return __atomic_load_n(address, __ATOMIC_RELAXED); +} + +static inline int32_t +_Py_atomic_load_int32_relaxed(const volatile int32_t *address) +{ + return __atomic_load_n(address, __ATOMIC_RELAXED); +} + +static inline int64_t +_Py_atomic_load_int64_relaxed(const volatile int64_t *address) +{ + return __atomic_load_n(address, __ATOMIC_RELAXED); +} + +static inline intptr_t +_Py_atomic_load_intptr_relaxed(const volatile intptr_t *address) +{ + return __atomic_load_n(address, __ATOMIC_RELAXED); +} + +static inline uint8_t +_Py_atomic_load_uint8_relaxed(const volatile uint8_t *address) +{ + return __atomic_load_n(address, __ATOMIC_RELAXED); +} + +static inline uint16_t +_Py_atomic_load_uint16_relaxed(const volatile uint16_t *address) +{ + return __atomic_load_n(address, __ATOMIC_RELAXED); +} + +static inline uint32_t +_Py_atomic_load_uint32_relaxed(const volatile uint32_t *address) +{ + return __atomic_load_n(address, __ATOMIC_RELAXED); +} + +static inline uint64_t +_Py_atomic_load_uint64_relaxed(const volatile uint64_t *address) +{ + return __atomic_load_n(address, __ATOMIC_RELAXED); +} + +static inline uintptr_t +_Py_atomic_load_uintptr_relaxed(const volatile uintptr_t *address) +{ + return __atomic_load_n(address, __ATOMIC_RELAXED); +} + +static inline unsigned int +_Py_atomic_load_uint_relaxed(const volatile unsigned int *address) +{ + return __atomic_load_n(address, __ATOMIC_RELAXED); +} + +static inline Py_ssize_t +_Py_atomic_load_ssize_relaxed(const volatile Py_ssize_t *address) +{ + return __atomic_load_n(address, __ATOMIC_RELAXED); +} + +static inline void * +_Py_atomic_load_ptr_relaxed(const volatile void *address) +{ + return (void *)__atomic_load_n((const volatile void **)address, __ATOMIC_RELAXED); +} + +static inline void +_Py_atomic_store_int(volatile int *address, int value) +{ + __atomic_store_n(address, value, __ATOMIC_SEQ_CST); +} + +static inline void +_Py_atomic_store_int8(volatile int8_t *address, int8_t value) +{ + __atomic_store_n(address, value, __ATOMIC_SEQ_CST); +} + +static inline void +_Py_atomic_store_int16(volatile int16_t *address, int16_t value) +{ + __atomic_store_n(address, value, __ATOMIC_SEQ_CST); +} + +static inline void +_Py_atomic_store_int32(volatile int32_t *address, int32_t value) +{ + __atomic_store_n(address, value, __ATOMIC_SEQ_CST); +} + +static inline void +_Py_atomic_store_int64(volatile int64_t *address, int64_t value) +{ + __atomic_store_n(address, value, __ATOMIC_SEQ_CST); +} + +static inline void +_Py_atomic_store_intptr(volatile intptr_t *address, intptr_t value) +{ + 
__atomic_store_n(address, value, __ATOMIC_SEQ_CST); +} + +static inline void +_Py_atomic_store_uint8(volatile uint8_t *address, uint8_t value) +{ + __atomic_store_n(address, value, __ATOMIC_SEQ_CST); +} + +static inline void +_Py_atomic_store_uint16(volatile uint16_t *address, uint16_t value) +{ + __atomic_store_n(address, value, __ATOMIC_SEQ_CST); +} + +static inline void +_Py_atomic_store_uint32(volatile uint32_t *address, uint32_t value) +{ + __atomic_store_n(address, value, __ATOMIC_SEQ_CST); +} + +static inline void +_Py_atomic_store_uint64(volatile uint64_t *address, uint64_t value) +{ + __atomic_store_n(address, value, __ATOMIC_SEQ_CST); +} + +static inline void +_Py_atomic_store_uintptr(volatile uintptr_t *address, uintptr_t value) +{ + __atomic_store_n(address, value, __ATOMIC_SEQ_CST); +} + +static inline void +_Py_atomic_store_uint(volatile unsigned int *address, unsigned int value) +{ + __atomic_store_n(address, value, __ATOMIC_SEQ_CST); +} + +static inline void +_Py_atomic_store_ptr(volatile void *address, void *value) +{ + __atomic_store_n((volatile void **)address, value, __ATOMIC_SEQ_CST); +} + +static inline void +_Py_atomic_store_ssize(volatile Py_ssize_t *address, Py_ssize_t value) +{ + __atomic_store_n(address, value, __ATOMIC_SEQ_CST); +} + +static inline void +_Py_atomic_store_int_relaxed(volatile int *address, int value) +{ + __atomic_store_n(address, value, __ATOMIC_RELAXED); +} + +static inline void +_Py_atomic_store_int8_relaxed(volatile int8_t *address, int8_t value) +{ + __atomic_store_n(address, value, __ATOMIC_RELAXED); +} + +static inline void +_Py_atomic_store_int16_relaxed(volatile int16_t *address, int16_t value) +{ + __atomic_store_n(address, value, __ATOMIC_RELAXED); +} + +static inline void +_Py_atomic_store_int32_relaxed(volatile int32_t *address, int32_t value) +{ + __atomic_store_n(address, value, __ATOMIC_RELAXED); +} + +static inline void +_Py_atomic_store_int64_relaxed(volatile int64_t *address, int64_t value) +{ + __atomic_store_n(address, value, __ATOMIC_RELAXED); +} + +static inline void +_Py_atomic_store_intptr_relaxed(volatile intptr_t *address, intptr_t value) +{ + __atomic_store_n(address, value, __ATOMIC_RELAXED); +} + +static inline void +_Py_atomic_store_uint8_relaxed(volatile uint8_t *address, uint8_t value) +{ + __atomic_store_n(address, value, __ATOMIC_RELAXED); +} + +static inline void +_Py_atomic_store_uint16_relaxed(volatile uint16_t *address, uint16_t value) +{ + __atomic_store_n(address, value, __ATOMIC_RELAXED); +} + +static inline void +_Py_atomic_store_uint32_relaxed(volatile uint32_t *address, uint32_t value) +{ + __atomic_store_n(address, value, __ATOMIC_RELAXED); +} + +static inline void +_Py_atomic_store_uint64_relaxed(volatile uint64_t *address, uint64_t value) +{ + __atomic_store_n(address, value, __ATOMIC_RELAXED); +} + +static inline void +_Py_atomic_store_uintptr_relaxed(volatile uintptr_t *address, uintptr_t value) +{ + __atomic_store_n(address, value, __ATOMIC_RELAXED); +} + +static inline void +_Py_atomic_store_uint_relaxed(volatile unsigned int *address, unsigned int value) +{ + __atomic_store_n(address, value, __ATOMIC_RELAXED); +} + +static inline void +_Py_atomic_store_ptr_relaxed(volatile void *address, void *value) +{ + __atomic_store_n((volatile void **)address, value, __ATOMIC_RELAXED); +} + +static inline void +_Py_atomic_store_ssize_relaxed(volatile Py_ssize_t *address, Py_ssize_t value) +{ + __atomic_store_n(address, value, __ATOMIC_RELAXED); +} + + +static inline void 
+_Py_atomic_store_uint64_release(volatile uint64_t *address, uint64_t value) +{ + __atomic_store_n(address, value, __ATOMIC_RELEASE); +} + +static inline void +_Py_atomic_store_ptr_release(volatile void *address, void *value) +{ + __atomic_store_n((volatile void **)address, value, __ATOMIC_RELEASE); +} + + static inline void +_Py_atomic_fence_seq_cst(void) +{ + __atomic_thread_fence(__ATOMIC_SEQ_CST); +} + + static inline void +_Py_atomic_fence_release(void) +{ + __atomic_thread_fence(__ATOMIC_RELEASE); +} diff --git a/Include/pyatomic_msc.h b/Include/pyatomic_msc.h new file mode 100644 index 00000000000000..626bc7a84bdbf2 --- /dev/null +++ b/Include/pyatomic_msc.h @@ -0,0 +1,861 @@ +#ifndef Py_ATOMIC_MSC_H +# error "this header file must not be included directly" +#endif + +// This is the implementation of Python atomic operations for MSVC if the +// compiler does not support C11 or C++11 atomics. + +#include + + +static inline int +_Py_atomic_add_int(volatile int *address, int value) +{ + return (int)_InterlockedExchangeAdd((volatile long*)address, (long)value); +} + +static inline unsigned int +_Py_atomic_add_uint(volatile unsigned int *address, unsigned int value) +{ + return (unsigned int)_InterlockedExchangeAdd((volatile long*)address, (long)value); +} + +static inline int8_t +_Py_atomic_add_int8(volatile int8_t *address, int8_t value) +{ + return (int8_t)_InterlockedExchangeAdd8((volatile char*)address, (char)value); +} + +static inline int16_t +_Py_atomic_add_int16(volatile int16_t *address, int16_t value) +{ + return (int16_t)_InterlockedExchangeAdd16((volatile short*)address, (short)value); +} + +static inline int32_t +_Py_atomic_add_int32(volatile int32_t *address, int32_t value) +{ + return (int32_t)_InterlockedExchangeAdd((volatile long*)address, (long)value); +} + +static inline int64_t +_Py_atomic_add_int64(volatile int64_t *address, int64_t value) +{ +#if defined(_M_X64) || defined(_M_ARM64) + return (int64_t)_InterlockedExchangeAdd64((volatile __int64*)address, (__int64)value); +#else + for (;;) { + int64_t old_value = *address; + int64_t new_value = old_value + value; + if (_InterlockedCompareExchange64((volatile __int64*)address, (__int64)new_value, (__int64)old_value)) { + return old_value; + } + } +#endif +} + +static inline intptr_t +_Py_atomic_add_intptr(volatile intptr_t *address, intptr_t value) +{ +#if SIZEOF_VOID_P == 8 + return (intptr_t)_InterlockedExchangeAdd64((volatile __int64*)address, (__int64)value); +#else + return (intptr_t)_InterlockedExchangeAdd((volatile long*)address, (long)value); +#endif +} + +static inline uint8_t +_Py_atomic_add_uint8(volatile uint8_t *address, uint8_t value) +{ + return (uint8_t)_InterlockedExchangeAdd8((volatile char*)address, (char)value); +} + +static inline uint16_t +_Py_atomic_add_uint16(volatile uint16_t *address, uint16_t value) +{ + return (uint16_t)_InterlockedExchangeAdd16((volatile short*)address, (short)value); +} + +static inline uint32_t +_Py_atomic_add_uint32(volatile uint32_t *address, uint32_t value) +{ + return (uint32_t)_InterlockedExchangeAdd((volatile long*)address, (long)value); +} + +static inline uint64_t +_Py_atomic_add_uint64(volatile uint64_t *address, uint64_t value) +{ + return (uint64_t)_Py_atomic_add_int64((volatile int64_t*)address, (int64_t)value); +} + +static inline uintptr_t +_Py_atomic_add_uintptr(volatile uintptr_t *address, uintptr_t value) +{ +#if SIZEOF_VOID_P == 8 + return (uintptr_t)_InterlockedExchangeAdd64((volatile __int64*)address, (__int64)value); +#else + return 
(uintptr_t)_InterlockedExchangeAdd((volatile long*)address, (long)value); +#endif +} + +static inline Py_ssize_t +_Py_atomic_add_ssize(volatile Py_ssize_t *address, Py_ssize_t value) +{ +#if SIZEOF_SIZE_T == 8 + return (Py_ssize_t)_InterlockedExchangeAdd64((volatile __int64*)address, (__int64)value); +#else + return (Py_ssize_t)_InterlockedExchangeAdd((volatile long*)address, (long)value); +#endif +} + + +static inline int +_Py_atomic_compare_exchange_int(volatile int *address, int expected, int value) +{ + return (long)expected == _InterlockedCompareExchange((volatile long*)address, (long)value, (long)expected); +} + +static inline int +_Py_atomic_compare_exchange_int8(volatile int8_t *address, int8_t expected, int8_t value) +{ + return (char)expected == _InterlockedCompareExchange8((volatile char*)address, (char)value, (char)expected); +} + +static inline int +_Py_atomic_compare_exchange_int16(volatile int16_t *address, int16_t expected, int16_t value) +{ + return (short)expected == _InterlockedCompareExchange16((volatile short*)address, (short)value, (short)expected); +} + +static inline int +_Py_atomic_compare_exchange_int32(volatile int32_t *address, int32_t expected, int32_t value) +{ + return (long)expected == _InterlockedCompareExchange((volatile long*)address, (long)value, (long)expected); +} + +static inline int +_Py_atomic_compare_exchange_int64(volatile int64_t *address, int64_t expected, int64_t value) +{ + return (__int64)expected == _InterlockedCompareExchange64((volatile __int64*)address, (__int64)value, (__int64)expected); +} + +static inline int +_Py_atomic_compare_exchange_intptr(volatile intptr_t *address, intptr_t expected, intptr_t value) +{ + return (void *)expected == _InterlockedCompareExchangePointer((void * volatile *)address, (void *)value, (void *)expected); +} + +static inline int +_Py_atomic_compare_exchange_uint8(volatile uint8_t *address, uint8_t expected, uint8_t value) +{ + return (char)expected == _InterlockedCompareExchange8((volatile char*)address, (char)value, (char)expected); +} + +static inline int +_Py_atomic_compare_exchange_uint16(volatile uint16_t *address, uint16_t expected, uint16_t value) +{ + return (short)expected == _InterlockedCompareExchange16((volatile short*)address, (short)value, (short)expected); +} + +static inline int +_Py_atomic_compare_exchange_uint(volatile unsigned int *address, unsigned int expected, unsigned int value) +{ + return (long)expected == _InterlockedCompareExchange((volatile long*)address, (long)value, (long)expected); +} + +static inline int +_Py_atomic_compare_exchange_uint32(volatile uint32_t *address, uint32_t expected, uint32_t value) +{ + return (long)expected == _InterlockedCompareExchange((volatile long*)address, (long)value, (long)expected); +} + +static inline int +_Py_atomic_compare_exchange_uint64(volatile uint64_t *address, uint64_t expected, uint64_t value) +{ + return (__int64)expected == _InterlockedCompareExchange64((volatile __int64*)address, (__int64)value, (__int64)expected); +} + +static inline int +_Py_atomic_compare_exchange_uintptr(volatile uintptr_t *address, uintptr_t expected, uintptr_t value) +{ + return (void *)expected == _InterlockedCompareExchangePointer((void * volatile *)address, (void *)value, (void *)expected); +} + +static inline int +_Py_atomic_compare_exchange_ssize(volatile Py_ssize_t *address, Py_ssize_t expected, Py_ssize_t value) +{ +#if SIZEOF_SIZE_T == 8 + return (__int64)expected == _InterlockedCompareExchange64((volatile __int64*)address, (__int64)value, 
(__int64)expected); +#else + return (long)expected == _InterlockedCompareExchange((volatile long*)address, (long)value, (long)expected); +#endif +} + +static inline int +_Py_atomic_compare_exchange_ptr(volatile void *address, void *expected, void *value) +{ + return (void *)expected == _InterlockedCompareExchangePointer((void * volatile *)address, (void *)value, (void *)expected); +} + +static inline int +_Py_atomic_exchange_int(volatile int *address, int value) +{ + return (int)_InterlockedExchange((volatile long*)address, (long)value); +} + +static inline int8_t +_Py_atomic_exchange_int8(volatile int8_t *address, int8_t value) +{ + return (int8_t)_InterlockedExchange8((volatile char*)address, (char)value); +} + +static inline int16_t +_Py_atomic_exchange_int16(volatile int16_t *address, int16_t value) +{ + return (int16_t)_InterlockedExchange16((volatile short*)address, (short)value); +} + +static inline int32_t +_Py_atomic_exchange_int32(volatile int32_t *address, int32_t value) +{ + return (int32_t)_InterlockedExchange((volatile long*)address, (long)value); +} + +static inline int64_t +_Py_atomic_exchange_int64(volatile int64_t *address, int64_t value) +{ +#if defined(_M_X64) || defined(_M_ARM64) + return (int64_t)_InterlockedExchange64((volatile __int64*)address, (__int64)value); +#else + for (;;) { + int64_t old_value = *address; + int64_t new_value = value; + if (_InterlockedCompareExchange64((volatile __int64*)address, (__int64)new_value, (__int64)old_value)) { + return old_value; + } + } +#endif +} + +static inline intptr_t +_Py_atomic_exchange_intptr(volatile intptr_t *address, intptr_t value) +{ + return (intptr_t)_InterlockedExchangePointer((void * volatile *)address, (void *)value); +} + +static inline unsigned int +_Py_atomic_exchange_uint(volatile unsigned int *address, unsigned int value) +{ + return (unsigned int)_InterlockedExchange((volatile long*)address, (long)value); +} + +static inline uint8_t +_Py_atomic_exchange_uint8(volatile uint8_t *address, uint8_t value) +{ + return (uint8_t)_InterlockedExchange8((volatile char*)address, (char)value); +} + +static inline uint16_t +_Py_atomic_exchange_uint16(volatile uint16_t *address, uint16_t value) +{ + return (uint16_t)_InterlockedExchange16((volatile short*)address, (short)value); +} + +static inline uint32_t +_Py_atomic_exchange_uint32(volatile uint32_t *address, uint32_t value) +{ + return (uint32_t)_InterlockedExchange((volatile long*)address, (long)value); +} + +static inline uint64_t +_Py_atomic_exchange_uint64(volatile uint64_t *address, uint64_t value) +{ + return (uint64_t)_Py_atomic_exchange_int64((volatile __int64*)address, (__int64)value); +} + +static inline uintptr_t +_Py_atomic_exchange_uintptr(volatile uintptr_t *address, uintptr_t value) +{ + return (uintptr_t)_InterlockedExchangePointer((void * volatile *)address, (void *)value); +} + +static inline Py_ssize_t +_Py_atomic_exchange_ssize(volatile Py_ssize_t *address, Py_ssize_t value) +{ +#if SIZEOF_SIZE_T == 8 + return (Py_ssize_t)_InterlockedExchange64((volatile __int64*)address, (__int64)value); +#else + return (Py_ssize_t)_InterlockedExchange((volatile long*)address, (long)value); +#endif +} + +static inline void * +_Py_atomic_exchange_ptr(volatile void *address, void *value) +{ + return (void *)_InterlockedExchangePointer((void * volatile *)address, (void *)value); +} + +static inline uint8_t +_Py_atomic_and_uint8(volatile uint8_t *address, uint8_t value) +{ + return (uint8_t)_InterlockedAnd8((volatile char*)address, (char)value); +} + +static inline 
uint16_t +_Py_atomic_and_uint16(volatile uint16_t *address, uint16_t value) +{ + return (uint16_t)_InterlockedAnd16((volatile short*)address, (short)value); +} + +static inline uint32_t +_Py_atomic_and_uint32(volatile uint32_t *address, uint32_t value) +{ + return (uint32_t)_InterlockedAnd((volatile long*)address, (long)value); +} + +static inline uint64_t +_Py_atomic_and_uint64(volatile uint64_t *address, uint64_t value) +{ +#if defined(_M_X64) || defined(_M_ARM64) + return (uint64_t)_InterlockedAnd64((volatile __int64*)address, (__int64)value); +#else + for (;;) { + uint64_t old_value = *address; + uint64_t new_value = old_value & value; + if (_InterlockedCompareExchange64((volatile __int64*)address, (__int64)new_value, (__int64)old_value)) { + return old_value; + } + } +#endif +} + +static inline uintptr_t +_Py_atomic_and_uintptr(volatile uintptr_t *address, uintptr_t value) +{ +#if SIZEOF_VOID_P == 8 + return (uintptr_t)_InterlockedAnd64((volatile __int64*)address, (__int64)value); +#else + return (uintptr_t)_InterlockedAnd((volatile long*)address, (long)value); +#endif +} + +static inline uint8_t +_Py_atomic_or_uint8(volatile uint8_t *address, uint8_t value) +{ + return (uint8_t)_InterlockedOr8((volatile char*)address, (char)value); +} + +static inline uint16_t +_Py_atomic_or_uint16(volatile uint16_t *address, uint16_t value) +{ + return (uint16_t)_InterlockedOr16((volatile short*)address, (short)value); +} + +static inline uint32_t +_Py_atomic_or_uint32(volatile uint32_t *address, uint32_t value) +{ + return (uint32_t)_InterlockedOr((volatile long*)address, (long)value); +} + +static inline uint64_t +_Py_atomic_or_uint64(volatile uint64_t *address, uint64_t value) +{ +#if defined(_M_X64) || defined(_M_ARM64) + return (uint64_t)_InterlockedOr64((volatile __int64*)address, (__int64)value); +#else + for (;;) { + uint64_t old_value = *address; + uint64_t new_value = old_value | value; + if (_InterlockedCompareExchange64((volatile __int64*)address, (__int64)new_value, (__int64)old_value)) { + return old_value; + } + } +#endif +} + +static inline uintptr_t +_Py_atomic_or_uintptr(volatile uintptr_t *address, uintptr_t value) +{ +#if SIZEOF_VOID_P == 8 + return (uintptr_t)_InterlockedOr64((volatile __int64*)address, (__int64)value); +#else + return (uintptr_t)_InterlockedOr((volatile long*)address, (long)value); +#endif +} + +static inline int +_Py_atomic_load_int(const volatile int *address) +{ +#if defined(_M_X64) || defined(_M_IX86) + return *address; +#elif defined(_M_ARM64) + return (int)__ldar32((unsigned __int32 volatile*)address); +#else +#error no implementation of _Py_atomic_load_int +#endif +} + +static inline int8_t +_Py_atomic_load_int8(const volatile int8_t *address) +{ +#if defined(_M_X64) || defined(_M_IX86) + return *address; +#elif defined(_M_ARM64) + return (int8_t)__ldar8((unsigned __int8 volatile*)address); +#else +#error no implementation of _Py_atomic_load_int8 +#endif +} + +static inline int16_t +_Py_atomic_load_int16(const volatile int16_t *address) +{ +#if defined(_M_X64) || defined(_M_IX86) + return *address; +#elif defined(_M_ARM64) + return (int16_t)__ldar16((unsigned __int16 volatile*)address); +#else +#error no implementation of _Py_atomic_load_int16 +#endif +} + +static inline int32_t +_Py_atomic_load_int32(const volatile int32_t *address) +{ +#if defined(_M_X64) || defined(_M_IX86) + return *address; +#elif defined(_M_ARM64) + return (int32_t)__ldar32((unsigned __int32 volatile*)address); +#else +#error no implementation of _Py_atomic_load_int32 +#endif +} + 
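Where no native 64-bit interlocked add/and/or exists (32-bit x86), the fallbacks above loop on `_InterlockedCompareExchange64`. That intrinsic returns the previous value of the destination, so the retry idiom detects success by comparing the return value against the value read at the top of the loop; a reference sketch of that idiom (`fetch_add_int64_fallback` is a hypothetical name, not part of the patch):

static inline int64_t
fetch_add_int64_fallback(volatile int64_t *address, int64_t value)
{
    for (;;) {
        int64_t old_value = *address;
        int64_t new_value = old_value + value;
        /* The swap happened only if the destination still held old_value. */
        if (_InterlockedCompareExchange64((volatile __int64 *)address,
                                          (__int64)new_value,
                                          (__int64)old_value) == old_value) {
            return old_value;
        }
        /* Another thread changed *address in the meantime; retry. */
    }
}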
+static inline int64_t +_Py_atomic_load_int64(const volatile int64_t *address) +{ +#if defined(_M_X64) || defined(_M_IX86) + return *address; +#elif defined(_M_ARM64) + return __ldar64((unsigned __int64 volatile*)address); +#else +#error no implementation of _Py_atomic_load_int64 +#endif +} + +static inline intptr_t +_Py_atomic_load_intptr(const volatile intptr_t *address) +{ +#if defined(_M_X64) || defined(_M_IX86) + return *address; +#elif defined(_M_ARM64) + return __ldar64((unsigned __int64 volatile*)address); +#else +#error no implementation of _Py_atomic_load_intptr +#endif +} + +static inline uint8_t +_Py_atomic_load_uint8(const volatile uint8_t *address) +{ +#if defined(_M_X64) || defined(_M_IX86) + return *address; +#elif defined(_M_ARM64) + return __ldar8((unsigned __int8 volatile*)address); +#else +#error no implementation of _Py_atomic_load_uint8 +#endif +} + +static inline uint16_t +_Py_atomic_load_uint16(const volatile uint16_t *address) +{ +#if defined(_M_X64) || defined(_M_IX86) + return *address; +#elif defined(_M_ARM64) + return __ldar16((unsigned __int16 volatile*)address); +#else +#error no implementation of _Py_atomic_load_uint16 +#endif +} + +static inline uint32_t +_Py_atomic_load_uint32(const volatile uint32_t *address) +{ +#if defined(_M_X64) || defined(_M_IX86) + return *address; +#elif defined(_M_ARM64) + return __ldar32((unsigned __int32 volatile*)address); +#else +#error no implementation of _Py_atomic_load_uint32 +#endif +} + +static inline uint64_t +_Py_atomic_load_uint64(const volatile uint64_t *address) +{ +#if defined(_M_X64) || defined(_M_IX86) + return *address; +#elif defined(_M_ARM64) + return __ldar64((unsigned __int64 volatile*)address); +#else +#error no implementation of _Py_atomic_load_uint64 +#endif +} + +static inline uintptr_t +_Py_atomic_load_uintptr(const volatile uintptr_t *address) +{ +#if defined(_M_X64) || defined(_M_IX86) + return *address; +#elif defined(_M_ARM64) + return __ldar64((unsigned __int64 volatile*)address); +#else +#error no implementation of _Py_atomic_load_uintptr +#endif +} + +static inline unsigned int +_Py_atomic_load_uint(const volatile unsigned int *address) +{ +#if defined(_M_X64) || defined(_M_IX86) + return *address; +#elif defined(_M_ARM64) + return __ldar32((unsigned __int32 volatile*)address); +#else +#error no implementation of _Py_atomic_load_uint +#endif +} + +static inline Py_ssize_t +_Py_atomic_load_ssize(const volatile Py_ssize_t* address) +{ +#if defined(_M_X64) || defined(_M_IX86) + return *address; +#elif defined(_M_ARM64) + return __ldar64((unsigned __int64 volatile*)address); +#else +#error no implementation of _Py_atomic_load_ssize +#endif +} + +static inline void * +_Py_atomic_load_ptr(const volatile void *address) +{ +#if defined(_M_X64) || defined(_M_IX86) + return *(void* volatile*)address; +#elif defined(_M_ARM64) + return (void *)__ldar64((unsigned __int64 volatile*)address); +#else +#error no implementation of _Py_atomic_load_ptr +#endif +} + +static inline int +_Py_atomic_load_int_relaxed(const volatile int* address) +{ + return *address; +} + +static inline int8_t +_Py_atomic_load_int8_relaxed(const volatile int8_t* address) +{ + return *address; +} + +static inline int16_t +_Py_atomic_load_int16_relaxed(const volatile int16_t* address) +{ + return *address; +} + +static inline int32_t +_Py_atomic_load_int32_relaxed(const volatile int32_t* address) +{ + return *address; +} + +static inline int64_t +_Py_atomic_load_int64_relaxed(const volatile int64_t* address) +{ + return *address; +} + 
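The `_relaxed` loads and stores in this fallback compile to plain volatile accesses: on the x86, x64 and ARM64 targets MSVC supports, aligned accesses of pointer size or smaller do not tear, but no ordering with other memory is implied. A sketch of the single-writer counter they suit (`events_handled`, `note_event` and `events_snapshot` are hypothetical, not part of the patch):

static unsigned int events_handled;        /* written by a single thread */

static void
note_event(void)
{
    unsigned int n = _Py_atomic_load_uint_relaxed(&events_handled);
    _Py_atomic_store_uint_relaxed(&events_handled, n + 1);
}

static unsigned int
events_snapshot(void)
{
    /* may lag the writer, but never returns a torn value */
    return _Py_atomic_load_uint_relaxed(&events_handled);
}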
+static inline intptr_t +_Py_atomic_load_intptr_relaxed(const volatile intptr_t* address) +{ + return *address; +} + +static inline uint8_t +_Py_atomic_load_uint8_relaxed(const volatile uint8_t* address) +{ + return *address; +} + +static inline uint16_t +_Py_atomic_load_uint16_relaxed(const volatile uint16_t* address) +{ + return *address; +} + +static inline uint32_t +_Py_atomic_load_uint32_relaxed(const volatile uint32_t* address) +{ + return *address; +} + +static inline uint64_t +_Py_atomic_load_uint64_relaxed(const volatile uint64_t* address) +{ + return *address; +} + +static inline uintptr_t +_Py_atomic_load_uintptr_relaxed(const volatile uintptr_t* address) +{ + return *address; +} + +static inline unsigned int +_Py_atomic_load_uint_relaxed(const volatile unsigned int *address) +{ + return *address; +} + +static inline Py_ssize_t +_Py_atomic_load_ssize_relaxed(const volatile Py_ssize_t* address) +{ + return *address; +} + +static inline void* +_Py_atomic_load_ptr_relaxed(const volatile void* address) +{ + return *(void * volatile *)address; +} + + + +static inline void +_Py_atomic_store_int(volatile int *address, int value) +{ + _InterlockedExchange((volatile long*)address, (long)value); +} + +static inline void +_Py_atomic_store_int8(volatile int8_t *address, int8_t value) +{ + _InterlockedExchange8((volatile char*)address, (char)value); +} + +static inline void +_Py_atomic_store_int16(volatile int16_t *address, int16_t value) +{ + _InterlockedExchange16((volatile short*)address, (short)value); +} + +static inline void +_Py_atomic_store_int32(volatile int32_t *address, int32_t value) +{ + _InterlockedExchange((volatile long*)address, (long)value); +} + +static inline void +_Py_atomic_store_int64(volatile int64_t *address, int64_t value) +{ + _Py_atomic_exchange_int64(address, value); +} + +static inline void +_Py_atomic_store_intptr(volatile intptr_t *address, intptr_t value) +{ + _InterlockedExchangePointer((void * volatile *)address, (void *)value); +} + +static inline void +_Py_atomic_store_uint8(volatile uint8_t *address, uint8_t value) +{ + _InterlockedExchange8((volatile char*)address, (char)value); +} + +static inline void +_Py_atomic_store_uint16(volatile uint16_t *address, uint16_t value) +{ + _InterlockedExchange16((volatile short*)address, (short)value); +} + +static inline void +_Py_atomic_store_uint32(volatile uint32_t *address, uint32_t value) +{ + _InterlockedExchange((volatile long*)address, (long)value); +} + +static inline void +_Py_atomic_store_uint64(volatile uint64_t *address, uint64_t value) +{ + _Py_atomic_exchange_int64((volatile __int64*)address, (__int64)value); +} + +static inline void +_Py_atomic_store_uintptr(volatile uintptr_t *address, uintptr_t value) +{ + _InterlockedExchangePointer((void * volatile *)address, (void *)value); +} + +static inline void +_Py_atomic_store_uint(volatile unsigned int *address, unsigned int value) +{ + _InterlockedExchange((volatile long*)address, (long)value); +} + +static inline void +_Py_atomic_store_ptr(volatile void *address, void *value) +{ + _InterlockedExchangePointer((void * volatile *)address, (void *)value); +} + +static inline void +_Py_atomic_store_ssize(volatile Py_ssize_t* address, Py_ssize_t value) +{ +#if SIZEOF_SIZE_T == 8 + _InterlockedExchange64((volatile __int64*)address, (__int64)value); +#else + _InterlockedExchange((volatile long*)address, (long)value); +#endif +} + + +static inline void +_Py_atomic_store_int_relaxed(volatile int* address, int value) +{ + *address = value; +} + +static inline void 
+_Py_atomic_store_int8_relaxed(volatile int8_t* address, int8_t value) +{ + *address = value; +} + +static inline void +_Py_atomic_store_int16_relaxed(volatile int16_t* address, int16_t value) +{ + *address = value; +} + +static inline void +_Py_atomic_store_int32_relaxed(volatile int32_t* address, int32_t value) +{ + *address = value; +} + +static inline void +_Py_atomic_store_int64_relaxed(volatile int64_t* address, int64_t value) +{ + *address = value; +} + +static inline void +_Py_atomic_store_intptr_relaxed(volatile intptr_t* address, intptr_t value) +{ + *address = value; +} + +static inline void +_Py_atomic_store_uint8_relaxed(volatile uint8_t* address, uint8_t value) +{ + *address = value; +} + +static inline void +_Py_atomic_store_uint16_relaxed(volatile uint16_t* address, uint16_t value) +{ + *address = value; +} + +static inline void +_Py_atomic_store_uint32_relaxed(volatile uint32_t* address, uint32_t value) +{ + *address = value; +} + +static inline void +_Py_atomic_store_uint64_relaxed(volatile uint64_t* address, uint64_t value) +{ + *address = value; +} + +static inline void +_Py_atomic_store_uintptr_relaxed(volatile uintptr_t* address, uintptr_t value) +{ + *address = value; +} + +static inline void +_Py_atomic_store_uint_relaxed(volatile unsigned int *address, unsigned int value) +{ + *address = value; +} + +static inline void +_Py_atomic_store_ptr_relaxed(volatile void* address, void* value) +{ + *(void * volatile *)address = value; +} + +static inline void +_Py_atomic_store_ssize_relaxed(volatile Py_ssize_t* address, Py_ssize_t value) +{ + *address = value; +} + +static inline void +_Py_atomic_store_uint64_release(volatile uint64_t* address, uint64_t value) +{ +#if defined(_M_X64) || defined(_M_IX86) + *address = value; +#elif defined(_M_ARM64) + __stlr64(address, value); +#else +#error no implementation of _Py_atomic_store_uint64_release +#endif +} + +static inline void +_Py_atomic_store_ptr_release(volatile void* address, void* value) +{ +#if defined(_M_X64) || defined(_M_IX86) + *(void * volatile *)address = value; +#elif defined(_M_ARM64) + __stlr64(address, (uintptr_t)value); +#else +#error no implementation of _Py_atomic_store_ptr_release +#endif +} + + static inline void +_Py_atomic_fence_seq_cst(void) +{ +#if defined(_M_ARM64) + __dmb(_ARM64_BARRIER_ISH); +#elif defined(_M_X64) + __faststorefence(); +#elif defined(_M_IX86) + _mm_mfence(); +#else +#error no implementation of _Py_atomic_fence_seq_cst +#endif +} + + static inline void +_Py_atomic_fence_release(void) +{ +#if defined(_M_ARM64) + __dmb(_ARM64_BARRIER_ISH); +#elif defined(_M_X64) || defined(_M_IX86) + _ReadWriteBarrier(); +#else +#error no implementation of _Py_atomic_fence_release +#endif +} diff --git a/Include/pyatomic_std.h b/Include/pyatomic_std.h new file mode 100644 index 00000000000000..afc4e76eed120d --- /dev/null +++ b/Include/pyatomic_std.h @@ -0,0 +1,799 @@ +#ifndef Py_ATOMIC_STD_H +# error "this header file must not be included directly" +#endif + +// This is the implementation of Python atomic operations using C++11 or C11 +// atomics. Note that the pyatomic_gcc.h implementation is preferred for GCC +// compatible compilers, even if they support C++11 atomics. 
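For comparison, this is roughly the C11 form that the wrappers in this file reduce to (a sketch, not part of the patch; `fetch_add_int64_c11` and `load_int64_relaxed_c11` are hypothetical names). The standard operations act on `_Atomic`-qualified objects and are sequentially consistent unless an explicit memory order is given; the wrappers below obtain the `_Atomic` view by casting the plain `volatile` pointers they are handed:

#include <stdatomic.h>
#include <stdint.h>

/* Direct C11 equivalent of a sequentially consistent fetch-add on int64_t. */
static inline int64_t
fetch_add_int64_c11(_Atomic int64_t *obj, int64_t value)
{
    return atomic_fetch_add_explicit(obj, value, memory_order_seq_cst);
}

/* Relaxed load, the form behind the *_relaxed wrappers. */
static inline int64_t
load_int64_relaxed_c11(_Atomic int64_t *obj)
{
    return atomic_load_explicit(obj, memory_order_relaxed);
}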
+ +#ifdef __cplusplus +extern "C++" { +#include +} +#define _Py_USING_STD using namespace std; +#define _Atomic(tp) atomic +#else +#include +#define _Py_USING_STD +#endif + + +static inline int +_Py_atomic_add_int(volatile int *address, int value) +{ + _Py_USING_STD + return atomic_fetch_add((volatile _Atomic(int)*)address, value); +} + +static inline int8_t +_Py_atomic_add_int8(volatile int8_t *address, int8_t value) +{ + _Py_USING_STD + return atomic_fetch_add((volatile _Atomic(int8_t)*)address, value); +} + +static inline int16_t +_Py_atomic_add_int16(volatile int16_t *address, int16_t value) +{ + _Py_USING_STD + return atomic_fetch_add((volatile _Atomic(int16_t)*)address, value); +} + +static inline int32_t +_Py_atomic_add_int32(volatile int32_t *address, int32_t value) +{ + _Py_USING_STD + return atomic_fetch_add((volatile _Atomic(int32_t)*)address, value); +} + +static inline int64_t +_Py_atomic_add_int64(volatile int64_t *address, int64_t value) +{ + _Py_USING_STD + return atomic_fetch_add((volatile _Atomic(int64_t)*)address, value); +} + +static inline intptr_t +_Py_atomic_add_intptr(volatile intptr_t *address, intptr_t value) +{ + _Py_USING_STD + return atomic_fetch_add((volatile _Atomic(intptr_t)*)address, value); +} + +static inline unsigned int +_Py_atomic_add_uint(volatile unsigned int *address, unsigned int value) +{ + _Py_USING_STD + return atomic_fetch_add((volatile _Atomic(unsigned int)*)address, value); +} + +static inline uint8_t +_Py_atomic_add_uint8(volatile uint8_t *address, uint8_t value) +{ + _Py_USING_STD + return atomic_fetch_add((volatile _Atomic(uint8_t)*)address, value); +} + +static inline uint16_t +_Py_atomic_add_uint16(volatile uint16_t *address, uint16_t value) +{ + _Py_USING_STD + return atomic_fetch_add((volatile _Atomic(uint16_t)*)address, value); +} + +static inline uint32_t +_Py_atomic_add_uint32(volatile uint32_t *address, uint32_t value) +{ + _Py_USING_STD + return atomic_fetch_add((volatile _Atomic(uint32_t)*)address, value); +} + +static inline uint64_t +_Py_atomic_add_uint64(volatile uint64_t *address, uint64_t value) +{ + _Py_USING_STD + return atomic_fetch_add((volatile _Atomic(uint64_t)*)address, value); +} + +static inline uintptr_t +_Py_atomic_add_uintptr(volatile uintptr_t *address, uintptr_t value) +{ + _Py_USING_STD + return atomic_fetch_add((volatile _Atomic(uintptr_t)*)address, value); +} + +static inline Py_ssize_t +_Py_atomic_add_ssize(volatile Py_ssize_t *address, Py_ssize_t value) +{ + _Py_USING_STD + return atomic_fetch_add((volatile _Atomic(Py_ssize_t)*)address, value); +} + +static inline int +_Py_atomic_compare_exchange_int(volatile int *address, int expected, int value) +{ + _Py_USING_STD + return atomic_compare_exchange_strong((volatile _Atomic(int)*)address, &expected, value); +} + +static inline int +_Py_atomic_compare_exchange_int8(volatile int8_t *address, int8_t expected, int8_t value) +{ + _Py_USING_STD + return atomic_compare_exchange_strong((volatile _Atomic(int8_t)*)address, &expected, value); +} + +static inline int +_Py_atomic_compare_exchange_int16(volatile int16_t *address, int16_t expected, int16_t value) +{ + _Py_USING_STD + return atomic_compare_exchange_strong((volatile _Atomic(int16_t)*)address, &expected, value); +} + +static inline int +_Py_atomic_compare_exchange_int32(volatile int32_t *address, int32_t expected, int32_t value) +{ + _Py_USING_STD + return atomic_compare_exchange_strong((volatile _Atomic(int32_t)*)address, &expected, value); +} + +static inline int +_Py_atomic_compare_exchange_int64(volatile 
int64_t *address, int64_t expected, int64_t value) +{ + _Py_USING_STD + return atomic_compare_exchange_strong((volatile _Atomic(int64_t)*)address, &expected, value); +} + +static inline int +_Py_atomic_compare_exchange_intptr(volatile intptr_t *address, intptr_t expected, intptr_t value) +{ + _Py_USING_STD + return atomic_compare_exchange_strong((volatile _Atomic(intptr_t)*)address, &expected, value); +} + +static inline int +_Py_atomic_compare_exchange_uint(volatile unsigned int *address, unsigned int expected, unsigned int value) +{ + _Py_USING_STD + return atomic_compare_exchange_strong((volatile _Atomic(unsigned int)*)address, &expected, value); +} + +static inline int +_Py_atomic_compare_exchange_uint8(volatile uint8_t *address, uint8_t expected, uint8_t value) +{ + _Py_USING_STD + return atomic_compare_exchange_strong((volatile _Atomic(uint8_t)*)address, &expected, value); +} + +static inline int +_Py_atomic_compare_exchange_uint16(volatile uint16_t *address, uint16_t expected, uint16_t value) +{ + _Py_USING_STD + return atomic_compare_exchange_strong((volatile _Atomic(uint16_t)*)address, &expected, value); +} + +static inline int +_Py_atomic_compare_exchange_uint32(volatile uint32_t *address, uint32_t expected, uint32_t value) +{ + _Py_USING_STD + return atomic_compare_exchange_strong((volatile _Atomic(uint32_t)*)address, &expected, value); +} + +static inline int +_Py_atomic_compare_exchange_uint64(volatile uint64_t *address, uint64_t expected, uint64_t value) +{ + _Py_USING_STD + return atomic_compare_exchange_strong((volatile _Atomic(uint64_t)*)address, &expected, value); +} + +static inline int +_Py_atomic_compare_exchange_uintptr(volatile uintptr_t *address, uintptr_t expected, uintptr_t value) +{ + _Py_USING_STD + return atomic_compare_exchange_strong((volatile _Atomic(uintptr_t)*)address, &expected, value); +} + +static inline int +_Py_atomic_compare_exchange_ssize(volatile Py_ssize_t *address, Py_ssize_t expected, Py_ssize_t value) +{ + _Py_USING_STD + return atomic_compare_exchange_strong((volatile _Atomic(Py_ssize_t)*)address, &expected, value); +} + +static inline int +_Py_atomic_compare_exchange_ptr(volatile void *address, void *expected, void *value) +{ + _Py_USING_STD + return atomic_compare_exchange_strong((volatile _Atomic(void *)*)address, &expected, value); +} + + +static inline int +_Py_atomic_exchange_int(volatile int *address, int value) +{ + _Py_USING_STD + return atomic_exchange((volatile _Atomic(int)*)address, value); +} + +static inline int8_t +_Py_atomic_exchange_int8(volatile int8_t *address, int8_t value) +{ + _Py_USING_STD + return atomic_exchange((volatile _Atomic(int8_t)*)address, value); +} + +static inline int16_t +_Py_atomic_exchange_int16(volatile int16_t *address, int16_t value) +{ + _Py_USING_STD + return atomic_exchange((volatile _Atomic(int16_t)*)address, value); +} + +static inline int32_t +_Py_atomic_exchange_int32(volatile int32_t *address, int32_t value) +{ + _Py_USING_STD + return atomic_exchange((volatile _Atomic(int32_t)*)address, value); +} + +static inline int64_t +_Py_atomic_exchange_int64(volatile int64_t *address, int64_t value) +{ + _Py_USING_STD + return atomic_exchange((volatile _Atomic(int64_t)*)address, value); +} + +static inline intptr_t +_Py_atomic_exchange_intptr(volatile intptr_t *address, intptr_t value) +{ + _Py_USING_STD + return atomic_exchange((volatile _Atomic(intptr_t)*)address, value); +} + +static inline unsigned int +_Py_atomic_exchange_uint(volatile unsigned int *address, unsigned int value) +{ + _Py_USING_STD + 
return atomic_exchange((volatile _Atomic(unsigned int)*)address, value); +} + +static inline uint8_t +_Py_atomic_exchange_uint8(volatile uint8_t *address, uint8_t value) +{ + _Py_USING_STD + return atomic_exchange((volatile _Atomic(uint8_t)*)address, value); +} + +static inline uint16_t +_Py_atomic_exchange_uint16(volatile uint16_t *address, uint16_t value) +{ + _Py_USING_STD + return atomic_exchange((volatile _Atomic(uint16_t)*)address, value); +} + +static inline uint32_t +_Py_atomic_exchange_uint32(volatile uint32_t *address, uint32_t value) +{ + _Py_USING_STD + return atomic_exchange((volatile _Atomic(uint32_t)*)address, value); +} + +static inline uint64_t +_Py_atomic_exchange_uint64(volatile uint64_t *address, uint64_t value) +{ + _Py_USING_STD + return atomic_exchange((volatile _Atomic(uint64_t)*)address, value); +} + +static inline uintptr_t +_Py_atomic_exchange_uintptr(volatile uintptr_t *address, uintptr_t value) +{ + _Py_USING_STD + return atomic_exchange((volatile _Atomic(uintptr_t)*)address, value); +} + +static inline Py_ssize_t +_Py_atomic_exchange_ssize(volatile Py_ssize_t *address, Py_ssize_t value) +{ + _Py_USING_STD + return atomic_exchange((volatile _Atomic(Py_ssize_t)*)address, value); +} + +static inline void * +_Py_atomic_exchange_ptr(volatile void *address, void *value) +{ + _Py_USING_STD + return atomic_exchange((volatile _Atomic(void *)*)address, value); +} + +static inline uint8_t +_Py_atomic_and_uint8(volatile uint8_t *address, uint8_t value) +{ + _Py_USING_STD + return atomic_fetch_and((volatile _Atomic(uint8_t)*)address, value); +} + +static inline uint16_t +_Py_atomic_and_uint16(volatile uint16_t *address, uint16_t value) +{ + _Py_USING_STD + return atomic_fetch_and((volatile _Atomic(uint16_t)*)address, value); +} + + +static inline uint32_t +_Py_atomic_and_uint32(volatile uint32_t *address, uint32_t value) +{ + _Py_USING_STD + return atomic_fetch_and((volatile _Atomic(uint32_t)*)address, value); +} + +static inline uint64_t +_Py_atomic_and_uint64(volatile uint64_t *address, uint64_t value) +{ + _Py_USING_STD + return atomic_fetch_and((volatile _Atomic(uint64_t)*)address, value); +} + +static inline uintptr_t +_Py_atomic_and_uintptr(volatile uintptr_t *address, uintptr_t value) +{ + _Py_USING_STD + return atomic_fetch_and((volatile _Atomic(uintptr_t)*)address, value); +} + +static inline uint8_t +_Py_atomic_or_uint8(volatile uint8_t *address, uint8_t value) +{ + _Py_USING_STD + return atomic_fetch_or((volatile _Atomic(uint8_t)*)address, value); +} + +static inline uint16_t +_Py_atomic_or_uint16(volatile uint16_t *address, uint16_t value) +{ + _Py_USING_STD + return atomic_fetch_or((volatile _Atomic(uint16_t)*)address, value); +} + +static inline uint32_t +_Py_atomic_or_uint32(volatile uint32_t *address, uint32_t value) +{ + _Py_USING_STD + return atomic_fetch_or((volatile _Atomic(uint32_t)*)address, value); +} + +static inline uint64_t +_Py_atomic_or_uint64(volatile uint64_t *address, uint64_t value) +{ + _Py_USING_STD + return atomic_fetch_or((volatile _Atomic(uint64_t)*)address, value); +} + +static inline uintptr_t +_Py_atomic_or_uintptr(volatile uintptr_t *address, uintptr_t value) +{ + _Py_USING_STD + return atomic_fetch_or((volatile _Atomic(uintptr_t)*)address, value); +} + +static inline int +_Py_atomic_load_int(const volatile int *address) +{ + _Py_USING_STD + return atomic_load((const volatile _Atomic(int)*)address); +} + +static inline int8_t +_Py_atomic_load_int8(const volatile int8_t *address) +{ + _Py_USING_STD + return atomic_load((const 
volatile _Atomic(int8_t)*)address);
+}
+
+static inline int16_t
+_Py_atomic_load_int16(const volatile int16_t *address)
+{
+    _Py_USING_STD
+    return atomic_load((const volatile _Atomic(int16_t)*)address);
+}
+
+static inline int32_t
+_Py_atomic_load_int32(const volatile int32_t *address)
+{
+    _Py_USING_STD
+    return atomic_load((const volatile _Atomic(int32_t)*)address);
+}
+
+static inline int64_t
+_Py_atomic_load_int64(const volatile int64_t *address)
+{
+    _Py_USING_STD
+    return atomic_load((const volatile _Atomic(int64_t)*)address);
+}
+
+static inline intptr_t
+_Py_atomic_load_intptr(const volatile intptr_t *address)
+{
+    _Py_USING_STD
+    return atomic_load((const volatile _Atomic(intptr_t)*)address);
+}
+
+static inline uint8_t
+_Py_atomic_load_uint8(const volatile uint8_t *address)
+{
+    _Py_USING_STD
+    return atomic_load((const volatile _Atomic(uint8_t)*)address);
+}
+
+static inline uint16_t
+_Py_atomic_load_uint16(const volatile uint16_t *address)
+{
+    _Py_USING_STD
+    return atomic_load((const volatile _Atomic(uint16_t)*)address);
+}
+
+static inline uint32_t
+_Py_atomic_load_uint32(const volatile uint32_t *address)
+{
+    _Py_USING_STD
+    return atomic_load((const volatile _Atomic(uint32_t)*)address);
+}
+
+static inline uint64_t
+_Py_atomic_load_uint64(const volatile uint64_t *address)
+{
+    _Py_USING_STD
+    return atomic_load((const volatile _Atomic(uint64_t)*)address);
+}
+
+static inline uintptr_t
+_Py_atomic_load_uintptr(const volatile uintptr_t *address)
+{
+    _Py_USING_STD
+    return atomic_load((const volatile _Atomic(uintptr_t)*)address);
+}
+
+static inline unsigned int
+_Py_atomic_load_uint(const volatile unsigned int *address)
+{
+    _Py_USING_STD
+    return atomic_load((const volatile _Atomic(unsigned int)*)address);
+}
+
+static inline Py_ssize_t
+_Py_atomic_load_ssize(const volatile Py_ssize_t *address)
+{
+    _Py_USING_STD
+    return atomic_load((const volatile _Atomic(Py_ssize_t)*)address);
+}
+
+static inline void *
+_Py_atomic_load_ptr(const volatile void *address)
+{
+    _Py_USING_STD
+    return atomic_load((const volatile _Atomic(void*)*)address);
+}
+
+
+static inline int
+_Py_atomic_load_int_relaxed(const volatile int *address)
+{
+    _Py_USING_STD
+    return atomic_load_explicit((const volatile _Atomic(int)*)address, memory_order_relaxed);
+}
+
+static inline int8_t
+_Py_atomic_load_int8_relaxed(const volatile int8_t *address)
+{
+    _Py_USING_STD
+    return atomic_load_explicit((const volatile _Atomic(int8_t)*)address, memory_order_relaxed);
+}
+
+static inline int16_t
+_Py_atomic_load_int16_relaxed(const volatile int16_t *address)
+{
+    _Py_USING_STD
+    return atomic_load_explicit((const volatile _Atomic(int16_t)*)address, memory_order_relaxed);
+}
+
+static inline int32_t
+_Py_atomic_load_int32_relaxed(const volatile int32_t *address)
+{
+    _Py_USING_STD
+    return atomic_load_explicit((const volatile _Atomic(int32_t)*)address, memory_order_relaxed);
+}
+
+static inline int64_t
+_Py_atomic_load_int64_relaxed(const volatile int64_t *address)
+{
+    _Py_USING_STD
+    return atomic_load_explicit((const volatile _Atomic(int64_t)*)address, memory_order_relaxed);
+}
+
+static inline intptr_t
+_Py_atomic_load_intptr_relaxed(const volatile intptr_t *address)
+{
+    _Py_USING_STD
+    return atomic_load_explicit((const volatile _Atomic(intptr_t)*)address, memory_order_relaxed);
+}
+
+static inline uint8_t
+_Py_atomic_load_uint8_relaxed(const volatile uint8_t *address)
+{
+    _Py_USING_STD
+    return atomic_load_explicit((const volatile _Atomic(uint8_t)*)address, memory_order_relaxed);
+}
+
+static inline
uint16_t +_Py_atomic_load_uint16_relaxed(const volatile uint16_t *address) +{ + _Py_USING_STD + return atomic_load_explicit((const volatile _Atomic(uint16_t)*)address, memory_order_relaxed); +} + +static inline uint32_t +_Py_atomic_load_uint32_relaxed(const volatile uint32_t *address) +{ + _Py_USING_STD + return atomic_load_explicit((const volatile _Atomic(uint32_t)*)address, memory_order_relaxed); +} + +static inline uint64_t +_Py_atomic_load_uint64_relaxed(const volatile uint64_t *address) +{ + _Py_USING_STD + return atomic_load_explicit((const volatile _Atomic(uint64_t)*)address, memory_order_relaxed); +} + +static inline uintptr_t +_Py_atomic_load_uintptr_relaxed(const volatile uintptr_t *address) +{ + _Py_USING_STD + return atomic_load_explicit((const volatile _Atomic(uintptr_t)*)address, memory_order_relaxed); +} + +static inline unsigned int +_Py_atomic_load_uint_relaxed(const volatile unsigned int *address) +{ + _Py_USING_STD + return atomic_load_explicit((const volatile _Atomic(unsigned int)*)address, memory_order_relaxed); +} + +static inline Py_ssize_t +_Py_atomic_load_ssize_relaxed(const volatile Py_ssize_t *address) +{ + _Py_USING_STD + return atomic_load_explicit((const volatile _Atomic(Py_ssize_t)*)address, memory_order_relaxed); +} + +static inline void * +_Py_atomic_load_ptr_relaxed(const volatile void *address) +{ + _Py_USING_STD + return atomic_load_explicit((const volatile _Atomic(void*)*)address, memory_order_relaxed); +} + +static inline void +_Py_atomic_store_int(volatile int *address, int value) +{ + _Py_USING_STD + atomic_store((volatile _Atomic(int)*)address, value); +} + +static inline void +_Py_atomic_store_int8(volatile int8_t *address, int8_t value) +{ + _Py_USING_STD + atomic_store((volatile _Atomic(int8_t)*)address, value); +} + +static inline void +_Py_atomic_store_int16(volatile int16_t *address, int16_t value) +{ + _Py_USING_STD + atomic_store((volatile _Atomic(int16_t)*)address, value); +} + +static inline void +_Py_atomic_store_int32(volatile int32_t *address, int32_t value) +{ + _Py_USING_STD + atomic_store((volatile _Atomic(int32_t)*)address, value); +} + +static inline void +_Py_atomic_store_int64(volatile int64_t *address, int64_t value) +{ + _Py_USING_STD + atomic_store((volatile _Atomic(int64_t)*)address, value); +} + +static inline void +_Py_atomic_store_intptr(volatile intptr_t *address, intptr_t value) +{ + _Py_USING_STD + atomic_store((volatile _Atomic(intptr_t)*)address, value); +} + +static inline void +_Py_atomic_store_uint8(volatile uint8_t *address, uint8_t value) +{ + _Py_USING_STD + atomic_store((volatile _Atomic(uint8_t)*)address, value); +} + +static inline void +_Py_atomic_store_uint16(volatile uint16_t *address, uint16_t value) +{ + _Py_USING_STD + atomic_store((volatile _Atomic(uint16_t)*)address, value); +} + +static inline void +_Py_atomic_store_uint32(volatile uint32_t *address, uint32_t value) +{ + _Py_USING_STD + atomic_store((volatile _Atomic(uint32_t)*)address, value); +} + +static inline void +_Py_atomic_store_uint64(volatile uint64_t *address, uint64_t value) +{ + _Py_USING_STD + atomic_store((volatile _Atomic(uint64_t)*)address, value); +} + +static inline void +_Py_atomic_store_uintptr(volatile uintptr_t *address, uintptr_t value) +{ + _Py_USING_STD + atomic_store((volatile _Atomic(uintptr_t)*)address, value); +} + +static inline void +_Py_atomic_store_uint(volatile unsigned int *address, unsigned int value) +{ + _Py_USING_STD + atomic_store((volatile _Atomic(unsigned int)*)address, value); +} + +static inline void 
+_Py_atomic_store_ptr(volatile void *address, void *value) +{ + _Py_USING_STD + atomic_store((volatile _Atomic(void*)*)address, value); +} + +static inline void +_Py_atomic_store_ssize(volatile Py_ssize_t *address, Py_ssize_t value) +{ + _Py_USING_STD + atomic_store((volatile _Atomic(Py_ssize_t)*)address, value); +} + +static inline void +_Py_atomic_store_int_relaxed(volatile int *address, int value) +{ + _Py_USING_STD + atomic_store_explicit((volatile _Atomic(int)*)address, value, memory_order_relaxed); +} + +static inline void +_Py_atomic_store_int8_relaxed(volatile int8_t *address, int8_t value) +{ + _Py_USING_STD + atomic_store_explicit((volatile _Atomic(int8_t)*)address, value, memory_order_relaxed); +} + +static inline void +_Py_atomic_store_int16_relaxed(volatile int16_t *address, int16_t value) +{ + _Py_USING_STD + atomic_store_explicit((volatile _Atomic(int16_t)*)address, value, memory_order_relaxed); +} + +static inline void +_Py_atomic_store_int32_relaxed(volatile int32_t *address, int32_t value) +{ + _Py_USING_STD + atomic_store_explicit((volatile _Atomic(int32_t)*)address, value, memory_order_relaxed); +} + +static inline void +_Py_atomic_store_int64_relaxed(volatile int64_t *address, int64_t value) +{ + _Py_USING_STD + atomic_store_explicit((volatile _Atomic(int64_t)*)address, value, memory_order_relaxed); +} + +static inline void +_Py_atomic_store_intptr_relaxed(volatile intptr_t *address, intptr_t value) +{ + _Py_USING_STD + atomic_store_explicit((volatile _Atomic(intptr_t)*)address, value, memory_order_relaxed); +} + +static inline void +_Py_atomic_store_uint8_relaxed(volatile uint8_t *address, uint8_t value) +{ + _Py_USING_STD + atomic_store_explicit((volatile _Atomic(uint8_t)*)address, value, memory_order_relaxed); +} + +static inline void +_Py_atomic_store_uint16_relaxed(volatile uint16_t *address, uint16_t value) +{ + _Py_USING_STD + atomic_store_explicit((volatile _Atomic(uint16_t)*)address, value, memory_order_relaxed); +} + +static inline void +_Py_atomic_store_uint32_relaxed(volatile uint32_t *address, uint32_t value) +{ + _Py_USING_STD + atomic_store_explicit((volatile _Atomic(uint32_t)*)address, value, memory_order_relaxed); +} + +static inline void +_Py_atomic_store_uint64_relaxed(volatile uint64_t *address, uint64_t value) +{ + _Py_USING_STD + atomic_store_explicit((volatile _Atomic(uint64_t)*)address, value, memory_order_relaxed); +} + +static inline void +_Py_atomic_store_uintptr_relaxed(volatile uintptr_t *address, uintptr_t value) +{ + _Py_USING_STD + atomic_store_explicit((volatile _Atomic(uintptr_t)*)address, value, memory_order_relaxed); +} + +static inline void +_Py_atomic_store_uint_relaxed(volatile unsigned int *address, unsigned int value) +{ + _Py_USING_STD + atomic_store_explicit((volatile _Atomic(unsigned int)*)address, value, memory_order_relaxed); +} + +static inline void +_Py_atomic_store_ptr_relaxed(volatile void *address, void *value) +{ + _Py_USING_STD + atomic_store_explicit((volatile _Atomic(void*)*)address, value, memory_order_relaxed); +} + +static inline void +_Py_atomic_store_ssize_relaxed(volatile Py_ssize_t *address, Py_ssize_t value) +{ + _Py_USING_STD + atomic_store_explicit((volatile _Atomic(Py_ssize_t)*)address, value, memory_order_relaxed); +} + +static inline void +_Py_atomic_store_uint64_release(volatile uint64_t *address, uint64_t value) +{ + _Py_USING_STD + atomic_store_explicit((volatile _Atomic(uint64_t)*)address, value, memory_order_release); +} + +static inline void +_Py_atomic_store_ptr_release(volatile void *address, 
void *value) +{ + _Py_USING_STD + atomic_store_explicit((volatile _Atomic(void*)*)address, value, memory_order_release); +} + + static inline void +_Py_atomic_fence_seq_cst(void) +{ + _Py_USING_STD + atomic_thread_fence(memory_order_seq_cst); +} + + static inline void +_Py_atomic_fence_release(void) +{ + _Py_USING_STD + atomic_thread_fence(memory_order_release); +} diff --git a/Lib/test/test_capi/test_pyatomic.py b/Lib/test/test_capi/test_pyatomic.py new file mode 100644 index 00000000000000..846d6d50c25969 --- /dev/null +++ b/Lib/test/test_capi/test_pyatomic.py @@ -0,0 +1,15 @@ +import unittest +from test.support import import_helper + +# Skip this test if the _testcapi module isn't available. +_testcapi = import_helper.import_module('_testcapi') + +class PyAtomicTests(unittest.TestCase): + pass + +for name in sorted(dir(_testcapi)): + if name.startswith('test_atomic'): + setattr(PyAtomicTests, name, getattr(_testcapi, name)) + +if __name__ == "__main__": + unittest.main() diff --git a/Makefile.pre.in b/Makefile.pre.in index 9be5c3b50eb9ee..04b230b15b8a4d 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -1654,6 +1654,9 @@ PYTHON_HEADERS= \ $(srcdir)/Include/osdefs.h \ $(srcdir)/Include/osmodule.h \ $(srcdir)/Include/patchlevel.h \ + $(srcdir)/Include/pyatomic.h \ + $(srcdir)/Include/pyatomic_gcc.h \ + $(srcdir)/Include/pyatomic_std.h \ $(srcdir)/Include/pybuffer.h \ $(srcdir)/Include/pycapsule.h \ $(srcdir)/Include/pydtrace.h \ diff --git a/Misc/NEWS.d/next/C API/2023-08-22-13-00-54.bpo-108337.wceHZm.rst b/Misc/NEWS.d/next/C API/2023-08-22-13-00-54.bpo-108337.wceHZm.rst new file mode 100644 index 00000000000000..476123a051bb3f --- /dev/null +++ b/Misc/NEWS.d/next/C API/2023-08-22-13-00-54.bpo-108337.wceHZm.rst @@ -0,0 +1 @@ +Add atomic operations on additional data types in pyatomic.h. diff --git a/Modules/Setup.stdlib.in b/Modules/Setup.stdlib.in index 689f1d42ef0eee..e913ee405573d3 100644 --- a/Modules/Setup.stdlib.in +++ b/Modules/Setup.stdlib.in @@ -159,7 +159,7 @@ @MODULE__XXTESTFUZZ_TRUE@_xxtestfuzz _xxtestfuzz/_xxtestfuzz.c _xxtestfuzz/fuzzer.c @MODULE__TESTBUFFER_TRUE@_testbuffer _testbuffer.c @MODULE__TESTINTERNALCAPI_TRUE@_testinternalcapi _testinternalcapi.c -@MODULE__TESTCAPI_TRUE@_testcapi _testcapimodule.c _testcapi/vectorcall.c _testcapi/vectorcall_limited.c _testcapi/heaptype.c _testcapi/abstract.c _testcapi/unicode.c _testcapi/dict.c _testcapi/getargs.c _testcapi/datetime.c _testcapi/docstring.c _testcapi/mem.c _testcapi/watchers.c _testcapi/long.c _testcapi/float.c _testcapi/structmember.c _testcapi/exceptions.c _testcapi/code.c _testcapi/buffer.c _testcapi/pyos.c _testcapi/immortal.c _testcapi/heaptype_relative.c _testcapi/gc.c +@MODULE__TESTCAPI_TRUE@_testcapi _testcapimodule.c _testcapi/vectorcall.c _testcapi/vectorcall_limited.c _testcapi/heaptype.c _testcapi/abstract.c _testcapi/unicode.c _testcapi/dict.c _testcapi/getargs.c _testcapi/datetime.c _testcapi/docstring.c _testcapi/mem.c _testcapi/watchers.c _testcapi/long.c _testcapi/float.c _testcapi/structmember.c _testcapi/exceptions.c _testcapi/code.c _testcapi/buffer.c _testcapi/pyatomic.c _testcapi/pyos.c _testcapi/immortal.c _testcapi/heaptype_relative.c _testcapi/gc.c @MODULE__TESTCLINIC_TRUE@_testclinic _testclinic.c # Some testing modules MUST be built as shared libraries. 
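
A minimal usage sketch of the new API (illustrative only, not part of the patch; the refcounter struct and helper names below are hypothetical):

    #include "Python.h"
    #include "pyatomic.h"

    typedef struct {
        int users;    /* shared across threads; accessed only via _Py_atomic_* */
    } refcounter;

    static void
    refcounter_acquire(refcounter *rc)
    {
        /* sequentially consistent fetch-add; the old value it returns is ignored */
        _Py_atomic_add_int(&rc->users, 1);
    }

    static int
    refcounter_release(refcounter *rc)
    {
        /* _Py_atomic_add_int returns the previous value, so 1 means the last
           reference was just dropped */
        return _Py_atomic_add_int(&rc->users, -1) == 1;
    }

    static int
    refcounter_peek(refcounter *rc)
    {
        /* relaxed load: atomic, but with no ordering guarantees */
        return _Py_atomic_load_int_relaxed(&rc->users);
    }
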
diff --git a/Modules/_testcapi/parts.h b/Modules/_testcapi/parts.h index 65ebf80bcd1e95..8e51b064b7531f 100644 --- a/Modules/_testcapi/parts.h +++ b/Modules/_testcapi/parts.h @@ -40,6 +40,7 @@ int _PyTestCapi_Init_Structmember(PyObject *module); int _PyTestCapi_Init_Exceptions(PyObject *module); int _PyTestCapi_Init_Code(PyObject *module); int _PyTestCapi_Init_Buffer(PyObject *module); +int _PyTestCapi_Init_PyAtomic(PyObject *module); int _PyTestCapi_Init_PyOS(PyObject *module); int _PyTestCapi_Init_Immortal(PyObject *module); int _PyTestCapi_Init_GC(PyObject *mod); diff --git a/Modules/_testcapi/pyatomic.c b/Modules/_testcapi/pyatomic.c new file mode 100644 index 00000000000000..f435dc55ac26b9 --- /dev/null +++ b/Modules/_testcapi/pyatomic.c @@ -0,0 +1,165 @@ +/* + * C Extension module to smoke test pyatomic.h API. + * + * This only tests basic functionality, not any synchronizing ordering. + */ + +/* Always enable assertions */ +#undef NDEBUG + +#include "Python.h" +#include "pyatomic.h" +#include "parts.h" + +// We define atomic bitwise operations on these types +#define FOR_BITWISE_TYPES(V) \ + V(uint8, uint8_t) \ + V(uint16, uint16_t) \ + V(uint32, uint32_t) \ + V(uint64, uint64_t) \ + V(uintptr, uintptr_t) + +// We define atomic addition on these types +#define FOR_ARITHMETIC_TYPES(V) \ + FOR_BITWISE_TYPES(V) \ + V(int, int) \ + V(uint, unsigned int) \ + V(int8, int8_t) \ + V(int16, int16_t) \ + V(int32, int32_t) \ + V(int64, int64_t) \ + V(intptr, intptr_t) \ + V(ssize, Py_ssize_t) + +// We define atomic load, store, exchange, and compare_exchange on these types +#define FOR_ALL_TYPES(V) \ + FOR_ARITHMETIC_TYPES(V) \ + V(ptr, void*) + +#define IMPL_TEST_ADD(suffix, dtype) \ +static PyObject * \ +test_atomic_add_##suffix(PyObject *self, PyObject *obj) { \ + dtype x = 0; \ + assert(_Py_atomic_add_##suffix(&x, 1) == 0); \ + assert("a" && x == 1); \ + assert(_Py_atomic_add_##suffix(&x, 2) == 1); \ + assert(x == 3); \ + assert(_Py_atomic_add_##suffix(&x, -2) == 3); \ + assert("b" && x == 1); \ + assert(_Py_atomic_add_##suffix(&x, -1) == 1); \ + assert(x == 0); \ + assert(_Py_atomic_add_##suffix(&x, -1) == 0); \ + assert(x == (dtype)-1); \ + assert(_Py_atomic_add_##suffix(&x, -2) == (dtype)-1); \ + assert(x == (dtype)-3); \ + assert(_Py_atomic_add_##suffix(&x, 2) == (dtype)-3); \ + assert(x == (dtype)-1); \ + Py_RETURN_NONE; \ +} +FOR_ARITHMETIC_TYPES(IMPL_TEST_ADD) + +#define IMPL_TEST_COMPARE_EXCHANGE(suffix, dtype) \ +static PyObject * \ +test_atomic_compare_exchange_##suffix(PyObject *self, PyObject *obj) { \ + dtype x = (dtype)0; \ + dtype y = (dtype)1; \ + dtype z = (dtype)2; \ + assert(_Py_atomic_compare_exchange_##suffix(&x, y, z) == 0); \ + assert(x == 0); \ + assert(_Py_atomic_compare_exchange_##suffix(&x, 0, z) == 1); \ + assert(x == z); \ + assert(_Py_atomic_compare_exchange_##suffix(&x, y, z) == 0); \ + assert(x == z); \ + Py_RETURN_NONE; \ +} +FOR_ALL_TYPES(IMPL_TEST_COMPARE_EXCHANGE) + +#define IMPL_TEST_EXCHANGE(suffix, dtype) \ +static PyObject * \ +test_atomic_exchange_##suffix(PyObject *self, PyObject *obj) { \ + dtype x = (dtype)0; \ + dtype y = (dtype)1; \ + dtype z = (dtype)2; \ + assert(_Py_atomic_exchange_##suffix(&x, y) == (dtype)0); \ + assert(x == (dtype)1); \ + assert(_Py_atomic_exchange_##suffix(&x, z) == (dtype)1); \ + assert(x == (dtype)2); \ + assert(_Py_atomic_exchange_##suffix(&x, y) == (dtype)2); \ + assert(x == (dtype)1); \ + Py_RETURN_NONE; \ +} +FOR_ALL_TYPES(IMPL_TEST_EXCHANGE) + +#define IMPL_TEST_LOAD_STORE(suffix, dtype) \ +static PyObject * \ 
+test_atomic_load_store_##suffix(PyObject *self, PyObject *obj) { \ + dtype x = (dtype)0; \ + dtype y = (dtype)1; \ + dtype z = (dtype)2; \ + assert(_Py_atomic_load_##suffix(&x) == (dtype)0); \ + assert(x == (dtype)0); \ + _Py_atomic_store_##suffix(&x, y); \ + assert(_Py_atomic_load_##suffix(&x) == (dtype)1); \ + assert(x == (dtype)1); \ + _Py_atomic_store_##suffix##_relaxed(&x, z); \ + assert(_Py_atomic_load_##suffix##_relaxed(&x) == (dtype)2); \ + assert(x == (dtype)2); \ + Py_RETURN_NONE; \ +} +FOR_ALL_TYPES(IMPL_TEST_LOAD_STORE) + +#define IMPL_TEST_AND_OR(suffix, dtype) \ +static PyObject * \ +test_atomic_and_or_##suffix(PyObject *self, PyObject *obj) { \ + dtype x = (dtype)0; \ + dtype y = (dtype)1; \ + dtype z = (dtype)3; \ + assert(_Py_atomic_or_##suffix(&x, z) == (dtype)0); \ + assert(x == (dtype)3); \ + assert(_Py_atomic_and_##suffix(&x, y) == (dtype)3); \ + assert(x == (dtype)1); \ + Py_RETURN_NONE; \ +} +FOR_BITWISE_TYPES(IMPL_TEST_AND_OR) + +static PyObject * +test_atomic_fences(PyObject *self, PyObject *obj) { + // Just make sure that the fences compile. We are not + // testing any synchronizing ordering. + _Py_atomic_fence_seq_cst(); + _Py_atomic_fence_release(); + Py_RETURN_NONE; +} + +// NOTE: all tests should start with "test_atomic_" to be included +// in test_pyatomic.py + +#define BIND_TEST_ADD(suffix, dtype) \ + {"test_atomic_add_" #suffix, test_atomic_add_##suffix, METH_NOARGS}, +#define BIND_TEST_COMPARE_EXCHANGE(suffix, dtype) \ + {"test_atomic_compare_exchange_" #suffix, test_atomic_compare_exchange_##suffix, METH_NOARGS}, +#define BIND_TEST_EXCHANGE(suffix, dtype) \ + {"test_atomic_exchange_" #suffix, test_atomic_exchange_##suffix, METH_NOARGS}, +#define BIND_TEST_LOAD_STORE(suffix, dtype) \ + {"test_atomic_load_store_" #suffix, test_atomic_load_store_##suffix, METH_NOARGS}, +#define BIND_TEST_AND_OR(suffix, dtype) \ + {"test_atomic_and_or_" #suffix, test_atomic_and_or_##suffix, METH_NOARGS}, + +static PyMethodDef test_methods[] = { + FOR_ARITHMETIC_TYPES(BIND_TEST_ADD) + FOR_ALL_TYPES(BIND_TEST_COMPARE_EXCHANGE) + FOR_ALL_TYPES(BIND_TEST_EXCHANGE) + FOR_ALL_TYPES(BIND_TEST_LOAD_STORE) + FOR_BITWISE_TYPES(BIND_TEST_AND_OR) + {"test_atomic_fences", test_atomic_fences, METH_NOARGS}, + {NULL, NULL} /* sentinel */ +}; + +int +_PyTestCapi_Init_PyAtomic(PyObject *mod) +{ + if (PyModule_AddFunctions(mod, test_methods) < 0) { + return -1; + } + return 0; +} diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c index a7a98d1eea5bd1..1094b4c544b279 100644 --- a/Modules/_testcapimodule.c +++ b/Modules/_testcapimodule.c @@ -4325,6 +4325,9 @@ PyInit__testcapi(void) if (_PyTestCapi_Init_GC(m) < 0) { return NULL; } + if (_PyTestCapi_Init_PyAtomic(m) < 0) { + return NULL; + } #ifndef LIMITED_API_AVAILABLE PyModule_AddObjectRef(m, "LIMITED_API_AVAILABLE", Py_False); diff --git a/PCbuild/_testcapi.vcxproj b/PCbuild/_testcapi.vcxproj index 8c0fd0cf052b0e..0a02929db438b8 100644 --- a/PCbuild/_testcapi.vcxproj +++ b/PCbuild/_testcapi.vcxproj @@ -112,6 +112,7 @@ + diff --git a/PCbuild/_testcapi.vcxproj.filters b/PCbuild/_testcapi.vcxproj.filters index 87d33ebe28e475..4ba6011d8af5b9 100644 --- a/PCbuild/_testcapi.vcxproj.filters +++ b/PCbuild/_testcapi.vcxproj.filters @@ -66,6 +66,9 @@ Source Files + + Source Files + Source Files diff --git a/PCbuild/pythoncore.vcxproj b/PCbuild/pythoncore.vcxproj index b0e62864421e17..1107d479c036f5 100644 --- a/PCbuild/pythoncore.vcxproj +++ b/PCbuild/pythoncore.vcxproj @@ -300,6 +300,8 @@ + + diff --git 
a/PCbuild/pythoncore.vcxproj.filters b/PCbuild/pythoncore.vcxproj.filters index d5f61e9c5d7c89..9f0a3f27d6381f 100644 --- a/PCbuild/pythoncore.vcxproj.filters +++ b/PCbuild/pythoncore.vcxproj.filters @@ -774,6 +774,12 @@ Include\internal + + Include + + + Include + From 9adf4f8e0108e8493c8181ce953ebbfeb7110e55 Mon Sep 17 00:00:00 2001 From: Sam Gross Date: Tue, 22 Aug 2023 13:16:00 -0700 Subject: [PATCH 02/24] Fix blurb name --- ....wceHZm.rst => 2023-08-22-13-00-54.gh-issue-108337.wceHZm.rst} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename Misc/NEWS.d/next/C API/{2023-08-22-13-00-54.bpo-108337.wceHZm.rst => 2023-08-22-13-00-54.gh-issue-108337.wceHZm.rst} (100%) diff --git a/Misc/NEWS.d/next/C API/2023-08-22-13-00-54.bpo-108337.wceHZm.rst b/Misc/NEWS.d/next/C API/2023-08-22-13-00-54.gh-issue-108337.wceHZm.rst similarity index 100% rename from Misc/NEWS.d/next/C API/2023-08-22-13-00-54.bpo-108337.wceHZm.rst rename to Misc/NEWS.d/next/C API/2023-08-22-13-00-54.gh-issue-108337.wceHZm.rst From d70e8ae9e6fc64d3b6f1e08b13a6d8621588de4e Mon Sep 17 00:00:00 2001 From: Sam Gross Date: Tue, 22 Aug 2023 13:35:01 -0700 Subject: [PATCH 03/24] Move pyatomic_std/gcc/msc.h to Include/cpython Placates Tools/c-analyzer/check-c-globals.py. --- Include/{ => cpython}/pyatomic_gcc.h | 0 Include/{ => cpython}/pyatomic_msc.h | 0 Include/{ => cpython}/pyatomic_std.h | 0 Include/pyatomic.h | 6 +++--- Makefile.pre.in | 4 ++-- PCbuild/pythoncore.vcxproj | 2 +- PCbuild/pythoncore.vcxproj.filters | 6 +++--- 7 files changed, 9 insertions(+), 9 deletions(-) rename Include/{ => cpython}/pyatomic_gcc.h (100%) rename Include/{ => cpython}/pyatomic_msc.h (100%) rename Include/{ => cpython}/pyatomic_std.h (100%) diff --git a/Include/pyatomic_gcc.h b/Include/cpython/pyatomic_gcc.h similarity index 100% rename from Include/pyatomic_gcc.h rename to Include/cpython/pyatomic_gcc.h diff --git a/Include/pyatomic_msc.h b/Include/cpython/pyatomic_msc.h similarity index 100% rename from Include/pyatomic_msc.h rename to Include/cpython/pyatomic_msc.h diff --git a/Include/pyatomic_std.h b/Include/cpython/pyatomic_std.h similarity index 100% rename from Include/pyatomic_std.h rename to Include/cpython/pyatomic_std.h diff --git a/Include/pyatomic.h b/Include/pyatomic.h index b8ad93562eb024..ea766ed2710037 100644 --- a/Include/pyatomic.h +++ b/Include/pyatomic.h @@ -357,13 +357,13 @@ _Py_atomic_fence_release(void); #if _Py_USE_GCC_BUILTIN_ATOMICS #define Py_ATOMIC_GCC_H -#include "pyatomic_gcc.h" +#include "cpython/pyatomic_gcc.h" #elif __STDC_VERSION__ >= 201112L && !defined(__STDC_NO_ATOMICS__) #define Py_ATOMIC_STD_H -#include "pyatomic_std.h" +#include "cpython/pyatomic_std.h" #elif defined(_MSC_VER) #define Py_ATOMIC_MSC_H -#include "pyatomic_msc.h" +#include "cpython/pyatomic_msc.h" #else #error "define pyatomic for this platform" #endif diff --git a/Makefile.pre.in b/Makefile.pre.in index 04b230b15b8a4d..f1ed9735f9b3cd 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -1655,8 +1655,6 @@ PYTHON_HEADERS= \ $(srcdir)/Include/osmodule.h \ $(srcdir)/Include/patchlevel.h \ $(srcdir)/Include/pyatomic.h \ - $(srcdir)/Include/pyatomic_gcc.h \ - $(srcdir)/Include/pyatomic_std.h \ $(srcdir)/Include/pybuffer.h \ $(srcdir)/Include/pycapsule.h \ $(srcdir)/Include/pydtrace.h \ @@ -1724,6 +1722,8 @@ PYTHON_HEADERS= \ $(srcdir)/Include/cpython/optimizer.h \ $(srcdir)/Include/cpython/picklebufobject.h \ $(srcdir)/Include/cpython/pthread_stubs.h \ + $(srcdir)/Include/cpython/pyatomic_gcc.h \ + $(srcdir)/Include/cpython/pyatomic_std.h \ 
$(srcdir)/Include/cpython/pyctype.h \ $(srcdir)/Include/cpython/pydebug.h \ $(srcdir)/Include/cpython/pyerrors.h \ diff --git a/PCbuild/pythoncore.vcxproj b/PCbuild/pythoncore.vcxproj index 1107d479c036f5..05260bee6b3f0c 100644 --- a/PCbuild/pythoncore.vcxproj +++ b/PCbuild/pythoncore.vcxproj @@ -166,6 +166,7 @@ + @@ -301,7 +302,6 @@ - diff --git a/PCbuild/pythoncore.vcxproj.filters b/PCbuild/pythoncore.vcxproj.filters index 9f0a3f27d6381f..77eaabb4d75375 100644 --- a/PCbuild/pythoncore.vcxproj.filters +++ b/PCbuild/pythoncore.vcxproj.filters @@ -423,6 +423,9 @@ Include + + Include + Include @@ -777,9 +780,6 @@ Include - - Include - From 927430dd2451eda13ac0a28b6dbc19f075dbf482 Mon Sep 17 00:00:00 2001 From: Sam Gross Date: Tue, 22 Aug 2023 14:19:27 -0700 Subject: [PATCH 04/24] Fix _InterlockedCompareExchange64 calls on MSVC x86 --- Include/cpython/pyatomic_msc.h | 8 ++++---- Modules/_testcapi/pyatomic.c | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/Include/cpython/pyatomic_msc.h b/Include/cpython/pyatomic_msc.h index 626bc7a84bdbf2..86c49ae9f538d2 100644 --- a/Include/cpython/pyatomic_msc.h +++ b/Include/cpython/pyatomic_msc.h @@ -47,7 +47,7 @@ _Py_atomic_add_int64(volatile int64_t *address, int64_t value) for (;;) { int64_t old_value = *address; int64_t new_value = old_value + value; - if (_InterlockedCompareExchange64((volatile __int64*)address, (__int64)new_value, (__int64)old_value)) { + if (old_value == _InterlockedCompareExchange64((volatile __int64*)address, (__int64)new_value, (__int64)old_value)) { return old_value; } } @@ -230,7 +230,7 @@ _Py_atomic_exchange_int64(volatile int64_t *address, int64_t value) for (;;) { int64_t old_value = *address; int64_t new_value = value; - if (_InterlockedCompareExchange64((volatile __int64*)address, (__int64)new_value, (__int64)old_value)) { + if (old_value == _InterlockedCompareExchange64((volatile __int64*)address, (__int64)new_value, (__int64)old_value)) { return old_value; } } @@ -322,7 +322,7 @@ _Py_atomic_and_uint64(volatile uint64_t *address, uint64_t value) for (;;) { uint64_t old_value = *address; uint64_t new_value = old_value & value; - if (_InterlockedCompareExchange64((volatile __int64*)address, (__int64)new_value, (__int64)old_value)) { + if (old_value == _InterlockedCompareExchange64((volatile __int64*)address, (__int64)new_value, (__int64)old_value)) { return old_value; } } @@ -366,7 +366,7 @@ _Py_atomic_or_uint64(volatile uint64_t *address, uint64_t value) for (;;) { uint64_t old_value = *address; uint64_t new_value = old_value | value; - if (_InterlockedCompareExchange64((volatile __int64*)address, (__int64)new_value, (__int64)old_value)) { + if (old_value == _InterlockedCompareExchange64((volatile __int64*)address, (__int64)new_value, (__int64)old_value)) { return old_value; } } diff --git a/Modules/_testcapi/pyatomic.c b/Modules/_testcapi/pyatomic.c index f435dc55ac26b9..cdeef853d37f26 100644 --- a/Modules/_testcapi/pyatomic.c +++ b/Modules/_testcapi/pyatomic.c @@ -41,11 +41,11 @@ static PyObject * \ test_atomic_add_##suffix(PyObject *self, PyObject *obj) { \ dtype x = 0; \ assert(_Py_atomic_add_##suffix(&x, 1) == 0); \ - assert("a" && x == 1); \ + assert(x == 1); \ assert(_Py_atomic_add_##suffix(&x, 2) == 1); \ assert(x == 3); \ assert(_Py_atomic_add_##suffix(&x, -2) == 3); \ - assert("b" && x == 1); \ + assert(x == 1); \ assert(_Py_atomic_add_##suffix(&x, -1) == 1); \ assert(x == 0); \ assert(_Py_atomic_add_##suffix(&x, -1) == 0); \ From 2d6f9508af338ca23d576439dc6865a0d9c66090 Mon Sep 17 00:00:00 
2001 From: Sam Gross Date: Wed, 23 Aug 2023 10:58:42 -0700 Subject: [PATCH 05/24] Move pyatomic.h to Include/cpython --- Include/{ => cpython}/pyatomic.h | 0 Makefile.pre.in | 2 +- Modules/_testcapi/pyatomic.c | 2 +- PCbuild/pythoncore.vcxproj | 2 +- PCbuild/pythoncore.vcxproj.filters | 6 +++--- Tools/build/stable_abi.py | 1 - 6 files changed, 6 insertions(+), 7 deletions(-) rename Include/{ => cpython}/pyatomic.h (100%) diff --git a/Include/pyatomic.h b/Include/cpython/pyatomic.h similarity index 100% rename from Include/pyatomic.h rename to Include/cpython/pyatomic.h diff --git a/Makefile.pre.in b/Makefile.pre.in index f1ed9735f9b3cd..adc013dc131944 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -1654,7 +1654,6 @@ PYTHON_HEADERS= \ $(srcdir)/Include/osdefs.h \ $(srcdir)/Include/osmodule.h \ $(srcdir)/Include/patchlevel.h \ - $(srcdir)/Include/pyatomic.h \ $(srcdir)/Include/pybuffer.h \ $(srcdir)/Include/pycapsule.h \ $(srcdir)/Include/pydtrace.h \ @@ -1722,6 +1721,7 @@ PYTHON_HEADERS= \ $(srcdir)/Include/cpython/optimizer.h \ $(srcdir)/Include/cpython/picklebufobject.h \ $(srcdir)/Include/cpython/pthread_stubs.h \ + $(srcdir)/Include/cpython/pyatomic.h \ $(srcdir)/Include/cpython/pyatomic_gcc.h \ $(srcdir)/Include/cpython/pyatomic_std.h \ $(srcdir)/Include/cpython/pyctype.h \ diff --git a/Modules/_testcapi/pyatomic.c b/Modules/_testcapi/pyatomic.c index cdeef853d37f26..da54a87915923c 100644 --- a/Modules/_testcapi/pyatomic.c +++ b/Modules/_testcapi/pyatomic.c @@ -8,7 +8,7 @@ #undef NDEBUG #include "Python.h" -#include "pyatomic.h" +#include "cpython/pyatomic.h" #include "parts.h" // We define atomic bitwise operations on these types diff --git a/PCbuild/pythoncore.vcxproj b/PCbuild/pythoncore.vcxproj index 05260bee6b3f0c..7f9f24f9274df0 100644 --- a/PCbuild/pythoncore.vcxproj +++ b/PCbuild/pythoncore.vcxproj @@ -166,6 +166,7 @@ + @@ -301,7 +302,6 @@ - diff --git a/PCbuild/pythoncore.vcxproj.filters b/PCbuild/pythoncore.vcxproj.filters index 77eaabb4d75375..b9d09716df42f3 100644 --- a/PCbuild/pythoncore.vcxproj.filters +++ b/PCbuild/pythoncore.vcxproj.filters @@ -423,6 +423,9 @@ Include + + Include + Include @@ -777,9 +780,6 @@ Include\internal - - Include - diff --git a/Tools/build/stable_abi.py b/Tools/build/stable_abi.py index 7cba788ff33578..8b958cc71c7aed 100644 --- a/Tools/build/stable_abi.py +++ b/Tools/build/stable_abi.py @@ -39,7 +39,6 @@ "genobject.h", "longintrepr.h", "parsetok.h", - "pyatomic.h", "token.h", "ucnhash.h", } From bf27448b1343fe4a029daf065308190d33b332e4 Mon Sep 17 00:00:00 2001 From: Sam Gross Date: Wed, 23 Aug 2023 11:23:49 -0700 Subject: [PATCH 06/24] Remove volatile from function signature. Instead use volatile casts in MSVC implementation where they are meaningful. 
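
In other words, callers now pass ordinary pointers and each backend decides where a volatile access is actually needed. A sketch of the resulting MSVC-style pattern for a relaxed store (assumed shape only; the real bodies live in Include/cpython/pyatomic_msc.h):

    /* The public signature takes a plain pointer; the volatile qualifier is
       applied only at the point of access, which is what keeps the compiler
       from merging or eliding the store. */
    static inline void
    _Py_atomic_store_int_relaxed(int *address, int value)
    {
        *(volatile int *)address = value;
    }
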
--- Include/cpython/pyatomic.h | 218 ++++++++--------- Include/cpython/pyatomic_gcc.h | 242 +++++++++--------- Include/cpython/pyatomic_msc.h | 331 +++++++++++++------------ Include/cpython/pyatomic_std.h | 436 ++++++++++++++++----------------- 4 files changed, 613 insertions(+), 614 deletions(-) diff --git a/Include/cpython/pyatomic.h b/Include/cpython/pyatomic.h index ea766ed2710037..85592c28f26a21 100644 --- a/Include/cpython/pyatomic.h +++ b/Include/cpython/pyatomic.h @@ -2,340 +2,340 @@ #define Py_ATOMIC_H static inline int -_Py_atomic_add_int(volatile int *address, int value); +_Py_atomic_add_int(int *address, int value); static inline int8_t -_Py_atomic_add_int8(volatile int8_t *address, int8_t value); +_Py_atomic_add_int8(int8_t *address, int8_t value); static inline int16_t -_Py_atomic_add_int16(volatile int16_t *address, int16_t value); +_Py_atomic_add_int16(int16_t *address, int16_t value); static inline int32_t -_Py_atomic_add_int32(volatile int32_t *address, int32_t value); +_Py_atomic_add_int32(int32_t *address, int32_t value); static inline int64_t -_Py_atomic_add_int64(volatile int64_t *address, int64_t value); +_Py_atomic_add_int64(int64_t *address, int64_t value); static inline intptr_t -_Py_atomic_add_intptr(volatile intptr_t *address, intptr_t value); +_Py_atomic_add_intptr(intptr_t *address, intptr_t value); static inline unsigned int -_Py_atomic_add_uint(volatile unsigned int *address, unsigned int value); +_Py_atomic_add_uint(unsigned int *address, unsigned int value); static inline uint8_t -_Py_atomic_add_uint8(volatile uint8_t *address, uint8_t value); +_Py_atomic_add_uint8(uint8_t *address, uint8_t value); static inline uint16_t -_Py_atomic_add_uint16(volatile uint16_t *address, uint16_t value); +_Py_atomic_add_uint16(uint16_t *address, uint16_t value); static inline uint32_t -_Py_atomic_add_uint32(volatile uint32_t *address, uint32_t value); +_Py_atomic_add_uint32(uint32_t *address, uint32_t value); static inline uint64_t -_Py_atomic_add_uint64(volatile uint64_t *address, uint64_t value); +_Py_atomic_add_uint64(uint64_t *address, uint64_t value); static inline uintptr_t -_Py_atomic_add_uintptr(volatile uintptr_t *address, uintptr_t value); +_Py_atomic_add_uintptr(uintptr_t *address, uintptr_t value); static inline Py_ssize_t -_Py_atomic_add_ssize(volatile Py_ssize_t *address, Py_ssize_t value); +_Py_atomic_add_ssize(Py_ssize_t *address, Py_ssize_t value); static inline int -_Py_atomic_compare_exchange_int(volatile int *address, int expected, int value); +_Py_atomic_compare_exchange_int(int *address, int expected, int value); static inline int -_Py_atomic_compare_exchange_int8(volatile int8_t *address, int8_t expected, int8_t value); +_Py_atomic_compare_exchange_int8(int8_t *address, int8_t expected, int8_t value); static inline int -_Py_atomic_compare_exchange_int16(volatile int16_t *address, int16_t expected, int16_t value); +_Py_atomic_compare_exchange_int16(int16_t *address, int16_t expected, int16_t value); static inline int -_Py_atomic_compare_exchange_int32(volatile int32_t *address, int32_t expected, int32_t value); +_Py_atomic_compare_exchange_int32(int32_t *address, int32_t expected, int32_t value); static inline int -_Py_atomic_compare_exchange_int64(volatile int64_t *address, int64_t expected, int64_t value); +_Py_atomic_compare_exchange_int64(int64_t *address, int64_t expected, int64_t value); static inline int -_Py_atomic_compare_exchange_intptr(volatile intptr_t *address, intptr_t expected, intptr_t value); 
+_Py_atomic_compare_exchange_intptr(intptr_t *address, intptr_t expected, intptr_t value); static inline int -_Py_atomic_compare_exchange_uint(volatile unsigned int *address, unsigned int expected, unsigned int value); +_Py_atomic_compare_exchange_uint(unsigned int *address, unsigned int expected, unsigned int value); static inline int -_Py_atomic_compare_exchange_uint8(volatile uint8_t *address, uint8_t expected, uint8_t value); +_Py_atomic_compare_exchange_uint8(uint8_t *address, uint8_t expected, uint8_t value); static inline int -_Py_atomic_compare_exchange_uint16(volatile uint16_t *address, uint16_t expected, uint16_t value); +_Py_atomic_compare_exchange_uint16(uint16_t *address, uint16_t expected, uint16_t value); static inline int -_Py_atomic_compare_exchange_uint32(volatile uint32_t *address, uint32_t expected, uint32_t value); +_Py_atomic_compare_exchange_uint32(uint32_t *address, uint32_t expected, uint32_t value); static inline int -_Py_atomic_compare_exchange_uint64(volatile uint64_t *address, uint64_t expected, uint64_t value); +_Py_atomic_compare_exchange_uint64(uint64_t *address, uint64_t expected, uint64_t value); static inline int -_Py_atomic_compare_exchange_uintptr(volatile uintptr_t *address, uintptr_t expected, uintptr_t value); +_Py_atomic_compare_exchange_uintptr(uintptr_t *address, uintptr_t expected, uintptr_t value); static inline int -_Py_atomic_compare_exchange_ssize(volatile Py_ssize_t *address, Py_ssize_t expected, Py_ssize_t value); +_Py_atomic_compare_exchange_ssize(Py_ssize_t *address, Py_ssize_t expected, Py_ssize_t value); static inline int -_Py_atomic_compare_exchange_ptr(volatile void *address, void *expected, void *value); +_Py_atomic_compare_exchange_ptr(void *address, void *expected, void *value); static inline int -_Py_atomic_exchange_int(volatile int *address, int value); +_Py_atomic_exchange_int(int *address, int value); static inline int8_t -_Py_atomic_exchange_int8(volatile int8_t *address, int8_t value); +_Py_atomic_exchange_int8(int8_t *address, int8_t value); static inline int16_t -_Py_atomic_exchange_int16(volatile int16_t *address, int16_t value); +_Py_atomic_exchange_int16(int16_t *address, int16_t value); static inline int32_t -_Py_atomic_exchange_int32(volatile int32_t *address, int32_t value); +_Py_atomic_exchange_int32(int32_t *address, int32_t value); static inline int64_t -_Py_atomic_exchange_int64(volatile int64_t *address, int64_t value); +_Py_atomic_exchange_int64(int64_t *address, int64_t value); static inline intptr_t -_Py_atomic_exchange_intptr(volatile intptr_t *address, intptr_t value); +_Py_atomic_exchange_intptr(intptr_t *address, intptr_t value); static inline unsigned int -_Py_atomic_exchange_uint(volatile unsigned int *address, unsigned int value); +_Py_atomic_exchange_uint(unsigned int *address, unsigned int value); static inline uint8_t -_Py_atomic_exchange_uint8(volatile uint8_t *address, uint8_t value); +_Py_atomic_exchange_uint8(uint8_t *address, uint8_t value); static inline uint16_t -_Py_atomic_exchange_uint16(volatile uint16_t *address, uint16_t value); +_Py_atomic_exchange_uint16(uint16_t *address, uint16_t value); static inline uint32_t -_Py_atomic_exchange_uint32(volatile uint32_t *address, uint32_t value); +_Py_atomic_exchange_uint32(uint32_t *address, uint32_t value); static inline uint64_t -_Py_atomic_exchange_uint64(volatile uint64_t *address, uint64_t value); +_Py_atomic_exchange_uint64(uint64_t *address, uint64_t value); static inline uintptr_t -_Py_atomic_exchange_uintptr(volatile uintptr_t *address, 
uintptr_t value); +_Py_atomic_exchange_uintptr(uintptr_t *address, uintptr_t value); static inline Py_ssize_t -_Py_atomic_exchange_ssize(volatile Py_ssize_t *address, Py_ssize_t value); +_Py_atomic_exchange_ssize(Py_ssize_t *address, Py_ssize_t value); static inline void * -_Py_atomic_exchange_ptr(volatile void *address, void *value); +_Py_atomic_exchange_ptr(void *address, void *value); static inline uint8_t -_Py_atomic_and_uint8(volatile uint8_t *address, uint8_t value); +_Py_atomic_and_uint8(uint8_t *address, uint8_t value); static inline uint16_t -_Py_atomic_and_uint16(volatile uint16_t *address, uint16_t value); +_Py_atomic_and_uint16(uint16_t *address, uint16_t value); static inline uint32_t -_Py_atomic_and_uint32(volatile uint32_t *address, uint32_t value); +_Py_atomic_and_uint32(uint32_t *address, uint32_t value); static inline uint64_t -_Py_atomic_and_uint64(volatile uint64_t *address, uint64_t value); +_Py_atomic_and_uint64(uint64_t *address, uint64_t value); static inline uintptr_t -_Py_atomic_and_uintptr(volatile uintptr_t *address, uintptr_t value); +_Py_atomic_and_uintptr(uintptr_t *address, uintptr_t value); static inline uint8_t -_Py_atomic_or_uint8(volatile uint8_t *address, uint8_t value); +_Py_atomic_or_uint8(uint8_t *address, uint8_t value); static inline uint16_t -_Py_atomic_or_uint16(volatile uint16_t *address, uint16_t value); +_Py_atomic_or_uint16(uint16_t *address, uint16_t value); static inline uint32_t -_Py_atomic_or_uint32(volatile uint32_t *address, uint32_t value); +_Py_atomic_or_uint32(uint32_t *address, uint32_t value); static inline uint64_t -_Py_atomic_or_uint64(volatile uint64_t *address, uint64_t value); +_Py_atomic_or_uint64(uint64_t *address, uint64_t value); static inline uintptr_t -_Py_atomic_or_uintptr(volatile uintptr_t *address, uintptr_t value); +_Py_atomic_or_uintptr(uintptr_t *address, uintptr_t value); static inline int -_Py_atomic_load_int(const volatile int *address); +_Py_atomic_load_int(const int *address); static inline int8_t -_Py_atomic_load_int8(const volatile int8_t *address); +_Py_atomic_load_int8(const int8_t *address); static inline int16_t -_Py_atomic_load_int16(const volatile int16_t *address); +_Py_atomic_load_int16(const int16_t *address); static inline int32_t -_Py_atomic_load_int32(const volatile int32_t *address); +_Py_atomic_load_int32(const int32_t *address); static inline int64_t -_Py_atomic_load_int64(const volatile int64_t *address); +_Py_atomic_load_int64(const int64_t *address); static inline intptr_t -_Py_atomic_load_intptr(const volatile intptr_t *address); +_Py_atomic_load_intptr(const intptr_t *address); static inline uint8_t -_Py_atomic_load_uint8(const volatile uint8_t *address); +_Py_atomic_load_uint8(const uint8_t *address); static inline uint16_t -_Py_atomic_load_uint16(const volatile uint16_t *address); +_Py_atomic_load_uint16(const uint16_t *address); static inline uint32_t -_Py_atomic_load_uint32(const volatile uint32_t *address); +_Py_atomic_load_uint32(const uint32_t *address); static inline uint64_t -_Py_atomic_load_uint64(const volatile uint64_t *address); +_Py_atomic_load_uint64(const uint64_t *address); static inline uintptr_t -_Py_atomic_load_uintptr(const volatile uintptr_t *address); +_Py_atomic_load_uintptr(const uintptr_t *address); static inline unsigned int -_Py_atomic_load_uint(const volatile unsigned int *address); +_Py_atomic_load_uint(const unsigned int *address); static inline Py_ssize_t -_Py_atomic_load_ssize(const volatile Py_ssize_t *address); +_Py_atomic_load_ssize(const Py_ssize_t 
*address); static inline void * -_Py_atomic_load_ptr(const volatile void *address); +_Py_atomic_load_ptr(const void *address); static inline int -_Py_atomic_load_int_relaxed(const volatile int *address); +_Py_atomic_load_int_relaxed(const int *address); static inline int8_t -_Py_atomic_load_int8_relaxed(const volatile int8_t *address); +_Py_atomic_load_int8_relaxed(const int8_t *address); static inline int16_t -_Py_atomic_load_int16_relaxed(const volatile int16_t *address); +_Py_atomic_load_int16_relaxed(const int16_t *address); static inline int32_t -_Py_atomic_load_int32_relaxed(const volatile int32_t *address); +_Py_atomic_load_int32_relaxed(const int32_t *address); static inline int64_t -_Py_atomic_load_int64_relaxed(const volatile int64_t *address); +_Py_atomic_load_int64_relaxed(const int64_t *address); static inline intptr_t -_Py_atomic_load_intptr_relaxed(const volatile intptr_t *address); +_Py_atomic_load_intptr_relaxed(const intptr_t *address); static inline uint8_t -_Py_atomic_load_uint8_relaxed(const volatile uint8_t *address); +_Py_atomic_load_uint8_relaxed(const uint8_t *address); static inline uint16_t -_Py_atomic_load_uint16_relaxed(const volatile uint16_t *address); +_Py_atomic_load_uint16_relaxed(const uint16_t *address); static inline uint32_t -_Py_atomic_load_uint32_relaxed(const volatile uint32_t *address); +_Py_atomic_load_uint32_relaxed(const uint32_t *address); static inline uint64_t -_Py_atomic_load_uint64_relaxed(const volatile uint64_t *address); +_Py_atomic_load_uint64_relaxed(const uint64_t *address); static inline uintptr_t -_Py_atomic_load_uintptr_relaxed(const volatile uintptr_t *address); +_Py_atomic_load_uintptr_relaxed(const uintptr_t *address); static inline unsigned int -_Py_atomic_load_uint_relaxed(const volatile unsigned int *address); +_Py_atomic_load_uint_relaxed(const unsigned int *address); static inline Py_ssize_t -_Py_atomic_load_ssize_relaxed(const volatile Py_ssize_t *address); +_Py_atomic_load_ssize_relaxed(const Py_ssize_t *address); static inline void * -_Py_atomic_load_ptr_relaxed(const volatile void *address); +_Py_atomic_load_ptr_relaxed(const void *address); static inline void -_Py_atomic_store_int(volatile int *address, int value); +_Py_atomic_store_int(int *address, int value); static inline void -_Py_atomic_store_int8(volatile int8_t *address, int8_t value); +_Py_atomic_store_int8(int8_t *address, int8_t value); static inline void -_Py_atomic_store_int16(volatile int16_t *address, int16_t value); +_Py_atomic_store_int16(int16_t *address, int16_t value); static inline void -_Py_atomic_store_int32(volatile int32_t *address, int32_t value); +_Py_atomic_store_int32(int32_t *address, int32_t value); static inline void -_Py_atomic_store_int64(volatile int64_t *address, int64_t value); +_Py_atomic_store_int64(int64_t *address, int64_t value); static inline void -_Py_atomic_store_intptr(volatile intptr_t *address, intptr_t value); +_Py_atomic_store_intptr(intptr_t *address, intptr_t value); static inline void -_Py_atomic_store_uint8(volatile uint8_t *address, uint8_t value); +_Py_atomic_store_uint8(uint8_t *address, uint8_t value); static inline void -_Py_atomic_store_uint16(volatile uint16_t *address, uint16_t value); +_Py_atomic_store_uint16(uint16_t *address, uint16_t value); static inline void -_Py_atomic_store_uint32(volatile uint32_t *address, uint32_t value); +_Py_atomic_store_uint32(uint32_t *address, uint32_t value); static inline void -_Py_atomic_store_uint64(volatile uint64_t *address, uint64_t value); 
+_Py_atomic_store_uint64(uint64_t *address, uint64_t value); static inline void -_Py_atomic_store_uintptr(volatile uintptr_t *address, uintptr_t value); +_Py_atomic_store_uintptr(uintptr_t *address, uintptr_t value); static inline void -_Py_atomic_store_uint(volatile unsigned int *address, unsigned int value); +_Py_atomic_store_uint(unsigned int *address, unsigned int value); static inline void -_Py_atomic_store_ptr(volatile void *address, void *value); +_Py_atomic_store_ptr(void *address, void *value); static inline void -_Py_atomic_store_ssize(volatile Py_ssize_t* address, Py_ssize_t value); +_Py_atomic_store_ssize(Py_ssize_t* address, Py_ssize_t value); static inline void -_Py_atomic_store_int_relaxed(volatile int *address, int value); +_Py_atomic_store_int_relaxed(int *address, int value); static inline void -_Py_atomic_store_int8_relaxed(volatile int8_t *address, int8_t value); +_Py_atomic_store_int8_relaxed(int8_t *address, int8_t value); static inline void -_Py_atomic_store_int16_relaxed(volatile int16_t *address, int16_t value); +_Py_atomic_store_int16_relaxed(int16_t *address, int16_t value); static inline void -_Py_atomic_store_int32_relaxed(volatile int32_t *address, int32_t value); +_Py_atomic_store_int32_relaxed(int32_t *address, int32_t value); static inline void -_Py_atomic_store_int64_relaxed(volatile int64_t *address, int64_t value); +_Py_atomic_store_int64_relaxed(int64_t *address, int64_t value); static inline void -_Py_atomic_store_intptr_relaxed(volatile intptr_t *address, intptr_t value); +_Py_atomic_store_intptr_relaxed(intptr_t *address, intptr_t value); static inline void -_Py_atomic_store_uint8_relaxed(volatile uint8_t* address, uint8_t value); +_Py_atomic_store_uint8_relaxed(uint8_t* address, uint8_t value); static inline void -_Py_atomic_store_uint16_relaxed(volatile uint16_t *address, uint16_t value); +_Py_atomic_store_uint16_relaxed(uint16_t *address, uint16_t value); static inline void -_Py_atomic_store_uint32_relaxed(volatile uint32_t *address, uint32_t value); +_Py_atomic_store_uint32_relaxed(uint32_t *address, uint32_t value); static inline void -_Py_atomic_store_uint64_relaxed(volatile uint64_t *address, uint64_t value); +_Py_atomic_store_uint64_relaxed(uint64_t *address, uint64_t value); static inline void -_Py_atomic_store_uintptr_relaxed(volatile uintptr_t *address, uintptr_t value); +_Py_atomic_store_uintptr_relaxed(uintptr_t *address, uintptr_t value); static inline void -_Py_atomic_store_uint_relaxed(volatile unsigned int *address, unsigned int value); +_Py_atomic_store_uint_relaxed(unsigned int *address, unsigned int value); static inline void -_Py_atomic_store_ptr_relaxed(volatile void *address, void *value); +_Py_atomic_store_ptr_relaxed(void *address, void *value); static inline void -_Py_atomic_store_ssize_relaxed(volatile Py_ssize_t *address, Py_ssize_t value); +_Py_atomic_store_ssize_relaxed(Py_ssize_t *address, Py_ssize_t value); static inline void -_Py_atomic_store_uint64_release(volatile uint64_t *address, uint64_t value); +_Py_atomic_store_uint64_release(uint64_t *address, uint64_t value); static inline void -_Py_atomic_store_ptr_release(volatile void *address, void *value); +_Py_atomic_store_ptr_release(void *address, void *value); static inline void diff --git a/Include/cpython/pyatomic_gcc.h b/Include/cpython/pyatomic_gcc.h index 64d917933d12da..cdf3497fd36f83 100644 --- a/Include/cpython/pyatomic_gcc.h +++ b/Include/cpython/pyatomic_gcc.h @@ -8,662 +8,662 @@ // GCC 4.8+ without needing to compile with --std=c11 or --std=gnu11. 
static inline int -_Py_atomic_add_int(volatile int *address, int value) -{ - return __atomic_fetch_add(address, value, __ATOMIC_SEQ_CST); -} - -static inline unsigned int -_Py_atomic_add_uint(volatile unsigned int *address, unsigned int value) +_Py_atomic_add_int(int *address, int value) { return __atomic_fetch_add(address, value, __ATOMIC_SEQ_CST); } static inline int8_t -_Py_atomic_add_int8(volatile int8_t *address, int8_t value) +_Py_atomic_add_int8(int8_t *address, int8_t value) { return __atomic_fetch_add(address, value, __ATOMIC_SEQ_CST); } static inline int16_t -_Py_atomic_add_int16(volatile int16_t *address, int16_t value) +_Py_atomic_add_int16(int16_t *address, int16_t value) { return __atomic_fetch_add(address, value, __ATOMIC_SEQ_CST); } static inline int32_t -_Py_atomic_add_int32(volatile int32_t *address, int32_t value) +_Py_atomic_add_int32(int32_t *address, int32_t value) { return __atomic_fetch_add(address, value, __ATOMIC_SEQ_CST); } static inline int64_t -_Py_atomic_add_int64(volatile int64_t *address, int64_t value) +_Py_atomic_add_int64(int64_t *address, int64_t value) { return __atomic_fetch_add(address, value, __ATOMIC_SEQ_CST); } static inline intptr_t -_Py_atomic_add_intptr(volatile intptr_t *address, intptr_t value) +_Py_atomic_add_intptr(intptr_t *address, intptr_t value) +{ + return __atomic_fetch_add(address, value, __ATOMIC_SEQ_CST); +} + +static inline unsigned int +_Py_atomic_add_uint(unsigned int *address, unsigned int value) { return __atomic_fetch_add(address, value, __ATOMIC_SEQ_CST); } static inline uint8_t -_Py_atomic_add_uint8(volatile uint8_t *address, uint8_t value) +_Py_atomic_add_uint8(uint8_t *address, uint8_t value) { return __atomic_fetch_add(address, value, __ATOMIC_SEQ_CST); } static inline uint16_t -_Py_atomic_add_uint16(volatile uint16_t *address, uint16_t value) +_Py_atomic_add_uint16(uint16_t *address, uint16_t value) { return __atomic_fetch_add(address, value, __ATOMIC_SEQ_CST); } static inline uint32_t -_Py_atomic_add_uint32(volatile uint32_t *address, uint32_t value) +_Py_atomic_add_uint32(uint32_t *address, uint32_t value) { return __atomic_fetch_add(address, value, __ATOMIC_SEQ_CST); } static inline uint64_t -_Py_atomic_add_uint64(volatile uint64_t *address, uint64_t value) +_Py_atomic_add_uint64(uint64_t *address, uint64_t value) { return __atomic_fetch_add(address, value, __ATOMIC_SEQ_CST); } static inline uintptr_t -_Py_atomic_add_uintptr(volatile uintptr_t *address, uintptr_t value) +_Py_atomic_add_uintptr(uintptr_t *address, uintptr_t value) { return __atomic_fetch_add(address, value, __ATOMIC_SEQ_CST); } static inline Py_ssize_t -_Py_atomic_add_ssize(volatile Py_ssize_t *address, Py_ssize_t value) +_Py_atomic_add_ssize(Py_ssize_t *address, Py_ssize_t value) { return __atomic_fetch_add(address, value, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_compare_exchange_int(volatile int *address, int expected, int value) +_Py_atomic_compare_exchange_int(int *address, int expected, int value) { return __atomic_compare_exchange_n(address, &expected, value, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_compare_exchange_int8(volatile int8_t *address, int8_t expected, int8_t value) +_Py_atomic_compare_exchange_int8(int8_t *address, int8_t expected, int8_t value) { return __atomic_compare_exchange_n(address, &expected, value, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_compare_exchange_int16(volatile int16_t *address, int16_t expected, int16_t value) 
+_Py_atomic_compare_exchange_int16(int16_t *address, int16_t expected, int16_t value) { return __atomic_compare_exchange_n(address, &expected, value, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_compare_exchange_int32(volatile int32_t *address, int32_t expected, int32_t value) +_Py_atomic_compare_exchange_int32(int32_t *address, int32_t expected, int32_t value) { return __atomic_compare_exchange_n(address, &expected, value, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_compare_exchange_int64(volatile int64_t *address, int64_t expected, int64_t value) +_Py_atomic_compare_exchange_int64(int64_t *address, int64_t expected, int64_t value) { return __atomic_compare_exchange_n(address, &expected, value, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_compare_exchange_intptr(volatile intptr_t *address, intptr_t expected, intptr_t value) +_Py_atomic_compare_exchange_intptr(intptr_t *address, intptr_t expected, intptr_t value) { return __atomic_compare_exchange_n(address, &expected, value, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_compare_exchange_uint(volatile unsigned int *address, unsigned int expected, unsigned int value) +_Py_atomic_compare_exchange_uint(unsigned int *address, unsigned int expected, unsigned int value) { return __atomic_compare_exchange_n(address, &expected, value, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_compare_exchange_uint8(volatile uint8_t *address, uint8_t expected, uint8_t value) +_Py_atomic_compare_exchange_uint8(uint8_t *address, uint8_t expected, uint8_t value) { return __atomic_compare_exchange_n(address, &expected, value, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_compare_exchange_uint16(volatile uint16_t *address, uint16_t expected, uint16_t value) +_Py_atomic_compare_exchange_uint16(uint16_t *address, uint16_t expected, uint16_t value) { return __atomic_compare_exchange_n(address, &expected, value, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_compare_exchange_uint32(volatile uint32_t *address, uint32_t expected, uint32_t value) +_Py_atomic_compare_exchange_uint32(uint32_t *address, uint32_t expected, uint32_t value) { return __atomic_compare_exchange_n(address, &expected, value, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_compare_exchange_uint64(volatile uint64_t *address, uint64_t expected, uint64_t value) +_Py_atomic_compare_exchange_uint64(uint64_t *address, uint64_t expected, uint64_t value) { return __atomic_compare_exchange_n(address, &expected, value, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_compare_exchange_uintptr(volatile uintptr_t *address, uintptr_t expected, uintptr_t value) +_Py_atomic_compare_exchange_uintptr(uintptr_t *address, uintptr_t expected, uintptr_t value) { return __atomic_compare_exchange_n(address, &expected, value, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_compare_exchange_ssize(volatile Py_ssize_t *address, Py_ssize_t expected, Py_ssize_t value) +_Py_atomic_compare_exchange_ssize(Py_ssize_t *address, Py_ssize_t expected, Py_ssize_t value) { return __atomic_compare_exchange_n(address, &expected, value, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_compare_exchange_ptr(volatile void *address, void *expected, void *value) +_Py_atomic_compare_exchange_ptr(void *address, void *expected, void *value) { - volatile void *e = expected; - 
return __atomic_compare_exchange_n((volatile void **)address, &e, value, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); + void *e = expected; + return __atomic_compare_exchange_n((void **)address, &e, value, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_exchange_int(volatile int *address, int value) +_Py_atomic_exchange_int(int *address, int value) { return __atomic_exchange_n(address, value, __ATOMIC_SEQ_CST); } static inline int8_t -_Py_atomic_exchange_int8(volatile int8_t *address, int8_t value) +_Py_atomic_exchange_int8(int8_t *address, int8_t value) { return __atomic_exchange_n(address, value, __ATOMIC_SEQ_CST); } static inline int16_t -_Py_atomic_exchange_int16(volatile int16_t *address, int16_t value) +_Py_atomic_exchange_int16(int16_t *address, int16_t value) { return __atomic_exchange_n(address, value, __ATOMIC_SEQ_CST); } static inline int32_t -_Py_atomic_exchange_int32(volatile int32_t *address, int32_t value) +_Py_atomic_exchange_int32(int32_t *address, int32_t value) { return __atomic_exchange_n(address, value, __ATOMIC_SEQ_CST); } static inline int64_t -_Py_atomic_exchange_int64(volatile int64_t *address, int64_t value) +_Py_atomic_exchange_int64(int64_t *address, int64_t value) { return __atomic_exchange_n(address, value, __ATOMIC_SEQ_CST); } static inline intptr_t -_Py_atomic_exchange_intptr(volatile intptr_t *address, intptr_t value) +_Py_atomic_exchange_intptr(intptr_t *address, intptr_t value) { return __atomic_exchange_n(address, value, __ATOMIC_SEQ_CST); } static inline unsigned int -_Py_atomic_exchange_uint(volatile unsigned int *address, unsigned int value) +_Py_atomic_exchange_uint(unsigned int *address, unsigned int value) { return __atomic_exchange_n(address, value, __ATOMIC_SEQ_CST); } static inline uint8_t -_Py_atomic_exchange_uint8(volatile uint8_t *address, uint8_t value) +_Py_atomic_exchange_uint8(uint8_t *address, uint8_t value) { return __atomic_exchange_n(address, value, __ATOMIC_SEQ_CST); } static inline uint16_t -_Py_atomic_exchange_uint16(volatile uint16_t *address, uint16_t value) +_Py_atomic_exchange_uint16(uint16_t *address, uint16_t value) { return __atomic_exchange_n(address, value, __ATOMIC_SEQ_CST); } static inline uint32_t -_Py_atomic_exchange_uint32(volatile uint32_t *address, uint32_t value) +_Py_atomic_exchange_uint32(uint32_t *address, uint32_t value) { return __atomic_exchange_n(address, value, __ATOMIC_SEQ_CST); } static inline uint64_t -_Py_atomic_exchange_uint64(volatile uint64_t *address, uint64_t value) +_Py_atomic_exchange_uint64(uint64_t *address, uint64_t value) { return __atomic_exchange_n(address, value, __ATOMIC_SEQ_CST); } static inline uintptr_t -_Py_atomic_exchange_uintptr(volatile uintptr_t *address, uintptr_t value) +_Py_atomic_exchange_uintptr(uintptr_t *address, uintptr_t value) { return __atomic_exchange_n(address, value, __ATOMIC_SEQ_CST); } static inline Py_ssize_t -_Py_atomic_exchange_ssize(volatile Py_ssize_t *address, Py_ssize_t value) +_Py_atomic_exchange_ssize(Py_ssize_t *address, Py_ssize_t value) { return __atomic_exchange_n(address, value, __ATOMIC_SEQ_CST); } static inline void * -_Py_atomic_exchange_ptr(volatile void *address, void *value) +_Py_atomic_exchange_ptr(void *address, void *value) { return __atomic_exchange_n((void **)address, value, __ATOMIC_SEQ_CST); } static inline uint8_t -_Py_atomic_and_uint8(volatile uint8_t *address, uint8_t value) +_Py_atomic_and_uint8(uint8_t *address, uint8_t value) { return __atomic_fetch_and(address, value, __ATOMIC_SEQ_CST); } static inline uint16_t 
-_Py_atomic_and_uint16(volatile uint16_t *address, uint16_t value) +_Py_atomic_and_uint16(uint16_t *address, uint16_t value) { return __atomic_fetch_and(address, value, __ATOMIC_SEQ_CST); } static inline uint32_t -_Py_atomic_and_uint32(volatile uint32_t *address, uint32_t value) +_Py_atomic_and_uint32(uint32_t *address, uint32_t value) { return __atomic_fetch_and(address, value, __ATOMIC_SEQ_CST); } static inline uint64_t -_Py_atomic_and_uint64(volatile uint64_t *address, uint64_t value) +_Py_atomic_and_uint64(uint64_t *address, uint64_t value) { return __atomic_fetch_and(address, value, __ATOMIC_SEQ_CST); } static inline uintptr_t -_Py_atomic_and_uintptr(volatile uintptr_t *address, uintptr_t value) +_Py_atomic_and_uintptr(uintptr_t *address, uintptr_t value) { return __atomic_fetch_and(address, value, __ATOMIC_SEQ_CST); } static inline uint8_t -_Py_atomic_or_uint8(volatile uint8_t *address, uint8_t value) +_Py_atomic_or_uint8(uint8_t *address, uint8_t value) { return __atomic_fetch_or(address, value, __ATOMIC_SEQ_CST); } static inline uint16_t -_Py_atomic_or_uint16(volatile uint16_t *address, uint16_t value) +_Py_atomic_or_uint16(uint16_t *address, uint16_t value) { return __atomic_fetch_or(address, value, __ATOMIC_SEQ_CST); } static inline uint32_t -_Py_atomic_or_uint32(volatile uint32_t *address, uint32_t value) +_Py_atomic_or_uint32(uint32_t *address, uint32_t value) { return __atomic_fetch_or(address, value, __ATOMIC_SEQ_CST); } static inline uint64_t -_Py_atomic_or_uint64(volatile uint64_t *address, uint64_t value) +_Py_atomic_or_uint64(uint64_t *address, uint64_t value) { return __atomic_fetch_or(address, value, __ATOMIC_SEQ_CST); } static inline uintptr_t -_Py_atomic_or_uintptr(volatile uintptr_t *address, uintptr_t value) +_Py_atomic_or_uintptr(uintptr_t *address, uintptr_t value) { return __atomic_fetch_or(address, value, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_load_int(const volatile int *address) +_Py_atomic_load_int(const int *address) { return __atomic_load_n(address, __ATOMIC_SEQ_CST); } static inline int8_t -_Py_atomic_load_int8(const volatile int8_t *address) +_Py_atomic_load_int8(const int8_t *address) { return __atomic_load_n(address, __ATOMIC_SEQ_CST); } static inline int16_t -_Py_atomic_load_int16(const volatile int16_t *address) +_Py_atomic_load_int16(const int16_t *address) { return __atomic_load_n(address, __ATOMIC_SEQ_CST); } static inline int32_t -_Py_atomic_load_int32(const volatile int32_t *address) +_Py_atomic_load_int32(const int32_t *address) { return __atomic_load_n(address, __ATOMIC_SEQ_CST); } static inline int64_t -_Py_atomic_load_int64(const volatile int64_t *address) +_Py_atomic_load_int64(const int64_t *address) { return __atomic_load_n(address, __ATOMIC_SEQ_CST); } static inline intptr_t -_Py_atomic_load_intptr(const volatile intptr_t *address) +_Py_atomic_load_intptr(const intptr_t *address) { return __atomic_load_n(address, __ATOMIC_SEQ_CST); } static inline uint8_t -_Py_atomic_load_uint8(const volatile uint8_t *address) +_Py_atomic_load_uint8(const uint8_t *address) { return __atomic_load_n(address, __ATOMIC_SEQ_CST); } static inline uint16_t -_Py_atomic_load_uint16(const volatile uint16_t *address) +_Py_atomic_load_uint16(const uint16_t *address) { return __atomic_load_n(address, __ATOMIC_SEQ_CST); } static inline uint32_t -_Py_atomic_load_uint32(const volatile uint32_t *address) +_Py_atomic_load_uint32(const uint32_t *address) { return __atomic_load_n(address, __ATOMIC_SEQ_CST); } static inline uint64_t -_Py_atomic_load_uint64(const 
volatile uint64_t *address) +_Py_atomic_load_uint64(const uint64_t *address) { return __atomic_load_n(address, __ATOMIC_SEQ_CST); } static inline uintptr_t -_Py_atomic_load_uintptr(const volatile uintptr_t *address) +_Py_atomic_load_uintptr(const uintptr_t *address) { return __atomic_load_n(address, __ATOMIC_SEQ_CST); } static inline unsigned int -_Py_atomic_load_uint(const volatile unsigned int *address) +_Py_atomic_load_uint(const unsigned int *address) { return __atomic_load_n(address, __ATOMIC_SEQ_CST); } static inline Py_ssize_t -_Py_atomic_load_ssize(const volatile Py_ssize_t *address) +_Py_atomic_load_ssize(const Py_ssize_t *address) { return __atomic_load_n(address, __ATOMIC_SEQ_CST); } static inline void * -_Py_atomic_load_ptr(const volatile void *address) +_Py_atomic_load_ptr(const void *address) { - return (void *)__atomic_load_n((volatile void **)address, __ATOMIC_SEQ_CST); + return (void *)__atomic_load_n((void **)address, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_load_int_relaxed(const volatile int *address) +_Py_atomic_load_int_relaxed(const int *address) { return __atomic_load_n(address, __ATOMIC_RELAXED); } static inline int8_t -_Py_atomic_load_int8_relaxed(const volatile int8_t *address) +_Py_atomic_load_int8_relaxed(const int8_t *address) { return __atomic_load_n(address, __ATOMIC_RELAXED); } static inline int16_t -_Py_atomic_load_int16_relaxed(const volatile int16_t *address) +_Py_atomic_load_int16_relaxed(const int16_t *address) { return __atomic_load_n(address, __ATOMIC_RELAXED); } static inline int32_t -_Py_atomic_load_int32_relaxed(const volatile int32_t *address) +_Py_atomic_load_int32_relaxed(const int32_t *address) { return __atomic_load_n(address, __ATOMIC_RELAXED); } static inline int64_t -_Py_atomic_load_int64_relaxed(const volatile int64_t *address) +_Py_atomic_load_int64_relaxed(const int64_t *address) { return __atomic_load_n(address, __ATOMIC_RELAXED); } static inline intptr_t -_Py_atomic_load_intptr_relaxed(const volatile intptr_t *address) +_Py_atomic_load_intptr_relaxed(const intptr_t *address) { return __atomic_load_n(address, __ATOMIC_RELAXED); } static inline uint8_t -_Py_atomic_load_uint8_relaxed(const volatile uint8_t *address) +_Py_atomic_load_uint8_relaxed(const uint8_t *address) { return __atomic_load_n(address, __ATOMIC_RELAXED); } static inline uint16_t -_Py_atomic_load_uint16_relaxed(const volatile uint16_t *address) +_Py_atomic_load_uint16_relaxed(const uint16_t *address) { return __atomic_load_n(address, __ATOMIC_RELAXED); } static inline uint32_t -_Py_atomic_load_uint32_relaxed(const volatile uint32_t *address) +_Py_atomic_load_uint32_relaxed(const uint32_t *address) { return __atomic_load_n(address, __ATOMIC_RELAXED); } static inline uint64_t -_Py_atomic_load_uint64_relaxed(const volatile uint64_t *address) +_Py_atomic_load_uint64_relaxed(const uint64_t *address) { return __atomic_load_n(address, __ATOMIC_RELAXED); } static inline uintptr_t -_Py_atomic_load_uintptr_relaxed(const volatile uintptr_t *address) +_Py_atomic_load_uintptr_relaxed(const uintptr_t *address) { return __atomic_load_n(address, __ATOMIC_RELAXED); } static inline unsigned int -_Py_atomic_load_uint_relaxed(const volatile unsigned int *address) +_Py_atomic_load_uint_relaxed(const unsigned int *address) { return __atomic_load_n(address, __ATOMIC_RELAXED); } static inline Py_ssize_t -_Py_atomic_load_ssize_relaxed(const volatile Py_ssize_t *address) +_Py_atomic_load_ssize_relaxed(const Py_ssize_t *address) { return __atomic_load_n(address, __ATOMIC_RELAXED); } 
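Aside on the _relaxed wrappers in this hunk (a hypothetical example, not code from the patch): they map to __ATOMIC_RELAXED, which only guarantees an untorn read or write and imposes no ordering against other memory operations, whereas the plain variants above use __ATOMIC_SEQ_CST. The counter name below is an assumption for illustration.

    /* hypothetical statistics counter, bumped concurrently via _Py_atomic_add_uint64() */
    static uint64_t allocation_count;

    static uint64_t
    approx_allocations(void)
    {
        /* atomic but unordered: adequate for a monitoring snapshot */
        return _Py_atomic_load_uint64_relaxed(&allocation_count);
    }

    static uint64_t
    ordered_allocations(void)
    {
        /* additionally participates in the single total order of seq_cst operations */
        return _Py_atomic_load_uint64(&allocation_count);
    }
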
static inline void * -_Py_atomic_load_ptr_relaxed(const volatile void *address) +_Py_atomic_load_ptr_relaxed(const void *address) { - return (void *)__atomic_load_n((const volatile void **)address, __ATOMIC_RELAXED); + return (void *)__atomic_load_n((const void **)address, __ATOMIC_RELAXED); } static inline void -_Py_atomic_store_int(volatile int *address, int value) +_Py_atomic_store_int(int *address, int value) { __atomic_store_n(address, value, __ATOMIC_SEQ_CST); } static inline void -_Py_atomic_store_int8(volatile int8_t *address, int8_t value) +_Py_atomic_store_int8(int8_t *address, int8_t value) { __atomic_store_n(address, value, __ATOMIC_SEQ_CST); } static inline void -_Py_atomic_store_int16(volatile int16_t *address, int16_t value) +_Py_atomic_store_int16(int16_t *address, int16_t value) { __atomic_store_n(address, value, __ATOMIC_SEQ_CST); } static inline void -_Py_atomic_store_int32(volatile int32_t *address, int32_t value) +_Py_atomic_store_int32(int32_t *address, int32_t value) { __atomic_store_n(address, value, __ATOMIC_SEQ_CST); } static inline void -_Py_atomic_store_int64(volatile int64_t *address, int64_t value) +_Py_atomic_store_int64(int64_t *address, int64_t value) { __atomic_store_n(address, value, __ATOMIC_SEQ_CST); } static inline void -_Py_atomic_store_intptr(volatile intptr_t *address, intptr_t value) +_Py_atomic_store_intptr(intptr_t *address, intptr_t value) { __atomic_store_n(address, value, __ATOMIC_SEQ_CST); } static inline void -_Py_atomic_store_uint8(volatile uint8_t *address, uint8_t value) +_Py_atomic_store_uint8(uint8_t *address, uint8_t value) { __atomic_store_n(address, value, __ATOMIC_SEQ_CST); } static inline void -_Py_atomic_store_uint16(volatile uint16_t *address, uint16_t value) +_Py_atomic_store_uint16(uint16_t *address, uint16_t value) { __atomic_store_n(address, value, __ATOMIC_SEQ_CST); } static inline void -_Py_atomic_store_uint32(volatile uint32_t *address, uint32_t value) +_Py_atomic_store_uint32(uint32_t *address, uint32_t value) { __atomic_store_n(address, value, __ATOMIC_SEQ_CST); } static inline void -_Py_atomic_store_uint64(volatile uint64_t *address, uint64_t value) +_Py_atomic_store_uint64(uint64_t *address, uint64_t value) { __atomic_store_n(address, value, __ATOMIC_SEQ_CST); } static inline void -_Py_atomic_store_uintptr(volatile uintptr_t *address, uintptr_t value) +_Py_atomic_store_uintptr(uintptr_t *address, uintptr_t value) { __atomic_store_n(address, value, __ATOMIC_SEQ_CST); } static inline void -_Py_atomic_store_uint(volatile unsigned int *address, unsigned int value) +_Py_atomic_store_uint(unsigned int *address, unsigned int value) { __atomic_store_n(address, value, __ATOMIC_SEQ_CST); } static inline void -_Py_atomic_store_ptr(volatile void *address, void *value) +_Py_atomic_store_ptr(void *address, void *value) { - __atomic_store_n((volatile void **)address, value, __ATOMIC_SEQ_CST); + __atomic_store_n((void **)address, value, __ATOMIC_SEQ_CST); } static inline void -_Py_atomic_store_ssize(volatile Py_ssize_t *address, Py_ssize_t value) +_Py_atomic_store_ssize(Py_ssize_t *address, Py_ssize_t value) { __atomic_store_n(address, value, __ATOMIC_SEQ_CST); } static inline void -_Py_atomic_store_int_relaxed(volatile int *address, int value) +_Py_atomic_store_int_relaxed(int *address, int value) { __atomic_store_n(address, value, __ATOMIC_RELAXED); } static inline void -_Py_atomic_store_int8_relaxed(volatile int8_t *address, int8_t value) +_Py_atomic_store_int8_relaxed(int8_t *address, int8_t value) { __atomic_store_n(address, 
value, __ATOMIC_RELAXED); } static inline void -_Py_atomic_store_int16_relaxed(volatile int16_t *address, int16_t value) +_Py_atomic_store_int16_relaxed(int16_t *address, int16_t value) { __atomic_store_n(address, value, __ATOMIC_RELAXED); } static inline void -_Py_atomic_store_int32_relaxed(volatile int32_t *address, int32_t value) +_Py_atomic_store_int32_relaxed(int32_t *address, int32_t value) { __atomic_store_n(address, value, __ATOMIC_RELAXED); } static inline void -_Py_atomic_store_int64_relaxed(volatile int64_t *address, int64_t value) +_Py_atomic_store_int64_relaxed(int64_t *address, int64_t value) { __atomic_store_n(address, value, __ATOMIC_RELAXED); } static inline void -_Py_atomic_store_intptr_relaxed(volatile intptr_t *address, intptr_t value) +_Py_atomic_store_intptr_relaxed(intptr_t *address, intptr_t value) { __atomic_store_n(address, value, __ATOMIC_RELAXED); } static inline void -_Py_atomic_store_uint8_relaxed(volatile uint8_t *address, uint8_t value) +_Py_atomic_store_uint8_relaxed(uint8_t *address, uint8_t value) { __atomic_store_n(address, value, __ATOMIC_RELAXED); } static inline void -_Py_atomic_store_uint16_relaxed(volatile uint16_t *address, uint16_t value) +_Py_atomic_store_uint16_relaxed(uint16_t *address, uint16_t value) { __atomic_store_n(address, value, __ATOMIC_RELAXED); } static inline void -_Py_atomic_store_uint32_relaxed(volatile uint32_t *address, uint32_t value) +_Py_atomic_store_uint32_relaxed(uint32_t *address, uint32_t value) { __atomic_store_n(address, value, __ATOMIC_RELAXED); } static inline void -_Py_atomic_store_uint64_relaxed(volatile uint64_t *address, uint64_t value) +_Py_atomic_store_uint64_relaxed(uint64_t *address, uint64_t value) { __atomic_store_n(address, value, __ATOMIC_RELAXED); } static inline void -_Py_atomic_store_uintptr_relaxed(volatile uintptr_t *address, uintptr_t value) +_Py_atomic_store_uintptr_relaxed(uintptr_t *address, uintptr_t value) { __atomic_store_n(address, value, __ATOMIC_RELAXED); } static inline void -_Py_atomic_store_uint_relaxed(volatile unsigned int *address, unsigned int value) +_Py_atomic_store_uint_relaxed(unsigned int *address, unsigned int value) { __atomic_store_n(address, value, __ATOMIC_RELAXED); } static inline void -_Py_atomic_store_ptr_relaxed(volatile void *address, void *value) +_Py_atomic_store_ptr_relaxed(void *address, void *value) { - __atomic_store_n((volatile void **)address, value, __ATOMIC_RELAXED); + __atomic_store_n((void **)address, value, __ATOMIC_RELAXED); } static inline void -_Py_atomic_store_ssize_relaxed(volatile Py_ssize_t *address, Py_ssize_t value) +_Py_atomic_store_ssize_relaxed(Py_ssize_t *address, Py_ssize_t value) { __atomic_store_n(address, value, __ATOMIC_RELAXED); } static inline void -_Py_atomic_store_uint64_release(volatile uint64_t *address, uint64_t value) +_Py_atomic_store_uint64_release(uint64_t *address, uint64_t value) { __atomic_store_n(address, value, __ATOMIC_RELEASE); } static inline void -_Py_atomic_store_ptr_release(volatile void *address, void *value) +_Py_atomic_store_ptr_release(void *address, void *value) { - __atomic_store_n((volatile void **)address, value, __ATOMIC_RELEASE); + __atomic_store_n((void **)address, value, __ATOMIC_RELEASE); } static inline void diff --git a/Include/cpython/pyatomic_msc.h b/Include/cpython/pyatomic_msc.h index 86c49ae9f538d2..291952dbe1c3a9 100644 --- a/Include/cpython/pyatomic_msc.h +++ b/Include/cpython/pyatomic_msc.h @@ -9,45 +9,39 @@ static inline int -_Py_atomic_add_int(volatile int *address, int value) 
+_Py_atomic_add_int(int *address, int value) { return (int)_InterlockedExchangeAdd((volatile long*)address, (long)value); } -static inline unsigned int -_Py_atomic_add_uint(volatile unsigned int *address, unsigned int value) -{ - return (unsigned int)_InterlockedExchangeAdd((volatile long*)address, (long)value); -} - static inline int8_t -_Py_atomic_add_int8(volatile int8_t *address, int8_t value) +_Py_atomic_add_int8(int8_t *address, int8_t value) { return (int8_t)_InterlockedExchangeAdd8((volatile char*)address, (char)value); } static inline int16_t -_Py_atomic_add_int16(volatile int16_t *address, int16_t value) +_Py_atomic_add_int16(int16_t *address, int16_t value) { return (int16_t)_InterlockedExchangeAdd16((volatile short*)address, (short)value); } static inline int32_t -_Py_atomic_add_int32(volatile int32_t *address, int32_t value) +_Py_atomic_add_int32(int32_t *address, int32_t value) { return (int32_t)_InterlockedExchangeAdd((volatile long*)address, (long)value); } static inline int64_t -_Py_atomic_add_int64(volatile int64_t *address, int64_t value) +_Py_atomic_add_int64(int64_t *address, int64_t value) { #if defined(_M_X64) || defined(_M_ARM64) return (int64_t)_InterlockedExchangeAdd64((volatile __int64*)address, (__int64)value); #else for (;;) { - int64_t old_value = *address; - int64_t new_value = old_value + value; - if (old_value == _InterlockedCompareExchange64((volatile __int64*)address, (__int64)new_value, (__int64)old_value)) { + __int64 old_value = *(volatile __int64*)address; + __int64 new_value = old_value + (__int64)value; + if (old_value == _InterlockedCompareExchange64((volatile __int64*)address, new_value, old_value)) { return old_value; } } @@ -55,7 +49,7 @@ _Py_atomic_add_int64(volatile int64_t *address, int64_t value) } static inline intptr_t -_Py_atomic_add_intptr(volatile intptr_t *address, intptr_t value) +_Py_atomic_add_intptr(intptr_t *address, intptr_t value) { #if SIZEOF_VOID_P == 8 return (intptr_t)_InterlockedExchangeAdd64((volatile __int64*)address, (__int64)value); @@ -64,32 +58,38 @@ _Py_atomic_add_intptr(volatile intptr_t *address, intptr_t value) #endif } +static inline unsigned int +_Py_atomic_add_uint(unsigned int *address, unsigned int value) +{ + return (unsigned int)_InterlockedExchangeAdd((volatile long*)address, (long)value); +} + static inline uint8_t -_Py_atomic_add_uint8(volatile uint8_t *address, uint8_t value) +_Py_atomic_add_uint8(uint8_t *address, uint8_t value) { return (uint8_t)_InterlockedExchangeAdd8((volatile char*)address, (char)value); } static inline uint16_t -_Py_atomic_add_uint16(volatile uint16_t *address, uint16_t value) +_Py_atomic_add_uint16(uint16_t *address, uint16_t value) { return (uint16_t)_InterlockedExchangeAdd16((volatile short*)address, (short)value); } static inline uint32_t -_Py_atomic_add_uint32(volatile uint32_t *address, uint32_t value) +_Py_atomic_add_uint32(uint32_t *address, uint32_t value) { return (uint32_t)_InterlockedExchangeAdd((volatile long*)address, (long)value); } static inline uint64_t -_Py_atomic_add_uint64(volatile uint64_t *address, uint64_t value) +_Py_atomic_add_uint64(uint64_t *address, uint64_t value) { return (uint64_t)_Py_atomic_add_int64((volatile int64_t*)address, (int64_t)value); } static inline uintptr_t -_Py_atomic_add_uintptr(volatile uintptr_t *address, uintptr_t value) +_Py_atomic_add_uintptr(uintptr_t *address, uintptr_t value) { #if SIZEOF_VOID_P == 8 return (uintptr_t)_InterlockedExchangeAdd64((volatile __int64*)address, (__int64)value); @@ -99,7 +99,7 @@ 
_Py_atomic_add_uintptr(volatile uintptr_t *address, uintptr_t value) } static inline Py_ssize_t -_Py_atomic_add_ssize(volatile Py_ssize_t *address, Py_ssize_t value) +_Py_atomic_add_ssize(Py_ssize_t *address, Py_ssize_t value) { #if SIZEOF_SIZE_T == 8 return (Py_ssize_t)_InterlockedExchangeAdd64((volatile __int64*)address, (__int64)value); @@ -110,79 +110,79 @@ _Py_atomic_add_ssize(volatile Py_ssize_t *address, Py_ssize_t value) static inline int -_Py_atomic_compare_exchange_int(volatile int *address, int expected, int value) +_Py_atomic_compare_exchange_int(int *address, int expected, int value) { return (long)expected == _InterlockedCompareExchange((volatile long*)address, (long)value, (long)expected); } static inline int -_Py_atomic_compare_exchange_int8(volatile int8_t *address, int8_t expected, int8_t value) +_Py_atomic_compare_exchange_int8(int8_t *address, int8_t expected, int8_t value) { return (char)expected == _InterlockedCompareExchange8((volatile char*)address, (char)value, (char)expected); } static inline int -_Py_atomic_compare_exchange_int16(volatile int16_t *address, int16_t expected, int16_t value) +_Py_atomic_compare_exchange_int16(int16_t *address, int16_t expected, int16_t value) { return (short)expected == _InterlockedCompareExchange16((volatile short*)address, (short)value, (short)expected); } static inline int -_Py_atomic_compare_exchange_int32(volatile int32_t *address, int32_t expected, int32_t value) +_Py_atomic_compare_exchange_int32(int32_t *address, int32_t expected, int32_t value) { return (long)expected == _InterlockedCompareExchange((volatile long*)address, (long)value, (long)expected); } static inline int -_Py_atomic_compare_exchange_int64(volatile int64_t *address, int64_t expected, int64_t value) +_Py_atomic_compare_exchange_int64(int64_t *address, int64_t expected, int64_t value) { return (__int64)expected == _InterlockedCompareExchange64((volatile __int64*)address, (__int64)value, (__int64)expected); } static inline int -_Py_atomic_compare_exchange_intptr(volatile intptr_t *address, intptr_t expected, intptr_t value) +_Py_atomic_compare_exchange_intptr(intptr_t *address, intptr_t expected, intptr_t value) { return (void *)expected == _InterlockedCompareExchangePointer((void * volatile *)address, (void *)value, (void *)expected); } static inline int -_Py_atomic_compare_exchange_uint8(volatile uint8_t *address, uint8_t expected, uint8_t value) +_Py_atomic_compare_exchange_uint8(uint8_t *address, uint8_t expected, uint8_t value) { return (char)expected == _InterlockedCompareExchange8((volatile char*)address, (char)value, (char)expected); } static inline int -_Py_atomic_compare_exchange_uint16(volatile uint16_t *address, uint16_t expected, uint16_t value) +_Py_atomic_compare_exchange_uint16(uint16_t *address, uint16_t expected, uint16_t value) { return (short)expected == _InterlockedCompareExchange16((volatile short*)address, (short)value, (short)expected); } static inline int -_Py_atomic_compare_exchange_uint(volatile unsigned int *address, unsigned int expected, unsigned int value) +_Py_atomic_compare_exchange_uint(unsigned int *address, unsigned int expected, unsigned int value) { return (long)expected == _InterlockedCompareExchange((volatile long*)address, (long)value, (long)expected); } static inline int -_Py_atomic_compare_exchange_uint32(volatile uint32_t *address, uint32_t expected, uint32_t value) +_Py_atomic_compare_exchange_uint32(uint32_t *address, uint32_t expected, uint32_t value) { return (long)expected == 
_InterlockedCompareExchange((volatile long*)address, (long)value, (long)expected); } static inline int -_Py_atomic_compare_exchange_uint64(volatile uint64_t *address, uint64_t expected, uint64_t value) +_Py_atomic_compare_exchange_uint64(uint64_t *address, uint64_t expected, uint64_t value) { return (__int64)expected == _InterlockedCompareExchange64((volatile __int64*)address, (__int64)value, (__int64)expected); } static inline int -_Py_atomic_compare_exchange_uintptr(volatile uintptr_t *address, uintptr_t expected, uintptr_t value) +_Py_atomic_compare_exchange_uintptr(uintptr_t *address, uintptr_t expected, uintptr_t value) { return (void *)expected == _InterlockedCompareExchangePointer((void * volatile *)address, (void *)value, (void *)expected); } static inline int -_Py_atomic_compare_exchange_ssize(volatile Py_ssize_t *address, Py_ssize_t expected, Py_ssize_t value) +_Py_atomic_compare_exchange_ssize(Py_ssize_t *address, Py_ssize_t expected, Py_ssize_t value) { #if SIZEOF_SIZE_T == 8 return (__int64)expected == _InterlockedCompareExchange64((volatile __int64*)address, (__int64)value, (__int64)expected); @@ -192,45 +192,45 @@ _Py_atomic_compare_exchange_ssize(volatile Py_ssize_t *address, Py_ssize_t expec } static inline int -_Py_atomic_compare_exchange_ptr(volatile void *address, void *expected, void *value) +_Py_atomic_compare_exchange_ptr(void *address, void *expected, void *value) { return (void *)expected == _InterlockedCompareExchangePointer((void * volatile *)address, (void *)value, (void *)expected); } static inline int -_Py_atomic_exchange_int(volatile int *address, int value) +_Py_atomic_exchange_int(int *address, int value) { return (int)_InterlockedExchange((volatile long*)address, (long)value); } static inline int8_t -_Py_atomic_exchange_int8(volatile int8_t *address, int8_t value) +_Py_atomic_exchange_int8(int8_t *address, int8_t value) { return (int8_t)_InterlockedExchange8((volatile char*)address, (char)value); } static inline int16_t -_Py_atomic_exchange_int16(volatile int16_t *address, int16_t value) +_Py_atomic_exchange_int16(int16_t *address, int16_t value) { return (int16_t)_InterlockedExchange16((volatile short*)address, (short)value); } static inline int32_t -_Py_atomic_exchange_int32(volatile int32_t *address, int32_t value) +_Py_atomic_exchange_int32(int32_t *address, int32_t value) { return (int32_t)_InterlockedExchange((volatile long*)address, (long)value); } static inline int64_t -_Py_atomic_exchange_int64(volatile int64_t *address, int64_t value) +_Py_atomic_exchange_int64(int64_t *address, int64_t value) { #if defined(_M_X64) || defined(_M_ARM64) return (int64_t)_InterlockedExchange64((volatile __int64*)address, (__int64)value); #else for (;;) { - int64_t old_value = *address; - int64_t new_value = value; - if (old_value == _InterlockedCompareExchange64((volatile __int64*)address, (__int64)new_value, (__int64)old_value)) { + __int64 old_value = *(volatile __int64*)address; + __int64 new_value = (__int64)value; + if (old_value == _InterlockedCompareExchange64((volatile __int64*)address, new_value, old_value)) { return old_value; } } @@ -238,49 +238,49 @@ _Py_atomic_exchange_int64(volatile int64_t *address, int64_t value) } static inline intptr_t -_Py_atomic_exchange_intptr(volatile intptr_t *address, intptr_t value) +_Py_atomic_exchange_intptr(intptr_t *address, intptr_t value) { return (intptr_t)_InterlockedExchangePointer((void * volatile *)address, (void *)value); } static inline unsigned int -_Py_atomic_exchange_uint(volatile unsigned int *address, 
unsigned int value) +_Py_atomic_exchange_uint(unsigned int *address, unsigned int value) { return (unsigned int)_InterlockedExchange((volatile long*)address, (long)value); } static inline uint8_t -_Py_atomic_exchange_uint8(volatile uint8_t *address, uint8_t value) +_Py_atomic_exchange_uint8(uint8_t *address, uint8_t value) { return (uint8_t)_InterlockedExchange8((volatile char*)address, (char)value); } static inline uint16_t -_Py_atomic_exchange_uint16(volatile uint16_t *address, uint16_t value) +_Py_atomic_exchange_uint16(uint16_t *address, uint16_t value) { return (uint16_t)_InterlockedExchange16((volatile short*)address, (short)value); } static inline uint32_t -_Py_atomic_exchange_uint32(volatile uint32_t *address, uint32_t value) +_Py_atomic_exchange_uint32(uint32_t *address, uint32_t value) { return (uint32_t)_InterlockedExchange((volatile long*)address, (long)value); } static inline uint64_t -_Py_atomic_exchange_uint64(volatile uint64_t *address, uint64_t value) +_Py_atomic_exchange_uint64(uint64_t *address, uint64_t value) { return (uint64_t)_Py_atomic_exchange_int64((volatile __int64*)address, (__int64)value); } static inline uintptr_t -_Py_atomic_exchange_uintptr(volatile uintptr_t *address, uintptr_t value) +_Py_atomic_exchange_uintptr(uintptr_t *address, uintptr_t value) { return (uintptr_t)_InterlockedExchangePointer((void * volatile *)address, (void *)value); } static inline Py_ssize_t -_Py_atomic_exchange_ssize(volatile Py_ssize_t *address, Py_ssize_t value) +_Py_atomic_exchange_ssize(Py_ssize_t *address, Py_ssize_t value) { #if SIZEOF_SIZE_T == 8 return (Py_ssize_t)_InterlockedExchange64((volatile __int64*)address, (__int64)value); @@ -290,39 +290,39 @@ _Py_atomic_exchange_ssize(volatile Py_ssize_t *address, Py_ssize_t value) } static inline void * -_Py_atomic_exchange_ptr(volatile void *address, void *value) +_Py_atomic_exchange_ptr(void *address, void *value) { return (void *)_InterlockedExchangePointer((void * volatile *)address, (void *)value); } static inline uint8_t -_Py_atomic_and_uint8(volatile uint8_t *address, uint8_t value) +_Py_atomic_and_uint8(uint8_t *address, uint8_t value) { return (uint8_t)_InterlockedAnd8((volatile char*)address, (char)value); } static inline uint16_t -_Py_atomic_and_uint16(volatile uint16_t *address, uint16_t value) +_Py_atomic_and_uint16(uint16_t *address, uint16_t value) { return (uint16_t)_InterlockedAnd16((volatile short*)address, (short)value); } static inline uint32_t -_Py_atomic_and_uint32(volatile uint32_t *address, uint32_t value) +_Py_atomic_and_uint32(uint32_t *address, uint32_t value) { return (uint32_t)_InterlockedAnd((volatile long*)address, (long)value); } static inline uint64_t -_Py_atomic_and_uint64(volatile uint64_t *address, uint64_t value) +_Py_atomic_and_uint64(uint64_t *address, uint64_t value) { #if defined(_M_X64) || defined(_M_ARM64) return (uint64_t)_InterlockedAnd64((volatile __int64*)address, (__int64)value); #else for (;;) { - uint64_t old_value = *address; + uint64_t old_value = *(volatile uint64_t*)address; uint64_t new_value = old_value & value; - if (old_value == _InterlockedCompareExchange64((volatile __int64*)address, (__int64)new_value, (__int64)old_value)) { + if ((__int64)old_value == _InterlockedCompareExchange64((volatile __int64*)address, (__int64)new_value, (__int64)old_value)) { return old_value; } } @@ -330,7 +330,7 @@ _Py_atomic_and_uint64(volatile uint64_t *address, uint64_t value) } static inline uintptr_t -_Py_atomic_and_uintptr(volatile uintptr_t *address, uintptr_t value) 
+_Py_atomic_and_uintptr(uintptr_t *address, uintptr_t value) { #if SIZEOF_VOID_P == 8 return (uintptr_t)_InterlockedAnd64((volatile __int64*)address, (__int64)value); @@ -340,31 +340,31 @@ _Py_atomic_and_uintptr(volatile uintptr_t *address, uintptr_t value) } static inline uint8_t -_Py_atomic_or_uint8(volatile uint8_t *address, uint8_t value) +_Py_atomic_or_uint8(uint8_t *address, uint8_t value) { return (uint8_t)_InterlockedOr8((volatile char*)address, (char)value); } static inline uint16_t -_Py_atomic_or_uint16(volatile uint16_t *address, uint16_t value) +_Py_atomic_or_uint16(uint16_t *address, uint16_t value) { return (uint16_t)_InterlockedOr16((volatile short*)address, (short)value); } static inline uint32_t -_Py_atomic_or_uint32(volatile uint32_t *address, uint32_t value) +_Py_atomic_or_uint32(uint32_t *address, uint32_t value) { return (uint32_t)_InterlockedOr((volatile long*)address, (long)value); } static inline uint64_t -_Py_atomic_or_uint64(volatile uint64_t *address, uint64_t value) +_Py_atomic_or_uint64(uint64_t *address, uint64_t value) { #if defined(_M_X64) || defined(_M_ARM64) return (uint64_t)_InterlockedOr64((volatile __int64*)address, (__int64)value); #else for (;;) { - uint64_t old_value = *address; + uint64_t old_value = *(volatile uint64_t *)address; uint64_t new_value = old_value | value; if (old_value == _InterlockedCompareExchange64((volatile __int64*)address, (__int64)new_value, (__int64)old_value)) { return old_value; @@ -374,7 +374,7 @@ _Py_atomic_or_uint64(volatile uint64_t *address, uint64_t value) } static inline uintptr_t -_Py_atomic_or_uintptr(volatile uintptr_t *address, uintptr_t value) +_Py_atomic_or_uintptr(uintptr_t *address, uintptr_t value) { #if SIZEOF_VOID_P == 8 return (uintptr_t)_InterlockedOr64((volatile __int64*)address, (__int64)value); @@ -384,10 +384,10 @@ _Py_atomic_or_uintptr(volatile uintptr_t *address, uintptr_t value) } static inline int -_Py_atomic_load_int(const volatile int *address) +_Py_atomic_load_int(const int *address) { #if defined(_M_X64) || defined(_M_IX86) - return *address; + return *(volatile int *)address; #elif defined(_M_ARM64) return (int)__ldar32((unsigned __int32 volatile*)address); #else @@ -396,10 +396,10 @@ _Py_atomic_load_int(const volatile int *address) } static inline int8_t -_Py_atomic_load_int8(const volatile int8_t *address) +_Py_atomic_load_int8(const int8_t *address) { #if defined(_M_X64) || defined(_M_IX86) - return *address; + return *(volatile int8_t *)address; #elif defined(_M_ARM64) return (int8_t)__ldar8((unsigned __int8 volatile*)address); #else @@ -408,10 +408,10 @@ _Py_atomic_load_int8(const volatile int8_t *address) } static inline int16_t -_Py_atomic_load_int16(const volatile int16_t *address) +_Py_atomic_load_int16(const int16_t *address) { #if defined(_M_X64) || defined(_M_IX86) - return *address; + return *(volatile int16_t *)address; #elif defined(_M_ARM64) return (int16_t)__ldar16((unsigned __int16 volatile*)address); #else @@ -420,10 +420,10 @@ _Py_atomic_load_int16(const volatile int16_t *address) } static inline int32_t -_Py_atomic_load_int32(const volatile int32_t *address) +_Py_atomic_load_int32(const int32_t *address) { #if defined(_M_X64) || defined(_M_IX86) - return *address; + return *(volatile int32_t *)address; #elif defined(_M_ARM64) return (int32_t)__ldar32((unsigned __int32 volatile*)address); #else @@ -432,10 +432,10 @@ _Py_atomic_load_int32(const volatile int32_t *address) } static inline int64_t -_Py_atomic_load_int64(const volatile int64_t *address) 
+_Py_atomic_load_int64(const int64_t *address) { #if defined(_M_X64) || defined(_M_IX86) - return *address; + return *(volatile int64_t *)address; #elif defined(_M_ARM64) return __ldar64((unsigned __int64 volatile*)address); #else @@ -444,10 +444,10 @@ _Py_atomic_load_int64(const volatile int64_t *address) } static inline intptr_t -_Py_atomic_load_intptr(const volatile intptr_t *address) +_Py_atomic_load_intptr(const intptr_t *address) { #if defined(_M_X64) || defined(_M_IX86) - return *address; + return *(volatile intptr_t *)address; #elif defined(_M_ARM64) return __ldar64((unsigned __int64 volatile*)address); #else @@ -456,10 +456,10 @@ _Py_atomic_load_intptr(const volatile intptr_t *address) } static inline uint8_t -_Py_atomic_load_uint8(const volatile uint8_t *address) +_Py_atomic_load_uint8(const uint8_t *address) { #if defined(_M_X64) || defined(_M_IX86) - return *address; + return *(volatile uint8_t *)address; #elif defined(_M_ARM64) return __ldar8((unsigned __int8 volatile*)address); #else @@ -468,10 +468,10 @@ _Py_atomic_load_uint8(const volatile uint8_t *address) } static inline uint16_t -_Py_atomic_load_uint16(const volatile uint16_t *address) +_Py_atomic_load_uint16(const uint16_t *address) { #if defined(_M_X64) || defined(_M_IX86) - return *address; + return *(volatile uint16_t *)address; #elif defined(_M_ARM64) return __ldar16((unsigned __int16 volatile*)address); #else @@ -480,10 +480,10 @@ _Py_atomic_load_uint16(const volatile uint16_t *address) } static inline uint32_t -_Py_atomic_load_uint32(const volatile uint32_t *address) +_Py_atomic_load_uint32(const uint32_t *address) { #if defined(_M_X64) || defined(_M_IX86) - return *address; + return *(volatile uint32_t *)address; #elif defined(_M_ARM64) return __ldar32((unsigned __int32 volatile*)address); #else @@ -492,10 +492,10 @@ _Py_atomic_load_uint32(const volatile uint32_t *address) } static inline uint64_t -_Py_atomic_load_uint64(const volatile uint64_t *address) +_Py_atomic_load_uint64(const uint64_t *address) { #if defined(_M_X64) || defined(_M_IX86) - return *address; + return *(volatile uint64_t *)address; #elif defined(_M_ARM64) return __ldar64((unsigned __int64 volatile*)address); #else @@ -504,10 +504,10 @@ _Py_atomic_load_uint64(const volatile uint64_t *address) } static inline uintptr_t -_Py_atomic_load_uintptr(const volatile uintptr_t *address) +_Py_atomic_load_uintptr(const uintptr_t *address) { #if defined(_M_X64) || defined(_M_IX86) - return *address; + return *(volatile uintptr_t *)address; #elif defined(_M_ARM64) return __ldar64((unsigned __int64 volatile*)address); #else @@ -516,10 +516,10 @@ _Py_atomic_load_uintptr(const volatile uintptr_t *address) } static inline unsigned int -_Py_atomic_load_uint(const volatile unsigned int *address) +_Py_atomic_load_uint(const unsigned int *address) { #if defined(_M_X64) || defined(_M_IX86) - return *address; + return *(volatile unsigned int *)address; #elif defined(_M_ARM64) return __ldar32((unsigned __int32 volatile*)address); #else @@ -528,10 +528,10 @@ _Py_atomic_load_uint(const volatile unsigned int *address) } static inline Py_ssize_t -_Py_atomic_load_ssize(const volatile Py_ssize_t* address) +_Py_atomic_load_ssize(const Py_ssize_t *address) { #if defined(_M_X64) || defined(_M_IX86) - return *address; + return *(volatile Py_ssize_t *)address; #elif defined(_M_ARM64) return __ldar64((unsigned __int64 volatile*)address); #else @@ -540,10 +540,10 @@ _Py_atomic_load_ssize(const volatile Py_ssize_t* address) } static inline void * -_Py_atomic_load_ptr(const volatile 
void *address) +_Py_atomic_load_ptr(const void *address) { #if defined(_M_X64) || defined(_M_IX86) - return *(void* volatile*)address; + return *(void * volatile *)address; #elif defined(_M_ARM64) return (void *)__ldar64((unsigned __int64 volatile*)address); #else @@ -552,174 +552,173 @@ _Py_atomic_load_ptr(const volatile void *address) } static inline int -_Py_atomic_load_int_relaxed(const volatile int* address) +_Py_atomic_load_int_relaxed(const int *address) { - return *address; + return *(volatile int *)address; } static inline int8_t -_Py_atomic_load_int8_relaxed(const volatile int8_t* address) +_Py_atomic_load_int8_relaxed(const int8_t *address) { - return *address; + return *(volatile int8_t *)address; } static inline int16_t -_Py_atomic_load_int16_relaxed(const volatile int16_t* address) +_Py_atomic_load_int16_relaxed(const int16_t *address) { - return *address; + return *(volatile int16_t *)address; } static inline int32_t -_Py_atomic_load_int32_relaxed(const volatile int32_t* address) +_Py_atomic_load_int32_relaxed(const int32_t *address) { - return *address; + return *(volatile int32_t *)address; } static inline int64_t -_Py_atomic_load_int64_relaxed(const volatile int64_t* address) +_Py_atomic_load_int64_relaxed(const int64_t *address) { - return *address; + return *(volatile int64_t *)address; } static inline intptr_t -_Py_atomic_load_intptr_relaxed(const volatile intptr_t* address) +_Py_atomic_load_intptr_relaxed(const intptr_t *address) { - return *address; + return *(volatile intptr_t *)address; } static inline uint8_t -_Py_atomic_load_uint8_relaxed(const volatile uint8_t* address) +_Py_atomic_load_uint8_relaxed(const uint8_t *address) { - return *address; + return *(volatile uint8_t *)address; } static inline uint16_t -_Py_atomic_load_uint16_relaxed(const volatile uint16_t* address) +_Py_atomic_load_uint16_relaxed(const uint16_t *address) { - return *address; + return *(volatile uint16_t *)address; } static inline uint32_t -_Py_atomic_load_uint32_relaxed(const volatile uint32_t* address) +_Py_atomic_load_uint32_relaxed(const uint32_t *address) { - return *address; + return *(volatile uint32_t *)address; } static inline uint64_t -_Py_atomic_load_uint64_relaxed(const volatile uint64_t* address) +_Py_atomic_load_uint64_relaxed(const uint64_t *address) { - return *address; + return *(volatile uint64_t *)address; } static inline uintptr_t -_Py_atomic_load_uintptr_relaxed(const volatile uintptr_t* address) +_Py_atomic_load_uintptr_relaxed(const uintptr_t *address) { - return *address; + return *(volatile uintptr_t *)address; } static inline unsigned int -_Py_atomic_load_uint_relaxed(const volatile unsigned int *address) +_Py_atomic_load_uint_relaxed(const unsigned int *address) { - return *address; + return *(volatile unsigned int *)address; } static inline Py_ssize_t -_Py_atomic_load_ssize_relaxed(const volatile Py_ssize_t* address) +_Py_atomic_load_ssize_relaxed(const Py_ssize_t *address) { - return *address; + return *(volatile Py_ssize_t *)address; } static inline void* -_Py_atomic_load_ptr_relaxed(const volatile void* address) +_Py_atomic_load_ptr_relaxed(const void *address) { return *(void * volatile *)address; } - static inline void -_Py_atomic_store_int(volatile int *address, int value) +_Py_atomic_store_int(int *address, int value) { _InterlockedExchange((volatile long*)address, (long)value); } static inline void -_Py_atomic_store_int8(volatile int8_t *address, int8_t value) +_Py_atomic_store_int8(int8_t *address, int8_t value) { _InterlockedExchange8((volatile 
char*)address, (char)value); } static inline void -_Py_atomic_store_int16(volatile int16_t *address, int16_t value) +_Py_atomic_store_int16(int16_t *address, int16_t value) { _InterlockedExchange16((volatile short*)address, (short)value); } static inline void -_Py_atomic_store_int32(volatile int32_t *address, int32_t value) +_Py_atomic_store_int32(int32_t *address, int32_t value) { _InterlockedExchange((volatile long*)address, (long)value); } static inline void -_Py_atomic_store_int64(volatile int64_t *address, int64_t value) +_Py_atomic_store_int64(int64_t *address, int64_t value) { _Py_atomic_exchange_int64(address, value); } static inline void -_Py_atomic_store_intptr(volatile intptr_t *address, intptr_t value) +_Py_atomic_store_intptr(intptr_t *address, intptr_t value) { _InterlockedExchangePointer((void * volatile *)address, (void *)value); } static inline void -_Py_atomic_store_uint8(volatile uint8_t *address, uint8_t value) +_Py_atomic_store_uint8(uint8_t *address, uint8_t value) { _InterlockedExchange8((volatile char*)address, (char)value); } static inline void -_Py_atomic_store_uint16(volatile uint16_t *address, uint16_t value) +_Py_atomic_store_uint16(uint16_t *address, uint16_t value) { _InterlockedExchange16((volatile short*)address, (short)value); } static inline void -_Py_atomic_store_uint32(volatile uint32_t *address, uint32_t value) +_Py_atomic_store_uint32(uint32_t *address, uint32_t value) { _InterlockedExchange((volatile long*)address, (long)value); } static inline void -_Py_atomic_store_uint64(volatile uint64_t *address, uint64_t value) +_Py_atomic_store_uint64(uint64_t *address, uint64_t value) { _Py_atomic_exchange_int64((volatile __int64*)address, (__int64)value); } static inline void -_Py_atomic_store_uintptr(volatile uintptr_t *address, uintptr_t value) +_Py_atomic_store_uintptr(uintptr_t *address, uintptr_t value) { _InterlockedExchangePointer((void * volatile *)address, (void *)value); } static inline void -_Py_atomic_store_uint(volatile unsigned int *address, unsigned int value) +_Py_atomic_store_uint(unsigned int *address, unsigned int value) { _InterlockedExchange((volatile long*)address, (long)value); } static inline void -_Py_atomic_store_ptr(volatile void *address, void *value) +_Py_atomic_store_ptr(void *address, void *value) { _InterlockedExchangePointer((void * volatile *)address, (void *)value); } static inline void -_Py_atomic_store_ssize(volatile Py_ssize_t* address, Py_ssize_t value) +_Py_atomic_store_ssize(Py_ssize_t *address, Py_ssize_t value) { #if SIZEOF_SIZE_T == 8 - _InterlockedExchange64((volatile __int64*)address, (__int64)value); + _InterlockedExchange64((volatile __int64 *)address, (__int64)value); #else _InterlockedExchange((volatile long*)address, (long)value); #endif @@ -727,94 +726,94 @@ _Py_atomic_store_ssize(volatile Py_ssize_t* address, Py_ssize_t value) static inline void -_Py_atomic_store_int_relaxed(volatile int* address, int value) +_Py_atomic_store_int_relaxed(int *address, int value) { - *address = value; + *(volatile int *)address = value; } static inline void -_Py_atomic_store_int8_relaxed(volatile int8_t* address, int8_t value) +_Py_atomic_store_int8_relaxed(int8_t *address, int8_t value) { - *address = value; + *(volatile int8_t *)address = value; } static inline void -_Py_atomic_store_int16_relaxed(volatile int16_t* address, int16_t value) +_Py_atomic_store_int16_relaxed(int16_t *address, int16_t value) { - *address = value; + *(volatile int16_t *)address = value; } static inline void 
-_Py_atomic_store_int32_relaxed(volatile int32_t* address, int32_t value) +_Py_atomic_store_int32_relaxed(int32_t *address, int32_t value) { - *address = value; + *(volatile int32_t *)address = value; } static inline void -_Py_atomic_store_int64_relaxed(volatile int64_t* address, int64_t value) +_Py_atomic_store_int64_relaxed(int64_t *address, int64_t value) { - *address = value; + *(volatile int64_t *)address = value; } static inline void -_Py_atomic_store_intptr_relaxed(volatile intptr_t* address, intptr_t value) +_Py_atomic_store_intptr_relaxed(intptr_t *address, intptr_t value) { - *address = value; + *(volatile intptr_t *)address = value; } static inline void -_Py_atomic_store_uint8_relaxed(volatile uint8_t* address, uint8_t value) +_Py_atomic_store_uint8_relaxed(uint8_t *address, uint8_t value) { - *address = value; + *(volatile uint8_t *)address = value; } static inline void -_Py_atomic_store_uint16_relaxed(volatile uint16_t* address, uint16_t value) +_Py_atomic_store_uint16_relaxed(uint16_t *address, uint16_t value) { - *address = value; + *(volatile uint16_t *)address = value; } static inline void -_Py_atomic_store_uint32_relaxed(volatile uint32_t* address, uint32_t value) +_Py_atomic_store_uint32_relaxed(uint32_t *address, uint32_t value) { - *address = value; + *(volatile uint32_t *)address = value; } static inline void -_Py_atomic_store_uint64_relaxed(volatile uint64_t* address, uint64_t value) +_Py_atomic_store_uint64_relaxed(uint64_t *address, uint64_t value) { - *address = value; + *(volatile uint64_t *)address = value; } static inline void -_Py_atomic_store_uintptr_relaxed(volatile uintptr_t* address, uintptr_t value) +_Py_atomic_store_uintptr_relaxed(uintptr_t *address, uintptr_t value) { - *address = value; + *(volatile uintptr_t *)address = value; } static inline void -_Py_atomic_store_uint_relaxed(volatile unsigned int *address, unsigned int value) +_Py_atomic_store_uint_relaxed(unsigned int *address, unsigned int value) { - *address = value; + *(volatile unsigned int *)address = value; } static inline void -_Py_atomic_store_ptr_relaxed(volatile void* address, void* value) +_Py_atomic_store_ptr_relaxed(void *address, void* value) { *(void * volatile *)address = value; } static inline void -_Py_atomic_store_ssize_relaxed(volatile Py_ssize_t* address, Py_ssize_t value) +_Py_atomic_store_ssize_relaxed(Py_ssize_t *address, Py_ssize_t value) { - *address = value; + *(volatile Py_ssize_t *)address = value; } static inline void -_Py_atomic_store_uint64_release(volatile uint64_t* address, uint64_t value) +_Py_atomic_store_uint64_release(uint64_t *address, uint64_t value) { #if defined(_M_X64) || defined(_M_IX86) - *address = value; + *(volatile uint64_t *)address = value; #elif defined(_M_ARM64) __stlr64(address, value); #else @@ -823,7 +822,7 @@ _Py_atomic_store_uint64_release(volatile uint64_t* address, uint64_t value) } static inline void -_Py_atomic_store_ptr_release(volatile void* address, void* value) +_Py_atomic_store_ptr_release(void *address, void *value) { #if defined(_M_X64) || defined(_M_IX86) *(void * volatile *)address = value; diff --git a/Include/cpython/pyatomic_std.h b/Include/cpython/pyatomic_std.h index afc4e76eed120d..253cbaa3887f92 100644 --- a/Include/cpython/pyatomic_std.h +++ b/Include/cpython/pyatomic_std.h @@ -19,769 +19,769 @@ extern "C++" { static inline int -_Py_atomic_add_int(volatile int *address, int value) +_Py_atomic_add_int(int *address, int value) { _Py_USING_STD - return atomic_fetch_add((volatile _Atomic(int)*)address, value); + return 
atomic_fetch_add((_Atomic(int)*)address, value); } static inline int8_t -_Py_atomic_add_int8(volatile int8_t *address, int8_t value) +_Py_atomic_add_int8(int8_t *address, int8_t value) { _Py_USING_STD - return atomic_fetch_add((volatile _Atomic(int8_t)*)address, value); + return atomic_fetch_add((_Atomic(int8_t)*)address, value); } static inline int16_t -_Py_atomic_add_int16(volatile int16_t *address, int16_t value) +_Py_atomic_add_int16(int16_t *address, int16_t value) { _Py_USING_STD - return atomic_fetch_add((volatile _Atomic(int16_t)*)address, value); + return atomic_fetch_add((_Atomic(int16_t)*)address, value); } static inline int32_t -_Py_atomic_add_int32(volatile int32_t *address, int32_t value) +_Py_atomic_add_int32(int32_t *address, int32_t value) { _Py_USING_STD - return atomic_fetch_add((volatile _Atomic(int32_t)*)address, value); + return atomic_fetch_add((_Atomic(int32_t)*)address, value); } static inline int64_t -_Py_atomic_add_int64(volatile int64_t *address, int64_t value) +_Py_atomic_add_int64(int64_t *address, int64_t value) { _Py_USING_STD - return atomic_fetch_add((volatile _Atomic(int64_t)*)address, value); + return atomic_fetch_add((_Atomic(int64_t)*)address, value); } static inline intptr_t -_Py_atomic_add_intptr(volatile intptr_t *address, intptr_t value) +_Py_atomic_add_intptr(intptr_t *address, intptr_t value) { _Py_USING_STD - return atomic_fetch_add((volatile _Atomic(intptr_t)*)address, value); + return atomic_fetch_add((_Atomic(intptr_t)*)address, value); } static inline unsigned int -_Py_atomic_add_uint(volatile unsigned int *address, unsigned int value) +_Py_atomic_add_uint(unsigned int *address, unsigned int value) { _Py_USING_STD - return atomic_fetch_add((volatile _Atomic(unsigned int)*)address, value); + return atomic_fetch_add((_Atomic(unsigned int)*)address, value); } static inline uint8_t -_Py_atomic_add_uint8(volatile uint8_t *address, uint8_t value) +_Py_atomic_add_uint8(uint8_t *address, uint8_t value) { _Py_USING_STD - return atomic_fetch_add((volatile _Atomic(uint8_t)*)address, value); + return atomic_fetch_add((_Atomic(uint8_t)*)address, value); } static inline uint16_t -_Py_atomic_add_uint16(volatile uint16_t *address, uint16_t value) +_Py_atomic_add_uint16(uint16_t *address, uint16_t value) { _Py_USING_STD - return atomic_fetch_add((volatile _Atomic(uint16_t)*)address, value); + return atomic_fetch_add((_Atomic(uint16_t)*)address, value); } static inline uint32_t -_Py_atomic_add_uint32(volatile uint32_t *address, uint32_t value) +_Py_atomic_add_uint32(uint32_t *address, uint32_t value) { _Py_USING_STD - return atomic_fetch_add((volatile _Atomic(uint32_t)*)address, value); + return atomic_fetch_add((_Atomic(uint32_t)*)address, value); } static inline uint64_t -_Py_atomic_add_uint64(volatile uint64_t *address, uint64_t value) +_Py_atomic_add_uint64(uint64_t *address, uint64_t value) { _Py_USING_STD - return atomic_fetch_add((volatile _Atomic(uint64_t)*)address, value); + return atomic_fetch_add((_Atomic(uint64_t)*)address, value); } static inline uintptr_t -_Py_atomic_add_uintptr(volatile uintptr_t *address, uintptr_t value) +_Py_atomic_add_uintptr(uintptr_t *address, uintptr_t value) { _Py_USING_STD - return atomic_fetch_add((volatile _Atomic(uintptr_t)*)address, value); + return atomic_fetch_add((_Atomic(uintptr_t)*)address, value); } static inline Py_ssize_t -_Py_atomic_add_ssize(volatile Py_ssize_t *address, Py_ssize_t value) +_Py_atomic_add_ssize(Py_ssize_t *address, Py_ssize_t value) { _Py_USING_STD - return atomic_fetch_add((volatile 
_Atomic(Py_ssize_t)*)address, value); + return atomic_fetch_add((_Atomic(Py_ssize_t)*)address, value); } static inline int -_Py_atomic_compare_exchange_int(volatile int *address, int expected, int value) +_Py_atomic_compare_exchange_int(int *address, int expected, int value) { _Py_USING_STD - return atomic_compare_exchange_strong((volatile _Atomic(int)*)address, &expected, value); + return atomic_compare_exchange_strong((_Atomic(int)*)address, &expected, value); } static inline int -_Py_atomic_compare_exchange_int8(volatile int8_t *address, int8_t expected, int8_t value) +_Py_atomic_compare_exchange_int8(int8_t *address, int8_t expected, int8_t value) { _Py_USING_STD - return atomic_compare_exchange_strong((volatile _Atomic(int8_t)*)address, &expected, value); + return atomic_compare_exchange_strong((_Atomic(int8_t)*)address, &expected, value); } static inline int -_Py_atomic_compare_exchange_int16(volatile int16_t *address, int16_t expected, int16_t value) +_Py_atomic_compare_exchange_int16(int16_t *address, int16_t expected, int16_t value) { _Py_USING_STD - return atomic_compare_exchange_strong((volatile _Atomic(int16_t)*)address, &expected, value); + return atomic_compare_exchange_strong((_Atomic(int16_t)*)address, &expected, value); } static inline int -_Py_atomic_compare_exchange_int32(volatile int32_t *address, int32_t expected, int32_t value) +_Py_atomic_compare_exchange_int32(int32_t *address, int32_t expected, int32_t value) { _Py_USING_STD - return atomic_compare_exchange_strong((volatile _Atomic(int32_t)*)address, &expected, value); + return atomic_compare_exchange_strong((_Atomic(int32_t)*)address, &expected, value); } static inline int -_Py_atomic_compare_exchange_int64(volatile int64_t *address, int64_t expected, int64_t value) +_Py_atomic_compare_exchange_int64(int64_t *address, int64_t expected, int64_t value) { _Py_USING_STD - return atomic_compare_exchange_strong((volatile _Atomic(int64_t)*)address, &expected, value); + return atomic_compare_exchange_strong((_Atomic(int64_t)*)address, &expected, value); } static inline int -_Py_atomic_compare_exchange_intptr(volatile intptr_t *address, intptr_t expected, intptr_t value) +_Py_atomic_compare_exchange_intptr(intptr_t *address, intptr_t expected, intptr_t value) { _Py_USING_STD - return atomic_compare_exchange_strong((volatile _Atomic(intptr_t)*)address, &expected, value); + return atomic_compare_exchange_strong((_Atomic(intptr_t)*)address, &expected, value); } static inline int -_Py_atomic_compare_exchange_uint(volatile unsigned int *address, unsigned int expected, unsigned int value) +_Py_atomic_compare_exchange_uint(unsigned int *address, unsigned int expected, unsigned int value) { _Py_USING_STD - return atomic_compare_exchange_strong((volatile _Atomic(unsigned int)*)address, &expected, value); + return atomic_compare_exchange_strong((_Atomic(unsigned int)*)address, &expected, value); } static inline int -_Py_atomic_compare_exchange_uint8(volatile uint8_t *address, uint8_t expected, uint8_t value) +_Py_atomic_compare_exchange_uint8(uint8_t *address, uint8_t expected, uint8_t value) { _Py_USING_STD - return atomic_compare_exchange_strong((volatile _Atomic(uint8_t)*)address, &expected, value); + return atomic_compare_exchange_strong((_Atomic(uint8_t)*)address, &expected, value); } static inline int -_Py_atomic_compare_exchange_uint16(volatile uint16_t *address, uint16_t expected, uint16_t value) +_Py_atomic_compare_exchange_uint16(uint16_t *address, uint16_t expected, uint16_t value) { _Py_USING_STD - return 
atomic_compare_exchange_strong((volatile _Atomic(uint16_t)*)address, &expected, value); + return atomic_compare_exchange_strong((_Atomic(uint16_t)*)address, &expected, value); } static inline int -_Py_atomic_compare_exchange_uint32(volatile uint32_t *address, uint32_t expected, uint32_t value) +_Py_atomic_compare_exchange_uint32(uint32_t *address, uint32_t expected, uint32_t value) { _Py_USING_STD - return atomic_compare_exchange_strong((volatile _Atomic(uint32_t)*)address, &expected, value); + return atomic_compare_exchange_strong((_Atomic(uint32_t)*)address, &expected, value); } static inline int -_Py_atomic_compare_exchange_uint64(volatile uint64_t *address, uint64_t expected, uint64_t value) +_Py_atomic_compare_exchange_uint64(uint64_t *address, uint64_t expected, uint64_t value) { _Py_USING_STD - return atomic_compare_exchange_strong((volatile _Atomic(uint64_t)*)address, &expected, value); + return atomic_compare_exchange_strong((_Atomic(uint64_t)*)address, &expected, value); } static inline int -_Py_atomic_compare_exchange_uintptr(volatile uintptr_t *address, uintptr_t expected, uintptr_t value) +_Py_atomic_compare_exchange_uintptr(uintptr_t *address, uintptr_t expected, uintptr_t value) { _Py_USING_STD - return atomic_compare_exchange_strong((volatile _Atomic(uintptr_t)*)address, &expected, value); + return atomic_compare_exchange_strong((_Atomic(uintptr_t)*)address, &expected, value); } static inline int -_Py_atomic_compare_exchange_ssize(volatile Py_ssize_t *address, Py_ssize_t expected, Py_ssize_t value) +_Py_atomic_compare_exchange_ssize(Py_ssize_t *address, Py_ssize_t expected, Py_ssize_t value) { _Py_USING_STD - return atomic_compare_exchange_strong((volatile _Atomic(Py_ssize_t)*)address, &expected, value); + return atomic_compare_exchange_strong((_Atomic(Py_ssize_t)*)address, &expected, value); } static inline int -_Py_atomic_compare_exchange_ptr(volatile void *address, void *expected, void *value) +_Py_atomic_compare_exchange_ptr(void *address, void *expected, void *value) { _Py_USING_STD - return atomic_compare_exchange_strong((volatile _Atomic(void *)*)address, &expected, value); + return atomic_compare_exchange_strong((_Atomic(void *)*)address, &expected, value); } static inline int -_Py_atomic_exchange_int(volatile int *address, int value) +_Py_atomic_exchange_int(int *address, int value) { _Py_USING_STD - return atomic_exchange((volatile _Atomic(int)*)address, value); + return atomic_exchange((_Atomic(int)*)address, value); } static inline int8_t -_Py_atomic_exchange_int8(volatile int8_t *address, int8_t value) +_Py_atomic_exchange_int8(int8_t *address, int8_t value) { _Py_USING_STD - return atomic_exchange((volatile _Atomic(int8_t)*)address, value); + return atomic_exchange((_Atomic(int8_t)*)address, value); } static inline int16_t -_Py_atomic_exchange_int16(volatile int16_t *address, int16_t value) +_Py_atomic_exchange_int16(int16_t *address, int16_t value) { _Py_USING_STD - return atomic_exchange((volatile _Atomic(int16_t)*)address, value); + return atomic_exchange((_Atomic(int16_t)*)address, value); } static inline int32_t -_Py_atomic_exchange_int32(volatile int32_t *address, int32_t value) +_Py_atomic_exchange_int32(int32_t *address, int32_t value) { _Py_USING_STD - return atomic_exchange((volatile _Atomic(int32_t)*)address, value); + return atomic_exchange((_Atomic(int32_t)*)address, value); } static inline int64_t -_Py_atomic_exchange_int64(volatile int64_t *address, int64_t value) +_Py_atomic_exchange_int64(int64_t *address, int64_t value) { _Py_USING_STD - 
return atomic_exchange((volatile _Atomic(int64_t)*)address, value); + return atomic_exchange((_Atomic(int64_t)*)address, value); } static inline intptr_t -_Py_atomic_exchange_intptr(volatile intptr_t *address, intptr_t value) +_Py_atomic_exchange_intptr(intptr_t *address, intptr_t value) { _Py_USING_STD - return atomic_exchange((volatile _Atomic(intptr_t)*)address, value); + return atomic_exchange((_Atomic(intptr_t)*)address, value); } static inline unsigned int -_Py_atomic_exchange_uint(volatile unsigned int *address, unsigned int value) +_Py_atomic_exchange_uint(unsigned int *address, unsigned int value) { _Py_USING_STD - return atomic_exchange((volatile _Atomic(unsigned int)*)address, value); + return atomic_exchange((_Atomic(unsigned int)*)address, value); } static inline uint8_t -_Py_atomic_exchange_uint8(volatile uint8_t *address, uint8_t value) +_Py_atomic_exchange_uint8(uint8_t *address, uint8_t value) { _Py_USING_STD - return atomic_exchange((volatile _Atomic(uint8_t)*)address, value); + return atomic_exchange((_Atomic(uint8_t)*)address, value); } static inline uint16_t -_Py_atomic_exchange_uint16(volatile uint16_t *address, uint16_t value) +_Py_atomic_exchange_uint16(uint16_t *address, uint16_t value) { _Py_USING_STD - return atomic_exchange((volatile _Atomic(uint16_t)*)address, value); + return atomic_exchange((_Atomic(uint16_t)*)address, value); } static inline uint32_t -_Py_atomic_exchange_uint32(volatile uint32_t *address, uint32_t value) +_Py_atomic_exchange_uint32(uint32_t *address, uint32_t value) { _Py_USING_STD - return atomic_exchange((volatile _Atomic(uint32_t)*)address, value); + return atomic_exchange((_Atomic(uint32_t)*)address, value); } static inline uint64_t -_Py_atomic_exchange_uint64(volatile uint64_t *address, uint64_t value) +_Py_atomic_exchange_uint64(uint64_t *address, uint64_t value) { _Py_USING_STD - return atomic_exchange((volatile _Atomic(uint64_t)*)address, value); + return atomic_exchange((_Atomic(uint64_t)*)address, value); } static inline uintptr_t -_Py_atomic_exchange_uintptr(volatile uintptr_t *address, uintptr_t value) +_Py_atomic_exchange_uintptr(uintptr_t *address, uintptr_t value) { _Py_USING_STD - return atomic_exchange((volatile _Atomic(uintptr_t)*)address, value); + return atomic_exchange((_Atomic(uintptr_t)*)address, value); } static inline Py_ssize_t -_Py_atomic_exchange_ssize(volatile Py_ssize_t *address, Py_ssize_t value) +_Py_atomic_exchange_ssize(Py_ssize_t *address, Py_ssize_t value) { _Py_USING_STD - return atomic_exchange((volatile _Atomic(Py_ssize_t)*)address, value); + return atomic_exchange((_Atomic(Py_ssize_t)*)address, value); } static inline void * -_Py_atomic_exchange_ptr(volatile void *address, void *value) +_Py_atomic_exchange_ptr(void *address, void *value) { _Py_USING_STD - return atomic_exchange((volatile _Atomic(void *)*)address, value); + return atomic_exchange((_Atomic(void *)*)address, value); } static inline uint8_t -_Py_atomic_and_uint8(volatile uint8_t *address, uint8_t value) +_Py_atomic_and_uint8(uint8_t *address, uint8_t value) { _Py_USING_STD - return atomic_fetch_and((volatile _Atomic(uint8_t)*)address, value); + return atomic_fetch_and((_Atomic(uint8_t)*)address, value); } static inline uint16_t -_Py_atomic_and_uint16(volatile uint16_t *address, uint16_t value) +_Py_atomic_and_uint16(uint16_t *address, uint16_t value) { _Py_USING_STD - return atomic_fetch_and((volatile _Atomic(uint16_t)*)address, value); + return atomic_fetch_and((_Atomic(uint16_t)*)address, value); } static inline uint32_t 
-_Py_atomic_and_uint32(volatile uint32_t *address, uint32_t value) +_Py_atomic_and_uint32(uint32_t *address, uint32_t value) { _Py_USING_STD - return atomic_fetch_and((volatile _Atomic(uint32_t)*)address, value); + return atomic_fetch_and((_Atomic(uint32_t)*)address, value); } static inline uint64_t -_Py_atomic_and_uint64(volatile uint64_t *address, uint64_t value) +_Py_atomic_and_uint64(uint64_t *address, uint64_t value) { _Py_USING_STD - return atomic_fetch_and((volatile _Atomic(uint64_t)*)address, value); + return atomic_fetch_and((_Atomic(uint64_t)*)address, value); } static inline uintptr_t -_Py_atomic_and_uintptr(volatile uintptr_t *address, uintptr_t value) +_Py_atomic_and_uintptr(uintptr_t *address, uintptr_t value) { _Py_USING_STD - return atomic_fetch_and((volatile _Atomic(uintptr_t)*)address, value); + return atomic_fetch_and((_Atomic(uintptr_t)*)address, value); } static inline uint8_t -_Py_atomic_or_uint8(volatile uint8_t *address, uint8_t value) +_Py_atomic_or_uint8(uint8_t *address, uint8_t value) { _Py_USING_STD - return atomic_fetch_or((volatile _Atomic(uint8_t)*)address, value); + return atomic_fetch_or((_Atomic(uint8_t)*)address, value); } static inline uint16_t -_Py_atomic_or_uint16(volatile uint16_t *address, uint16_t value) +_Py_atomic_or_uint16(uint16_t *address, uint16_t value) { _Py_USING_STD - return atomic_fetch_or((volatile _Atomic(uint16_t)*)address, value); + return atomic_fetch_or((_Atomic(uint16_t)*)address, value); } static inline uint32_t -_Py_atomic_or_uint32(volatile uint32_t *address, uint32_t value) +_Py_atomic_or_uint32(uint32_t *address, uint32_t value) { _Py_USING_STD - return atomic_fetch_or((volatile _Atomic(uint32_t)*)address, value); + return atomic_fetch_or((_Atomic(uint32_t)*)address, value); } static inline uint64_t -_Py_atomic_or_uint64(volatile uint64_t *address, uint64_t value) +_Py_atomic_or_uint64(uint64_t *address, uint64_t value) { _Py_USING_STD - return atomic_fetch_or((volatile _Atomic(uint64_t)*)address, value); + return atomic_fetch_or((_Atomic(uint64_t)*)address, value); } static inline uintptr_t -_Py_atomic_or_uintptr(volatile uintptr_t *address, uintptr_t value) +_Py_atomic_or_uintptr(uintptr_t *address, uintptr_t value) { _Py_USING_STD - return atomic_fetch_or((volatile _Atomic(uintptr_t)*)address, value); + return atomic_fetch_or((_Atomic(uintptr_t)*)address, value); } static inline int -_Py_atomic_load_int(const volatile int *address) +_Py_atomic_load_int(const int *address) { _Py_USING_STD - return atomic_load((const volatile _Atomic(int)*)address); + return atomic_load((const _Atomic(int)*)address); } static inline int8_t -_Py_atomic_load_int8(const volatile int8_t *address) +_Py_atomic_load_int8(const int8_t *address) { _Py_USING_STD - return atomic_load((const volatile _Atomic(int8_t)*)address); + return atomic_load((const _Atomic(int8_t)*)address); } static inline int16_t -_Py_atomic_load_int16(const volatile int16_t *address) +_Py_atomic_load_int16(const int16_t *address) { _Py_USING_STD - return atomic_load((const volatile _Atomic(int16_t)*)address); + return atomic_load((const _Atomic(int16_t)*)address); } static inline int32_t -_Py_atomic_load_int32(const volatile int32_t *address) +_Py_atomic_load_int32(const int32_t *address) { _Py_USING_STD - return atomic_load((const volatile _Atomic(int32_t)*)address); + return atomic_load((const _Atomic(int32_t)*)address); } static inline int64_t -_Py_atomic_load_int64(const volatile int64_t *address) +_Py_atomic_load_int64(const int64_t *address) { _Py_USING_STD - return 
atomic_load((const volatile _Atomic(int64_t)*)address); + return atomic_load((const _Atomic(int64_t)*)address); } static inline intptr_t -_Py_atomic_load_intptr(const volatile intptr_t *address) +_Py_atomic_load_intptr(const intptr_t *address) { _Py_USING_STD - return atomic_load((const volatile _Atomic(intptr_t)*)address); + return atomic_load((const _Atomic(intptr_t)*)address); } static inline uint8_t -_Py_atomic_load_uint8(const volatile uint8_t *address) +_Py_atomic_load_uint8(const uint8_t *address) { _Py_USING_STD - return atomic_load((const volatile _Atomic(uint8_t)*)address); + return atomic_load((const _Atomic(uint8_t)*)address); } static inline uint16_t -_Py_atomic_load_uint16(const volatile uint16_t *address) +_Py_atomic_load_uint16(const uint16_t *address) { _Py_USING_STD - return atomic_load((const volatile _Atomic(uint32_t)*)address); + return atomic_load((const _Atomic(uint16_t)*)address); } static inline uint32_t -_Py_atomic_load_uint32(const volatile uint32_t *address) +_Py_atomic_load_uint32(const uint32_t *address) { _Py_USING_STD - return atomic_load((const volatile _Atomic(uint32_t)*)address); + return atomic_load((const _Atomic(uint32_t)*)address); } static inline uint64_t -_Py_atomic_load_uint64(const volatile uint64_t *address) +_Py_atomic_load_uint64(const uint64_t *address) { _Py_USING_STD - return atomic_load((const volatile _Atomic(uint64_t)*)address); + return atomic_load((const _Atomic(uint64_t)*)address); } static inline uintptr_t -_Py_atomic_load_uintptr(const volatile uintptr_t *address) +_Py_atomic_load_uintptr(const uintptr_t *address) { _Py_USING_STD - return atomic_load((const volatile _Atomic(uintptr_t)*)address); + return atomic_load((const _Atomic(uintptr_t)*)address); } static inline unsigned int -_Py_atomic_load_uint(const volatile unsigned int *address) +_Py_atomic_load_uint(const unsigned int *address) { _Py_USING_STD - return atomic_load((const volatile _Atomic(unsigned int)*)address); + return atomic_load((const _Atomic(unsigned int)*)address); } static inline Py_ssize_t -_Py_atomic_load_ssize(const volatile Py_ssize_t *address) +_Py_atomic_load_ssize(const Py_ssize_t *address) { _Py_USING_STD - return atomic_load((const volatile _Atomic(Py_ssize_t)*)address); + return atomic_load((const _Atomic(Py_ssize_t)*)address); } static inline void * -_Py_atomic_load_ptr(const volatile void *address) +_Py_atomic_load_ptr(const void *address) { _Py_USING_STD - return atomic_load((const volatile _Atomic(void*)*)address); + return atomic_load((const _Atomic(void*)*)address); } static inline int -_Py_atomic_load_int_relaxed(const volatile int *address) +_Py_atomic_load_int_relaxed(const int *address) { _Py_USING_STD - return atomic_load_explicit((const volatile _Atomic(int)*)address, memory_order_relaxed); + return atomic_load_explicit((const _Atomic(int)*)address, memory_order_relaxed); } static inline int8_t -_Py_atomic_load_int8_relaxed(const volatile int8_t *address) +_Py_atomic_load_int8_relaxed(const int8_t *address) { _Py_USING_STD - return atomic_load_explicit((const volatile _Atomic(int8_t)*)address, memory_order_relaxed); + return atomic_load_explicit((const _Atomic(int8_t)*)address, memory_order_relaxed); } static inline int16_t -_Py_atomic_load_int16_relaxed(const volatile int16_t *address) +_Py_atomic_load_int16_relaxed(const int16_t *address) { _Py_USING_STD - return atomic_load_explicit((const volatile _Atomic(int16_t)*)address, memory_order_relaxed); + return atomic_load_explicit((const _Atomic(int16_t)*)address, memory_order_relaxed); }
static inline int32_t -_Py_atomic_load_int32_relaxed(const volatile int32_t *address) +_Py_atomic_load_int32_relaxed(const int32_t *address) { _Py_USING_STD - return atomic_load_explicit((const volatile _Atomic(int32_t)*)address, memory_order_relaxed); + return atomic_load_explicit((const _Atomic(int32_t)*)address, memory_order_relaxed); } static inline int64_t -_Py_atomic_load_int64_relaxed(const volatile int64_t *address) +_Py_atomic_load_int64_relaxed(const int64_t *address) { _Py_USING_STD - return atomic_load_explicit((const volatile _Atomic(int64_t)*)address, memory_order_relaxed); + return atomic_load_explicit((const _Atomic(int64_t)*)address, memory_order_relaxed); } static inline intptr_t -_Py_atomic_load_intptr_relaxed(const volatile intptr_t *address) +_Py_atomic_load_intptr_relaxed(const intptr_t *address) { _Py_USING_STD - return atomic_load_explicit((const volatile _Atomic(intptr_t)*)address, memory_order_relaxed); + return atomic_load_explicit((const _Atomic(intptr_t)*)address, memory_order_relaxed); } static inline uint8_t -_Py_atomic_load_uint8_relaxed(const volatile uint8_t *address) +_Py_atomic_load_uint8_relaxed(const uint8_t *address) { _Py_USING_STD - return atomic_load_explicit((const volatile _Atomic(uint8_t)*)address, memory_order_relaxed); + return atomic_load_explicit((const _Atomic(uint8_t)*)address, memory_order_relaxed); } static inline uint16_t -_Py_atomic_load_uint16_relaxed(const volatile uint16_t *address) +_Py_atomic_load_uint16_relaxed(const uint16_t *address) { _Py_USING_STD - return atomic_load_explicit((const volatile _Atomic(uint16_t)*)address, memory_order_relaxed); + return atomic_load_explicit((const _Atomic(uint16_t)*)address, memory_order_relaxed); } static inline uint32_t -_Py_atomic_load_uint32_relaxed(const volatile uint32_t *address) +_Py_atomic_load_uint32_relaxed(const uint32_t *address) { _Py_USING_STD - return atomic_load_explicit((const volatile _Atomic(uint32_t)*)address, memory_order_relaxed); + return atomic_load_explicit((const _Atomic(uint32_t)*)address, memory_order_relaxed); } static inline uint64_t -_Py_atomic_load_uint64_relaxed(const volatile uint64_t *address) +_Py_atomic_load_uint64_relaxed(const uint64_t *address) { _Py_USING_STD - return atomic_load_explicit((const volatile _Atomic(uint64_t)*)address, memory_order_relaxed); + return atomic_load_explicit((const _Atomic(uint64_t)*)address, memory_order_relaxed); } static inline uintptr_t -_Py_atomic_load_uintptr_relaxed(const volatile uintptr_t *address) +_Py_atomic_load_uintptr_relaxed(const uintptr_t *address) { _Py_USING_STD - return atomic_load_explicit((const volatile _Atomic(uintptr_t)*)address, memory_order_relaxed); + return atomic_load_explicit((const _Atomic(uintptr_t)*)address, memory_order_relaxed); } static inline unsigned int -_Py_atomic_load_uint_relaxed(const volatile unsigned int *address) +_Py_atomic_load_uint_relaxed(const unsigned int *address) { _Py_USING_STD - return atomic_load_explicit((const volatile _Atomic(unsigned int)*)address, memory_order_relaxed); + return atomic_load_explicit((const _Atomic(unsigned int)*)address, memory_order_relaxed); } static inline Py_ssize_t -_Py_atomic_load_ssize_relaxed(const volatile Py_ssize_t *address) +_Py_atomic_load_ssize_relaxed(const Py_ssize_t *address) { _Py_USING_STD - return atomic_load_explicit((const volatile _Atomic(Py_ssize_t)*)address, memory_order_relaxed); + return atomic_load_explicit((const _Atomic(Py_ssize_t)*)address, memory_order_relaxed); } static inline void * 
-_Py_atomic_load_ptr_relaxed(const volatile void *address) +_Py_atomic_load_ptr_relaxed(const void *address) { _Py_USING_STD - return atomic_load_explicit((const volatile _Atomic(void*)*)address, memory_order_relaxed); + return atomic_load_explicit((const _Atomic(void*)*)address, memory_order_relaxed); } static inline void -_Py_atomic_store_int(volatile int *address, int value) +_Py_atomic_store_int(int *address, int value) { _Py_USING_STD - atomic_store((volatile _Atomic(int)*)address, value); + atomic_store((_Atomic(int)*)address, value); } static inline void -_Py_atomic_store_int8(volatile int8_t *address, int8_t value) +_Py_atomic_store_int8(int8_t *address, int8_t value) { _Py_USING_STD - atomic_store((volatile _Atomic(int8_t)*)address, value); + atomic_store((_Atomic(int8_t)*)address, value); } static inline void -_Py_atomic_store_int16(volatile int16_t *address, int16_t value) +_Py_atomic_store_int16(int16_t *address, int16_t value) { _Py_USING_STD - atomic_store((volatile _Atomic(int16_t)*)address, value); + atomic_store((_Atomic(int16_t)*)address, value); } static inline void -_Py_atomic_store_int32(volatile int32_t *address, int32_t value) +_Py_atomic_store_int32(int32_t *address, int32_t value) { _Py_USING_STD - atomic_store((volatile _Atomic(int32_t)*)address, value); + atomic_store((_Atomic(int32_t)*)address, value); } static inline void -_Py_atomic_store_int64(volatile int64_t *address, int64_t value) +_Py_atomic_store_int64(int64_t *address, int64_t value) { _Py_USING_STD - atomic_store((volatile _Atomic(int64_t)*)address, value); + atomic_store((_Atomic(int64_t)*)address, value); } static inline void -_Py_atomic_store_intptr(volatile intptr_t *address, intptr_t value) +_Py_atomic_store_intptr(intptr_t *address, intptr_t value) { _Py_USING_STD - atomic_store((volatile _Atomic(intptr_t)*)address, value); + atomic_store((_Atomic(intptr_t)*)address, value); } static inline void -_Py_atomic_store_uint8(volatile uint8_t *address, uint8_t value) +_Py_atomic_store_uint8(uint8_t *address, uint8_t value) { _Py_USING_STD - atomic_store((volatile _Atomic(uint8_t)*)address, value); + atomic_store((_Atomic(uint8_t)*)address, value); } static inline void -_Py_atomic_store_uint16(volatile uint16_t *address, uint16_t value) +_Py_atomic_store_uint16(uint16_t *address, uint16_t value) { _Py_USING_STD - atomic_store((volatile _Atomic(uint16_t)*)address, value); + atomic_store((_Atomic(uint16_t)*)address, value); } static inline void -_Py_atomic_store_uint32(volatile uint32_t *address, uint32_t value) +_Py_atomic_store_uint32(uint32_t *address, uint32_t value) { _Py_USING_STD - atomic_store((volatile _Atomic(uint32_t)*)address, value); + atomic_store((_Atomic(uint32_t)*)address, value); } static inline void -_Py_atomic_store_uint64(volatile uint64_t *address, uint64_t value) +_Py_atomic_store_uint64(uint64_t *address, uint64_t value) { _Py_USING_STD - atomic_store((volatile _Atomic(uint64_t)*)address, value); + atomic_store((_Atomic(uint64_t)*)address, value); } static inline void -_Py_atomic_store_uintptr(volatile uintptr_t *address, uintptr_t value) +_Py_atomic_store_uintptr(uintptr_t *address, uintptr_t value) { _Py_USING_STD - atomic_store((volatile _Atomic(uintptr_t)*)address, value); + atomic_store((_Atomic(uintptr_t)*)address, value); } static inline void -_Py_atomic_store_uint(volatile unsigned int *address, unsigned int value) +_Py_atomic_store_uint(unsigned int *address, unsigned int value) { _Py_USING_STD - atomic_store((volatile _Atomic(unsigned int)*)address, value); + 
atomic_store((_Atomic(unsigned int)*)address, value); } static inline void -_Py_atomic_store_ptr(volatile void *address, void *value) +_Py_atomic_store_ptr(void *address, void *value) { _Py_USING_STD - atomic_store((volatile _Atomic(void*)*)address, value); + atomic_store((_Atomic(void*)*)address, value); } static inline void -_Py_atomic_store_ssize(volatile Py_ssize_t *address, Py_ssize_t value) +_Py_atomic_store_ssize(Py_ssize_t *address, Py_ssize_t value) { _Py_USING_STD - atomic_store((volatile _Atomic(Py_ssize_t)*)address, value); + atomic_store((_Atomic(Py_ssize_t)*)address, value); } static inline void -_Py_atomic_store_int_relaxed(volatile int *address, int value) +_Py_atomic_store_int_relaxed(int *address, int value) { _Py_USING_STD - atomic_store_explicit((volatile _Atomic(int)*)address, value, memory_order_relaxed); + atomic_store_explicit((_Atomic(int)*)address, value, memory_order_relaxed); } static inline void -_Py_atomic_store_int8_relaxed(volatile int8_t *address, int8_t value) +_Py_atomic_store_int8_relaxed(int8_t *address, int8_t value) { _Py_USING_STD - atomic_store_explicit((volatile _Atomic(int8_t)*)address, value, memory_order_relaxed); + atomic_store_explicit((_Atomic(int8_t)*)address, value, memory_order_relaxed); } static inline void -_Py_atomic_store_int16_relaxed(volatile int16_t *address, int16_t value) +_Py_atomic_store_int16_relaxed(int16_t *address, int16_t value) { _Py_USING_STD - atomic_store_explicit((volatile _Atomic(int16_t)*)address, value, memory_order_relaxed); + atomic_store_explicit((_Atomic(int16_t)*)address, value, memory_order_relaxed); } static inline void -_Py_atomic_store_int32_relaxed(volatile int32_t *address, int32_t value) +_Py_atomic_store_int32_relaxed(int32_t *address, int32_t value) { _Py_USING_STD - atomic_store_explicit((volatile _Atomic(int32_t)*)address, value, memory_order_relaxed); + atomic_store_explicit((_Atomic(int32_t)*)address, value, memory_order_relaxed); } static inline void -_Py_atomic_store_int64_relaxed(volatile int64_t *address, int64_t value) +_Py_atomic_store_int64_relaxed(int64_t *address, int64_t value) { _Py_USING_STD - atomic_store_explicit((volatile _Atomic(int64_t)*)address, value, memory_order_relaxed); + atomic_store_explicit((_Atomic(int64_t)*)address, value, memory_order_relaxed); } static inline void -_Py_atomic_store_intptr_relaxed(volatile intptr_t *address, intptr_t value) +_Py_atomic_store_intptr_relaxed(intptr_t *address, intptr_t value) { _Py_USING_STD - atomic_store_explicit((volatile _Atomic(intptr_t)*)address, value, memory_order_relaxed); + atomic_store_explicit((_Atomic(intptr_t)*)address, value, memory_order_relaxed); } static inline void -_Py_atomic_store_uint8_relaxed(volatile uint8_t *address, uint8_t value) +_Py_atomic_store_uint8_relaxed(uint8_t *address, uint8_t value) { _Py_USING_STD - atomic_store_explicit((volatile _Atomic(uint8_t)*)address, value, memory_order_relaxed); + atomic_store_explicit((_Atomic(uint8_t)*)address, value, memory_order_relaxed); } static inline void -_Py_atomic_store_uint16_relaxed(volatile uint16_t *address, uint16_t value) +_Py_atomic_store_uint16_relaxed(uint16_t *address, uint16_t value) { _Py_USING_STD - atomic_store_explicit((volatile _Atomic(uint16_t)*)address, value, memory_order_relaxed); + atomic_store_explicit((_Atomic(uint16_t)*)address, value, memory_order_relaxed); } static inline void -_Py_atomic_store_uint32_relaxed(volatile uint32_t *address, uint32_t value) +_Py_atomic_store_uint32_relaxed(uint32_t *address, uint32_t value) { _Py_USING_STD - 
atomic_store_explicit((volatile _Atomic(uint32_t)*)address, value, memory_order_relaxed); + atomic_store_explicit((_Atomic(uint32_t)*)address, value, memory_order_relaxed); } static inline void -_Py_atomic_store_uint64_relaxed(volatile uint64_t *address, uint64_t value) +_Py_atomic_store_uint64_relaxed(uint64_t *address, uint64_t value) { _Py_USING_STD - atomic_store_explicit((volatile _Atomic(uint64_t)*)address, value, memory_order_relaxed); + atomic_store_explicit((_Atomic(uint64_t)*)address, value, memory_order_relaxed); } static inline void -_Py_atomic_store_uintptr_relaxed(volatile uintptr_t *address, uintptr_t value) +_Py_atomic_store_uintptr_relaxed(uintptr_t *address, uintptr_t value) { _Py_USING_STD - atomic_store_explicit((volatile _Atomic(uintptr_t)*)address, value, memory_order_relaxed); + atomic_store_explicit((_Atomic(uintptr_t)*)address, value, memory_order_relaxed); } static inline void -_Py_atomic_store_uint_relaxed(volatile unsigned int *address, unsigned int value) +_Py_atomic_store_uint_relaxed(unsigned int *address, unsigned int value) { _Py_USING_STD - atomic_store_explicit((volatile _Atomic(unsigned int)*)address, value, memory_order_relaxed); + atomic_store_explicit((_Atomic(unsigned int)*)address, value, memory_order_relaxed); } static inline void -_Py_atomic_store_ptr_relaxed(volatile void *address, void *value) +_Py_atomic_store_ptr_relaxed(void *address, void *value) { _Py_USING_STD - atomic_store_explicit((volatile _Atomic(void*)*)address, value, memory_order_relaxed); + atomic_store_explicit((_Atomic(void*)*)address, value, memory_order_relaxed); } static inline void -_Py_atomic_store_ssize_relaxed(volatile Py_ssize_t *address, Py_ssize_t value) +_Py_atomic_store_ssize_relaxed(Py_ssize_t *address, Py_ssize_t value) { _Py_USING_STD - atomic_store_explicit((volatile _Atomic(Py_ssize_t)*)address, value, memory_order_relaxed); + atomic_store_explicit((_Atomic(Py_ssize_t)*)address, value, memory_order_relaxed); } static inline void -_Py_atomic_store_uint64_release(volatile uint64_t *address, uint64_t value) +_Py_atomic_store_uint64_release(uint64_t *address, uint64_t value) { _Py_USING_STD - atomic_store_explicit((volatile _Atomic(uint64_t)*)address, value, memory_order_release); + atomic_store_explicit((_Atomic(uint64_t)*)address, value, memory_order_release); } static inline void -_Py_atomic_store_ptr_release(volatile void *address, void *value) +_Py_atomic_store_ptr_release(void *address, void *value) { _Py_USING_STD - atomic_store_explicit((volatile _Atomic(void*)*)address, value, memory_order_release); + atomic_store_explicit((_Atomic(void*)*)address, value, memory_order_release); } static inline void From 60b56f19a6e69e1b2d85438544eb9bcee8354cbf Mon Sep 17 00:00:00 2001 From: Sam Gross Date: Wed, 23 Aug 2023 12:01:32 -0700 Subject: [PATCH 07/24] Add code documentation to pyatomic.h --- Include/cpython/pyatomic.h | 47 +++++++++++++++++++++++++++++--------- 1 file changed, 36 insertions(+), 11 deletions(-) diff --git a/Include/cpython/pyatomic.h b/Include/cpython/pyatomic.h index 85592c28f26a21..4bf699d0fb31a4 100644 --- a/Include/cpython/pyatomic.h +++ b/Include/cpython/pyatomic.h @@ -1,6 +1,27 @@ #ifndef Py_ATOMIC_H #define Py_ATOMIC_H +// This header provides cross-platform low-level atomic operations +// similar to C11 atomics. +// +// Operations are sequentially consistent unless they have a suffix indicating +// otherwise. If in doubt, prefer the sequentially consistent operations. 
+// +// The "_relaxed" suffix for load and store operations indicates the "relaxed" +// memory order. They don't provide synchronization, but (roughly speaking) +// guarantee somewhat sane behavior for races instead of undefined behavior. +// In practice, they correspond to "normal" hardware load and store instructions, +// so they are almost as inexpensive as plain loads and stores in C. +// +// Note that atomic read-modify-write operations like _Py_atomic_add_* return +// the previous value of the atomic variable, not the new value. +// +// See https://en.cppreference.com/w/c/atomic for more information on C11 atomics. +// See https://www.open-std.org/jtc1/sc22/wg21/docs/papers/2020/p2055r0.pdf +// "A Relaxed Guide to memory_order_relaxed" for discussion of and common usage +// of relaxed atomics. + +// Atomically adds `value` to `address` and returns the previous value static inline int _Py_atomic_add_int(int *address, int value); @@ -40,7 +61,9 @@ _Py_atomic_add_uintptr(uintptr_t *address, uintptr_t value); static inline Py_ssize_t _Py_atomic_add_ssize(Py_ssize_t *address, Py_ssize_t value); - +// Performs an atomic compare-and-exchange. If `*address` and `expected` are equal, +// then `value` is stored in `*address`. Returns 1 on success and 0 on failure. +// These correspond to the "strong" variations of the C11 atomic_compare_exchange_* functions. static inline int _Py_atomic_compare_exchange_int(int *address, int expected, int value); @@ -83,7 +106,7 @@ _Py_atomic_compare_exchange_ssize(Py_ssize_t *address, Py_ssize_t expected, Py_s static inline int _Py_atomic_compare_exchange_ptr(void *address, void *expected, void *value); - +// Atomically replaces `*address` with value and returns the previously value of *address. static inline int _Py_atomic_exchange_int(int *address, int value); @@ -126,7 +149,7 @@ _Py_atomic_exchange_ssize(Py_ssize_t *address, Py_ssize_t value); static inline void * _Py_atomic_exchange_ptr(void *address, void *value); - +// Performs `*address &= value` atomically and returns the previous value of *address. static inline uint8_t _Py_atomic_and_uint8(uint8_t *address, uint8_t value); @@ -142,7 +165,7 @@ _Py_atomic_and_uint64(uint64_t *address, uint64_t value); static inline uintptr_t _Py_atomic_and_uintptr(uintptr_t *address, uintptr_t value); - +// Performs `*address |= value` atomically and returns the previous value of *address.
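The bitwise and arithmetic helpers documented above all follow the same convention: the return value is the contents of `*address` before the update, mirroring C11's atomic_fetch_* family. A small illustrative sketch (the counter and bitmask names are invented for the example and are not part of the patch):

    #include <stdint.h>

    static uint64_t bytes_allocated;   /* hypothetical statistics counter */
    static uint8_t  seen_events;       /* hypothetical event bitmask */

    static void
    record_event(uint64_t nbytes, uint8_t event_bit)
    {
        /* Both calls return the value held *before* the update. */
        uint64_t old_total = _Py_atomic_add_uint64(&bytes_allocated, nbytes);
        uint8_t old_bits = _Py_atomic_or_uint8(&seen_events, event_bit);
        if ((old_bits & event_bit) == 0) {
            /* first time this particular event has been recorded */
        }
        (void)old_total;
    }
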
static inline uint8_t _Py_atomic_or_uint8(uint8_t *address, uint8_t value); @@ -158,7 +181,7 @@ _Py_atomic_or_uint64(uint64_t *address, uint64_t value); static inline uintptr_t _Py_atomic_or_uintptr(uintptr_t *address, uintptr_t value); - +// Atomically loads `*address` (sequential consistency) static inline int _Py_atomic_load_int(const int *address); @@ -201,7 +224,7 @@ _Py_atomic_load_ssize(const Py_ssize_t *address); static inline void * _Py_atomic_load_ptr(const void *address); - +// Loads `*address` (relaxed consistency, i.e., no ordering) static inline int _Py_atomic_load_int_relaxed(const int *address); @@ -244,7 +267,7 @@ _Py_atomic_load_ssize_relaxed(const Py_ssize_t *address); static inline void * _Py_atomic_load_ptr_relaxed(const void *address); - +// Atomically performs `*address = value` (sequential consistency) static inline void _Py_atomic_store_int(int *address, int value); @@ -287,7 +310,7 @@ _Py_atomic_store_ptr(void *address, void *value); static inline void _Py_atomic_store_ssize(Py_ssize_t* address, Py_ssize_t value); - +// Stores `*address = value` (relaxed consistency, i.e., no ordering) static inline void _Py_atomic_store_int_relaxed(int *address, int value); @@ -330,7 +353,7 @@ _Py_atomic_store_ptr_relaxed(void *address, void *value); static inline void _Py_atomic_store_ssize_relaxed(Py_ssize_t *address, Py_ssize_t value); - +// Stores `*address = value` (release operation) static inline void _Py_atomic_store_uint64_release(uint64_t *address, uint64_t value); @@ -338,10 +361,12 @@ static inline void _Py_atomic_store_ptr_release(void *address, void *value); - static inline void +// Sequential consistency fence +static inline void _Py_atomic_fence_seq_cst(void); - static inline void +// Release fence +static inline void _Py_atomic_fence_release(void); From 0131868d353aaef6c051af388e6c0bb391ed61df Mon Sep 17 00:00:00 2001 From: Sam Gross Date: Wed, 23 Aug 2023 12:02:20 -0700 Subject: [PATCH 08/24] Fix typo --- Include/cpython/pyatomic.h | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Include/cpython/pyatomic.h b/Include/cpython/pyatomic.h index 4bf699d0fb31a4..0fe3653292d44b 100644 --- a/Include/cpython/pyatomic.h +++ b/Include/cpython/pyatomic.h @@ -106,7 +106,7 @@ _Py_atomic_compare_exchange_ssize(Py_ssize_t *address, Py_ssize_t expected, Py_s static inline int _Py_atomic_compare_exchange_ptr(void *address, void *expected, void *value); -// Atomically replaces `*address` with value and returns the previously value of *address. +// Atomically replaces `*address` with `value` and returns the previous value of `*address`. static inline int _Py_atomic_exchange_int(int *address, int value); @@ -149,7 +149,7 @@ _Py_atomic_exchange_ssize(Py_ssize_t *address, Py_ssize_t value); static inline void * _Py_atomic_exchange_ptr(void *address, void *value); -// Performs `*address &= value` atomically and returns the previous value of *address. +// Performs `*address &= value` atomically and returns the previous value of `*address`. static inline uint8_t _Py_atomic_and_uint8(uint8_t *address, uint8_t value); @@ -165,7 +165,7 @@ _Py_atomic_and_uint64(uint64_t *address, uint64_t value); static inline uintptr_t _Py_atomic_and_uintptr(uintptr_t *address, uintptr_t value); -// Performs `*address |= value` atomically and returns the previous value of *address. +// Performs `*address |= value` atomically and returns the previous value of `*address`. 
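The compare-and-exchange helpers use the strong C11 semantics described earlier: on success they install `value` and return 1; on failure they leave `*address` untouched and return 0, so callers typically re-read and retry. A sketch of that retry loop (illustrative only; `decrement_if_positive` is not part of the patch):

    #include <stdint.h>

    /* Illustrative: decrement a counter, but never below zero. */
    static int
    decrement_if_positive(int *counter)
    {
        for (;;) {
            int current = _Py_atomic_load_int(counter);
            if (current == 0) {
                return 0;   /* nothing to decrement */
            }
            /* Succeeds only if *counter still equals `current`; otherwise
               another thread raced us, so re-read and try again. */
            if (_Py_atomic_compare_exchange_int(counter, current, current - 1)) {
                return 1;
            }
        }
    }
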
static inline uint8_t _Py_atomic_or_uint8(uint8_t *address, uint8_t value); From 0474e2f380eb1ec6c591a8f57390893826dae8b7 Mon Sep 17 00:00:00 2001 From: Sam Gross Date: Wed, 23 Aug 2023 12:04:00 -0700 Subject: [PATCH 09/24] Revert inadvertent change to stable_abi.py --- Tools/build/stable_abi.py | 1 + 1 file changed, 1 insertion(+) diff --git a/Tools/build/stable_abi.py b/Tools/build/stable_abi.py index 8b958cc71c7aed..7cba788ff33578 100644 --- a/Tools/build/stable_abi.py +++ b/Tools/build/stable_abi.py @@ -39,6 +39,7 @@ "genobject.h", "longintrepr.h", "parsetok.h", + "pyatomic.h", "token.h", "ucnhash.h", } From 462c20a4aa0b72803f2af894c537dad3dec9336f Mon Sep 17 00:00:00 2001 From: Sam Gross Date: Wed, 23 Aug 2023 17:15:49 -0400 Subject: [PATCH 10/24] Update Include/cpython/pyatomic.h Co-authored-by: Victor Stinner --- Include/cpython/pyatomic.h | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/Include/cpython/pyatomic.h b/Include/cpython/pyatomic.h index 0fe3653292d44b..871a8ddea8007f 100644 --- a/Include/cpython/pyatomic.h +++ b/Include/cpython/pyatomic.h @@ -371,13 +371,13 @@ _Py_atomic_fence_release(void); #ifndef _Py_USE_GCC_BUILTIN_ATOMICS -#if defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)) -#define _Py_USE_GCC_BUILTIN_ATOMICS 1 -#elif defined(__clang__) -#if __has_builtin(__atomic_load) -#define _Py_USE_GCC_BUILTIN_ATOMICS 1 -#endif -#endif +# if defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)) +# define _Py_USE_GCC_BUILTIN_ATOMICS 1 +# elif defined(__clang__) +# if __has_builtin(__atomic_load) +# define _Py_USE_GCC_BUILTIN_ATOMICS 1 +# endif +# endif #endif #if _Py_USE_GCC_BUILTIN_ATOMICS From 3078328343e0429dc578d96f4439568559c56bca Mon Sep 17 00:00:00 2001 From: Sam Gross Date: Wed, 23 Aug 2023 17:15:57 -0400 Subject: [PATCH 11/24] Update Include/cpython/pyatomic.h Co-authored-by: Victor Stinner --- Include/cpython/pyatomic.h | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/Include/cpython/pyatomic.h b/Include/cpython/pyatomic.h index 871a8ddea8007f..c49fbd57e4bf94 100644 --- a/Include/cpython/pyatomic.h +++ b/Include/cpython/pyatomic.h @@ -381,16 +381,16 @@ _Py_atomic_fence_release(void); #endif #if _Py_USE_GCC_BUILTIN_ATOMICS -#define Py_ATOMIC_GCC_H -#include "cpython/pyatomic_gcc.h" +# define Py_ATOMIC_GCC_H +# include "cpython/pyatomic_gcc.h" #elif __STDC_VERSION__ >= 201112L && !defined(__STDC_NO_ATOMICS__) -#define Py_ATOMIC_STD_H -#include "cpython/pyatomic_std.h" +# define Py_ATOMIC_STD_H +# include "cpython/pyatomic_std.h" #elif defined(_MSC_VER) -#define Py_ATOMIC_MSC_H -#include "cpython/pyatomic_msc.h" +# define Py_ATOMIC_MSC_H +# include "cpython/pyatomic_msc.h" #else -#error "define pyatomic for this platform" +# error "no available pyatomic implementation for this platform/compiler" #endif #endif /* Py_ATOMIC_H */ From 4daf1a2daa7538fe29f4fc1ae4b4beae2a222da8 Mon Sep 17 00:00:00 2001 From: Sam Gross Date: Wed, 23 Aug 2023 17:30:48 -0400 Subject: [PATCH 12/24] Update Include/cpython/pyatomic.h Co-authored-by: Victor Stinner --- Include/cpython/pyatomic.h | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/Include/cpython/pyatomic.h b/Include/cpython/pyatomic.h index c49fbd57e4bf94..bf04b4a63d6f49 100644 --- a/Include/cpython/pyatomic.h +++ b/Include/cpython/pyatomic.h @@ -362,12 +362,10 @@ _Py_atomic_store_ptr_release(void *address, void *value); // Sequential consistency fence -static inline void 
-_Py_atomic_fence_seq_cst(void); +static inline void _Py_atomic_fence_seq_cst(void); // Release fence -static inline void -_Py_atomic_fence_release(void); +static inline void _Py_atomic_fence_release(void); #ifndef _Py_USE_GCC_BUILTIN_ATOMICS From f932c77fa100791b13aa6cf86932742a5c8aab8d Mon Sep 17 00:00:00 2001 From: Sam Gross Date: Wed, 23 Aug 2023 15:18:26 -0700 Subject: [PATCH 13/24] Move code documentation around. Fix volatile warning in MSVC. --- Include/cpython/pyatomic.h | 14 ++++++++------ Include/cpython/pyatomic_gcc.h | 8 ++++---- Include/cpython/pyatomic_msc.h | 12 ++++++------ Include/cpython/pyatomic_std.h | 8 ++++---- 4 files changed, 22 insertions(+), 20 deletions(-) diff --git a/Include/cpython/pyatomic.h b/Include/cpython/pyatomic.h index bf04b4a63d6f49..bbc0b4c2f6e124 100644 --- a/Include/cpython/pyatomic.h +++ b/Include/cpython/pyatomic.h @@ -1,6 +1,3 @@ -#ifndef Py_ATOMIC_H -#define Py_ATOMIC_H - // This header provides cross-platform low-level atomic operations // similar to C11 atomics. // @@ -10,17 +7,22 @@ // The "_relaxed" suffix for load and store operations indicates the "relaxed" // memory order. They don't provide synchronization, but (roughly speaking) // guarantee somewhat sane behavior for races instead of undefined behavior. -// In practice, they correspond to "normal" hardware load and store instructions, -// so they are almost as inexpensive as plain loads and stores in C. +// In practice, they correspond to "normal" hardware load and store +// instructions, so they are almost as inexpensive as plain loads and stores +// in C. // // Note that atomic read-modify-write operations like _Py_atomic_add_* return // the previous value of the atomic variable, not the new value. // -// See https://en.cppreference.com/w/c/atomic for more information on C11 atomics. +// See https://en.cppreference.com/w/c/atomic for more information on C11 +// atomics. // See https://www.open-std.org/jtc1/sc22/wg21/docs/papers/2020/p2055r0.pdf // "A Relaxed Guide to memory_order_relaxed" for discussion of and common usage // or relaxed atomics. +#ifndef Py_ATOMIC_H +#define Py_ATOMIC_H + // Atomically adds `value` to `address` and returns the previous value static inline int _Py_atomic_add_int(int *address, int value); diff --git a/Include/cpython/pyatomic_gcc.h b/Include/cpython/pyatomic_gcc.h index cdf3497fd36f83..aff732ecfe61fd 100644 --- a/Include/cpython/pyatomic_gcc.h +++ b/Include/cpython/pyatomic_gcc.h @@ -1,12 +1,12 @@ -#ifndef Py_ATOMIC_GCC_H -# error "this header file must not be included directly" -#endif - // This is the implementation of Python atomic operations using GCC's built-in // functions that match the C+11 memory model. This implementation is preferred // for GCC compatible compilers, such as Clang. These functions are available in // GCC 4.8+ without needing to compile with --std=c11 or --std=gnu11. +#ifndef Py_ATOMIC_GCC_H +# error "this header file must not be included directly" +#endif + static inline int _Py_atomic_add_int(int *address, int value) { diff --git a/Include/cpython/pyatomic_msc.h b/Include/cpython/pyatomic_msc.h index 291952dbe1c3a9..6da4903717ad93 100644 --- a/Include/cpython/pyatomic_msc.h +++ b/Include/cpython/pyatomic_msc.h @@ -1,10 +1,10 @@ +// This is the implementation of Python atomic operations for MSVC if the +// compiler does not support C11 or C++11 atomics. 
+ #ifndef Py_ATOMIC_MSC_H # error "this header file must not be included directly" #endif -// This is the implementation of Python atomic operations for MSVC if the -// compiler does not support C11 or C++11 atomics. - #include @@ -85,7 +85,7 @@ _Py_atomic_add_uint32(uint32_t *address, uint32_t value) static inline uint64_t _Py_atomic_add_uint64(uint64_t *address, uint64_t value) { - return (uint64_t)_Py_atomic_add_int64((volatile int64_t*)address, (int64_t)value); + return (uint64_t)_Py_atomic_add_int64((int64_t*)address, (int64_t)value); } static inline uintptr_t @@ -270,7 +270,7 @@ _Py_atomic_exchange_uint32(uint32_t *address, uint32_t value) static inline uint64_t _Py_atomic_exchange_uint64(uint64_t *address, uint64_t value) { - return (uint64_t)_Py_atomic_exchange_int64((volatile __int64*)address, (__int64)value); + return (uint64_t)_Py_atomic_exchange_int64((int64_t *)address, (int64_t)value); } static inline uintptr_t @@ -693,7 +693,7 @@ _Py_atomic_store_uint32(uint32_t *address, uint32_t value) static inline void _Py_atomic_store_uint64(uint64_t *address, uint64_t value) { - _Py_atomic_exchange_int64((volatile __int64*)address, (__int64)value); + _Py_atomic_exchange_int64((int64_t *)address, (int64_t)value); } static inline void diff --git a/Include/cpython/pyatomic_std.h b/Include/cpython/pyatomic_std.h index 253cbaa3887f92..ae698057795829 100644 --- a/Include/cpython/pyatomic_std.h +++ b/Include/cpython/pyatomic_std.h @@ -1,11 +1,11 @@ -#ifndef Py_ATOMIC_STD_H -# error "this header file must not be included directly" -#endif - // This is the implementation of Python atomic operations using C++11 or C11 // atomics. Note that the pyatomic_gcc.h implementation is preferred for GCC // compatible compilers, even if they support C++11 atomics. +#ifndef Py_ATOMIC_STD_H +# error "this header file must not be included directly" +#endif + #ifdef __cplusplus extern "C++" { #include From e720736e97859b3a095ec5648ec11a77600c5b6e Mon Sep 17 00:00:00 2001 From: Sam Gross Date: Wed, 23 Aug 2023 15:54:00 -0700 Subject: [PATCH 14/24] Clean-up pyatomic_msc.h and add Py_BUILD_ASSERT --- Include/cpython/pyatomic_msc.h | 23 ++++++++++++++--------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/Include/cpython/pyatomic_msc.h b/Include/cpython/pyatomic_msc.h index 6da4903717ad93..830334302d8e81 100644 --- a/Include/cpython/pyatomic_msc.h +++ b/Include/cpython/pyatomic_msc.h @@ -1,5 +1,8 @@ // This is the implementation of Python atomic operations for MSVC if the // compiler does not support C11 or C++11 atomics. +// +// MSVC intrinsics are defined on char, short, long, __int64, and pointer +// types. Note that long and int are both 32-bits even on 64-bit Windows. 
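The Py_BUILD_ASSERT lines added below make those width assumptions explicit, and the 32-bit x86 fallback paths emulate 64-bit read-modify-write operations with a compare-exchange retry loop. The same loop shape, written against portable C11 atomics purely for illustration (this is not the header's code, which uses the MSVC intrinsics):

    #include <stdatomic.h>
    #include <stdint.h>

    /* Illustrative: fetch-and-add built from compare-exchange, the pattern
       used by the 32-bit fallbacks below. */
    static uint64_t
    fetch_add_via_cas(_Atomic uint64_t *address, uint64_t value)
    {
        uint64_t old_value = atomic_load(address);
        for (;;) {
            uint64_t new_value = old_value + value;
            /* On failure, old_value is reloaded with the current contents,
               so the loop simply recomputes and retries. */
            if (atomic_compare_exchange_weak(address, &old_value, new_value)) {
                return old_value;
            }
        }
    }
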
#ifndef Py_ATOMIC_MSC_H # error "this header file must not be included directly" @@ -7,28 +10,31 @@ #include - static inline int _Py_atomic_add_int(int *address, int value) { + Py_BUILD_ASSERT(sizeof(int) == sizeof(long)); return (int)_InterlockedExchangeAdd((volatile long*)address, (long)value); } static inline int8_t _Py_atomic_add_int8(int8_t *address, int8_t value) { + Py_BUILD_ASSERT(sizeof(int8_t) == sizeof(char)); return (int8_t)_InterlockedExchangeAdd8((volatile char*)address, (char)value); } static inline int16_t _Py_atomic_add_int16(int16_t *address, int16_t value) { + Py_BUILD_ASSERT(sizeof(int16_t) == sizeof(short)); return (int16_t)_InterlockedExchangeAdd16((volatile short*)address, (short)value); } static inline int32_t _Py_atomic_add_int32(int32_t *address, int32_t value) { + Py_BUILD_ASSERT(sizeof(int32_t) == sizeof(long)); return (int32_t)_InterlockedExchangeAdd((volatile long*)address, (long)value); } @@ -39,9 +45,9 @@ _Py_atomic_add_int64(int64_t *address, int64_t value) return (int64_t)_InterlockedExchangeAdd64((volatile __int64*)address, (__int64)value); #else for (;;) { - __int64 old_value = *(volatile __int64*)address; - __int64 new_value = old_value + (__int64)value; - if (old_value == _InterlockedCompareExchange64((volatile __int64*)address, new_value, old_value)) { + int64_t old_value = *(volatile int64_t*)address; + int64_t new_value = old_value + value; + if (_Py_atomic_compare_exchange_int64(address, old_value, new_value)) { return old_value; } } @@ -228,9 +234,8 @@ _Py_atomic_exchange_int64(int64_t *address, int64_t value) return (int64_t)_InterlockedExchange64((volatile __int64*)address, (__int64)value); #else for (;;) { - __int64 old_value = *(volatile __int64*)address; - __int64 new_value = (__int64)value; - if (old_value == _InterlockedCompareExchange64((volatile __int64*)address, new_value, old_value)) { + int64_t old_value = *(volatile int64_t*)address; + if (_Py_atomic_compare_exchange_int64(address, old_value, value)) { return old_value; } } @@ -322,7 +327,7 @@ _Py_atomic_and_uint64(uint64_t *address, uint64_t value) for (;;) { uint64_t old_value = *(volatile uint64_t*)address; uint64_t new_value = old_value & value; - if ((__int64)old_value == _InterlockedCompareExchange64((volatile __int64*)address, (__int64)new_value, (__int64)old_value)) { + if (_Py_atomic_compare_exchange_uint64(address, old_value, new_value)) { return old_value; } } @@ -366,7 +371,7 @@ _Py_atomic_or_uint64(uint64_t *address, uint64_t value) for (;;) { uint64_t old_value = *(volatile uint64_t *)address; uint64_t new_value = old_value | value; - if (old_value == _InterlockedCompareExchange64((volatile __int64*)address, (__int64)new_value, (__int64)old_value)) { + if (_Py_atomic_compare_exchange_uint64(address, old_value, new_value)) { return old_value; } } From ee6e49f6eacc1bd0069bf539189eeed6225d0589 Mon Sep 17 00:00:00 2001 From: Sam Gross Date: Wed, 23 Aug 2023 16:15:10 -0700 Subject: [PATCH 15/24] Add _Py_atomic_load_ptr_acquire. And remove _Py_atomic_store_uint64_release.
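The new acquire load, paired with the existing `_Py_atomic_store_ptr_release`, supports the common publication pattern: a writer initializes an object and then release-stores its address; a reader that acquire-loads a non-NULL pointer is guaranteed to see the initialized contents. A sketch of that pairing (the `thing` type and helper names are hypothetical, not part of the patch):

    #include <stddef.h>

    typedef struct { int ready_data; } thing;   /* hypothetical payload */

    static thing storage;
    static void *published;                      /* accessed only atomically */

    static void
    publisher(void)
    {
        storage.ready_data = 42;
        /* Release: the initialization above becomes visible before the
           pointer does. */
        _Py_atomic_store_ptr_release(&published, &storage);
    }

    static void
    consumer(void)
    {
        thing *t = (thing *)_Py_atomic_load_ptr_acquire(&published);
        if (t != NULL) {
            /* Safe: this acquire load synchronizes with the release store. */
            int v = t->ready_data;
            (void)v;
        }
    }
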
--- Include/cpython/pyatomic.h | 8 +++++--- Include/cpython/pyatomic_gcc.h | 7 +++---- Include/cpython/pyatomic_msc.h | 10 +++++----- Include/cpython/pyatomic_std.h | 6 +++--- Modules/_testcapi/pyatomic.c | 12 ++++++++++++ 5 files changed, 28 insertions(+), 15 deletions(-) diff --git a/Include/cpython/pyatomic.h b/Include/cpython/pyatomic.h index bbc0b4c2f6e124..96fd544b52a349 100644 --- a/Include/cpython/pyatomic.h +++ b/Include/cpython/pyatomic.h @@ -355,10 +355,12 @@ _Py_atomic_store_ptr_relaxed(void *address, void *value); static inline void _Py_atomic_store_ssize_relaxed(Py_ssize_t *address, Py_ssize_t value); -// Stores `*address = value` (release operation) -static inline void -_Py_atomic_store_uint64_release(uint64_t *address, uint64_t value); +// Loads `*address` (acquire operation) +static inline void * +_Py_atomic_load_ptr_acquire(const void *address); + +// Stores `*address = value` (release operation) static inline void _Py_atomic_store_ptr_release(void *address, void *value); diff --git a/Include/cpython/pyatomic_gcc.h b/Include/cpython/pyatomic_gcc.h index aff732ecfe61fd..422ce2a234b48a 100644 --- a/Include/cpython/pyatomic_gcc.h +++ b/Include/cpython/pyatomic_gcc.h @@ -653,11 +653,10 @@ _Py_atomic_store_ssize_relaxed(Py_ssize_t *address, Py_ssize_t value) __atomic_store_n(address, value, __ATOMIC_RELAXED); } - -static inline void -_Py_atomic_store_uint64_release(uint64_t *address, uint64_t value) +static inline void * +_Py_atomic_load_ptr_acquire(const void *address) { - __atomic_store_n(address, value, __ATOMIC_RELEASE); + return (void *)__atomic_load_n((void **)address, __ATOMIC_ACQUIRE); } static inline void diff --git a/Include/cpython/pyatomic_msc.h b/Include/cpython/pyatomic_msc.h index 830334302d8e81..188c5c0a75b86e 100644 --- a/Include/cpython/pyatomic_msc.h +++ b/Include/cpython/pyatomic_msc.h @@ -814,15 +814,15 @@ _Py_atomic_store_ssize_relaxed(Py_ssize_t *address, Py_ssize_t value) *(volatile Py_ssize_t *)address = value; } -static inline void -_Py_atomic_store_uint64_release(uint64_t *address, uint64_t value) +static inline void * +_Py_atomic_load_ptr_acquire(const void *address) { #if defined(_M_X64) || defined(_M_IX86) - *(volatile uint64_t *)address = value; + return *(void * volatile *)address; #elif defined(_M_ARM64) - __stlr64(address, value); + return (void *)__ldar64((unsigned __int64 volatile*)address); #else -#error no implementation of _Py_atomic_store_uint64_release +#error no implementation of _Py_atomic_load_ptr_acquire #endif } diff --git a/Include/cpython/pyatomic_std.h b/Include/cpython/pyatomic_std.h index ae698057795829..708001d1b5ace9 100644 --- a/Include/cpython/pyatomic_std.h +++ b/Include/cpython/pyatomic_std.h @@ -770,11 +770,11 @@ _Py_atomic_store_ssize_relaxed(Py_ssize_t *address, Py_ssize_t value) atomic_store_explicit((_Atomic(Py_ssize_t)*)address, value, memory_order_relaxed); } -static inline void -_Py_atomic_store_uint64_release(uint64_t *address, uint64_t value) +static inline void * +_Py_atomic_load_ptr_acquire(const void *address) { _Py_USING_STD - atomic_store_explicit((_Atomic(uint64_t)*)address, value, memory_order_release); + return atomic_load_explicit((const _Atomic(void*)*)address, memory_order_acquire); } static inline void diff --git a/Modules/_testcapi/pyatomic.c b/Modules/_testcapi/pyatomic.c index da54a87915923c..08bd6c91b87114 100644 --- a/Modules/_testcapi/pyatomic.c +++ b/Modules/_testcapi/pyatomic.c @@ -131,6 +131,17 @@ test_atomic_fences(PyObject *self, PyObject *obj) { Py_RETURN_NONE; } +static PyObject * 
+test_atomic_release_acquire(PyObject *self, PyObject *obj) { + void *x = NULL; + void *y = &y; + assert(_Py_atomic_load_ptr_acquire(&x) == NULL); + _Py_atomic_store_ptr_release(&x, y); + assert(x == y); + assert(_Py_atomic_load_ptr_acquire(&x) == y); + Py_RETURN_NONE; +} + // NOTE: all tests should start with "test_atomic_" to be included // in test_pyatomic.py @@ -152,6 +163,7 @@ static PyMethodDef test_methods[] = { FOR_ALL_TYPES(BIND_TEST_LOAD_STORE) FOR_BITWISE_TYPES(BIND_TEST_AND_OR) {"test_atomic_fences", test_atomic_fences, METH_NOARGS}, + {"test_atomic_release_acquire", test_atomic_release_acquire, METH_NOARGS}, {NULL, NULL} /* sentinel */ }; From ca8c3b336674fc78ecf2b80860247ca0a23c82cf Mon Sep 17 00:00:00 2001 From: Sam Gross Date: Thu, 24 Aug 2023 09:00:31 -0700 Subject: [PATCH 16/24] Format files --- Include/cpython/pyatomic_gcc.h | 47 +++++++----- Include/cpython/pyatomic_std.h | 132 ++++++++++++++++++++++----------- 2 files changed, 118 insertions(+), 61 deletions(-) diff --git a/Include/cpython/pyatomic_gcc.h b/Include/cpython/pyatomic_gcc.h index 422ce2a234b48a..b5292e5c12c8e3 100644 --- a/Include/cpython/pyatomic_gcc.h +++ b/Include/cpython/pyatomic_gcc.h @@ -1,7 +1,7 @@ // This is the implementation of Python atomic operations using GCC's built-in // functions that match the C+11 memory model. This implementation is preferred -// for GCC compatible compilers, such as Clang. These functions are available in -// GCC 4.8+ without needing to compile with --std=c11 or --std=gnu11. +// for GCC compatible compilers, such as Clang. These functions are available +// in GCC 4.8+ without needing to compile with --std=c11 or --std=gnu11. #ifndef Py_ATOMIC_GCC_H # error "this header file must not be included directly" @@ -89,86 +89,99 @@ _Py_atomic_add_ssize(Py_ssize_t *address, Py_ssize_t value) static inline int _Py_atomic_compare_exchange_int(int *address, int expected, int value) { - return __atomic_compare_exchange_n(address, &expected, value, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); + return __atomic_compare_exchange_n(address, &expected, value, 0, + __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int _Py_atomic_compare_exchange_int8(int8_t *address, int8_t expected, int8_t value) { - return __atomic_compare_exchange_n(address, &expected, value, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); + return __atomic_compare_exchange_n(address, &expected, value, 0, + __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int _Py_atomic_compare_exchange_int16(int16_t *address, int16_t expected, int16_t value) { - return __atomic_compare_exchange_n(address, &expected, value, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); + return __atomic_compare_exchange_n(address, &expected, value, 0, + __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int _Py_atomic_compare_exchange_int32(int32_t *address, int32_t expected, int32_t value) { - return __atomic_compare_exchange_n(address, &expected, value, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); + return __atomic_compare_exchange_n(address, &expected, value, 0, + __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int _Py_atomic_compare_exchange_int64(int64_t *address, int64_t expected, int64_t value) { - return __atomic_compare_exchange_n(address, &expected, value, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); + return __atomic_compare_exchange_n(address, &expected, value, 0, + __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int _Py_atomic_compare_exchange_intptr(intptr_t *address, intptr_t expected, intptr_t value) { - return 
__atomic_compare_exchange_n(address, &expected, value, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); + return __atomic_compare_exchange_n(address, &expected, value, 0, + __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int _Py_atomic_compare_exchange_uint(unsigned int *address, unsigned int expected, unsigned int value) { - return __atomic_compare_exchange_n(address, &expected, value, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); + return __atomic_compare_exchange_n(address, &expected, value, 0, + __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int _Py_atomic_compare_exchange_uint8(uint8_t *address, uint8_t expected, uint8_t value) { - return __atomic_compare_exchange_n(address, &expected, value, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); + return __atomic_compare_exchange_n(address, &expected, value, 0, + __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int _Py_atomic_compare_exchange_uint16(uint16_t *address, uint16_t expected, uint16_t value) { - return __atomic_compare_exchange_n(address, &expected, value, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); + return __atomic_compare_exchange_n(address, &expected, value, 0, + __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int _Py_atomic_compare_exchange_uint32(uint32_t *address, uint32_t expected, uint32_t value) { - return __atomic_compare_exchange_n(address, &expected, value, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); + return __atomic_compare_exchange_n(address, &expected, value, 0, + __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int _Py_atomic_compare_exchange_uint64(uint64_t *address, uint64_t expected, uint64_t value) { - return __atomic_compare_exchange_n(address, &expected, value, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); + return __atomic_compare_exchange_n(address, &expected, value, 0, + __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int _Py_atomic_compare_exchange_uintptr(uintptr_t *address, uintptr_t expected, uintptr_t value) { - return __atomic_compare_exchange_n(address, &expected, value, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); + return __atomic_compare_exchange_n(address, &expected, value, 0, + __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int _Py_atomic_compare_exchange_ssize(Py_ssize_t *address, Py_ssize_t expected, Py_ssize_t value) { - return __atomic_compare_exchange_n(address, &expected, value, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); + return __atomic_compare_exchange_n(address, &expected, value, 0, + __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int _Py_atomic_compare_exchange_ptr(void *address, void *expected, void *value) { - void *e = expected; - return __atomic_compare_exchange_n((void **)address, &e, value, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); + return __atomic_compare_exchange_n((void **)address, &expected, value, 0, + __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } diff --git a/Include/cpython/pyatomic_std.h b/Include/cpython/pyatomic_std.h index 708001d1b5ace9..ddabb009ea49ab 100644 --- a/Include/cpython/pyatomic_std.h +++ b/Include/cpython/pyatomic_std.h @@ -113,98 +113,112 @@ static inline int _Py_atomic_compare_exchange_int(int *address, int expected, int value) { _Py_USING_STD - return atomic_compare_exchange_strong((_Atomic(int)*)address, &expected, value); + return atomic_compare_exchange_strong((_Atomic(int)*)address, + &expected, value); } static inline int _Py_atomic_compare_exchange_int8(int8_t *address, int8_t expected, int8_t value) { _Py_USING_STD - return atomic_compare_exchange_strong((_Atomic(int8_t)*)address, &expected, value); + return 
atomic_compare_exchange_strong((_Atomic(int8_t)*)address, + &expected, value); } static inline int _Py_atomic_compare_exchange_int16(int16_t *address, int16_t expected, int16_t value) { _Py_USING_STD - return atomic_compare_exchange_strong((_Atomic(int16_t)*)address, &expected, value); + return atomic_compare_exchange_strong((_Atomic(int16_t)*)address, + &expected, value); } static inline int _Py_atomic_compare_exchange_int32(int32_t *address, int32_t expected, int32_t value) { _Py_USING_STD - return atomic_compare_exchange_strong((_Atomic(int32_t)*)address, &expected, value); + return atomic_compare_exchange_strong((_Atomic(int32_t)*)address, + &expected, value); } static inline int _Py_atomic_compare_exchange_int64(int64_t *address, int64_t expected, int64_t value) { _Py_USING_STD - return atomic_compare_exchange_strong((_Atomic(int64_t)*)address, &expected, value); + return atomic_compare_exchange_strong((_Atomic(int64_t)*)address, + &expected, value); } static inline int _Py_atomic_compare_exchange_intptr(intptr_t *address, intptr_t expected, intptr_t value) { _Py_USING_STD - return atomic_compare_exchange_strong((_Atomic(intptr_t)*)address, &expected, value); + return atomic_compare_exchange_strong((_Atomic(intptr_t)*)address, + &expected, value); } static inline int _Py_atomic_compare_exchange_uint(unsigned int *address, unsigned int expected, unsigned int value) { _Py_USING_STD - return atomic_compare_exchange_strong((_Atomic(unsigned int)*)address, &expected, value); + return atomic_compare_exchange_strong((_Atomic(unsigned int)*)address, + &expected, value); } static inline int _Py_atomic_compare_exchange_uint8(uint8_t *address, uint8_t expected, uint8_t value) { _Py_USING_STD - return atomic_compare_exchange_strong((_Atomic(uint8_t)*)address, &expected, value); + return atomic_compare_exchange_strong((_Atomic(uint8_t)*)address, + &expected, value); } static inline int _Py_atomic_compare_exchange_uint16(uint16_t *address, uint16_t expected, uint16_t value) { _Py_USING_STD - return atomic_compare_exchange_strong((_Atomic(uint16_t)*)address, &expected, value); + return atomic_compare_exchange_strong((_Atomic(uint16_t)*)address, + &expected, value); } static inline int _Py_atomic_compare_exchange_uint32(uint32_t *address, uint32_t expected, uint32_t value) { _Py_USING_STD - return atomic_compare_exchange_strong((_Atomic(uint32_t)*)address, &expected, value); + return atomic_compare_exchange_strong((_Atomic(uint32_t)*)address, + &expected, value); } static inline int _Py_atomic_compare_exchange_uint64(uint64_t *address, uint64_t expected, uint64_t value) { _Py_USING_STD - return atomic_compare_exchange_strong((_Atomic(uint64_t)*)address, &expected, value); + return atomic_compare_exchange_strong((_Atomic(uint64_t)*)address, + &expected, value); } static inline int _Py_atomic_compare_exchange_uintptr(uintptr_t *address, uintptr_t expected, uintptr_t value) { _Py_USING_STD - return atomic_compare_exchange_strong((_Atomic(uintptr_t)*)address, &expected, value); + return atomic_compare_exchange_strong((_Atomic(uintptr_t)*)address, + &expected, value); } static inline int _Py_atomic_compare_exchange_ssize(Py_ssize_t *address, Py_ssize_t expected, Py_ssize_t value) { _Py_USING_STD - return atomic_compare_exchange_strong((_Atomic(Py_ssize_t)*)address, &expected, value); + return atomic_compare_exchange_strong((_Atomic(Py_ssize_t)*)address, + &expected, value); } static inline int _Py_atomic_compare_exchange_ptr(void *address, void *expected, void *value) { _Py_USING_STD - return 
atomic_compare_exchange_strong((_Atomic(void *)*)address, &expected, value); + return atomic_compare_exchange_strong((_Atomic(void *)*)address, + &expected, value); } @@ -480,98 +494,112 @@ static inline int _Py_atomic_load_int_relaxed(const int *address) { _Py_USING_STD - return atomic_load_explicit((const _Atomic(int)*)address, memory_order_relaxed); + return atomic_load_explicit((const _Atomic(int)*)address, + memory_order_relaxed); } static inline int8_t _Py_atomic_load_int8_relaxed(const int8_t *address) { _Py_USING_STD - return atomic_load_explicit((const _Atomic(int8_t)*)address, memory_order_relaxed); + return atomic_load_explicit((const _Atomic(int8_t)*)address, + memory_order_relaxed); } static inline int16_t _Py_atomic_load_int16_relaxed(const int16_t *address) { _Py_USING_STD - return atomic_load_explicit((const _Atomic(int16_t)*)address, memory_order_relaxed); + return atomic_load_explicit((const _Atomic(int16_t)*)address, + memory_order_relaxed); } static inline int32_t _Py_atomic_load_int32_relaxed(const int32_t *address) { _Py_USING_STD - return atomic_load_explicit((const _Atomic(int32_t)*)address, memory_order_relaxed); + return atomic_load_explicit((const _Atomic(int32_t)*)address, + memory_order_relaxed); } static inline int64_t _Py_atomic_load_int64_relaxed(const int64_t *address) { _Py_USING_STD - return atomic_load_explicit((const _Atomic(int64_t)*)address, memory_order_relaxed); + return atomic_load_explicit((const _Atomic(int64_t)*)address, + memory_order_relaxed); } static inline intptr_t _Py_atomic_load_intptr_relaxed(const intptr_t *address) { _Py_USING_STD - return atomic_load_explicit((const _Atomic(intptr_t)*)address, memory_order_relaxed); + return atomic_load_explicit((const _Atomic(intptr_t)*)address, + memory_order_relaxed); } static inline uint8_t _Py_atomic_load_uint8_relaxed(const uint8_t *address) { _Py_USING_STD - return atomic_load_explicit((const _Atomic(uint8_t)*)address, memory_order_relaxed); + return atomic_load_explicit((const _Atomic(uint8_t)*)address, + memory_order_relaxed); } static inline uint16_t _Py_atomic_load_uint16_relaxed(const uint16_t *address) { _Py_USING_STD - return atomic_load_explicit((const _Atomic(uint16_t)*)address, memory_order_relaxed); + return atomic_load_explicit((const _Atomic(uint16_t)*)address, + memory_order_relaxed); } static inline uint32_t _Py_atomic_load_uint32_relaxed(const uint32_t *address) { _Py_USING_STD - return atomic_load_explicit((const _Atomic(uint32_t)*)address, memory_order_relaxed); + return atomic_load_explicit((const _Atomic(uint32_t)*)address, + memory_order_relaxed); } static inline uint64_t _Py_atomic_load_uint64_relaxed(const uint64_t *address) { _Py_USING_STD - return atomic_load_explicit((const _Atomic(uint64_t)*)address, memory_order_relaxed); + return atomic_load_explicit((const _Atomic(uint64_t)*)address, + memory_order_relaxed); } static inline uintptr_t _Py_atomic_load_uintptr_relaxed(const uintptr_t *address) { _Py_USING_STD - return atomic_load_explicit((const _Atomic(uintptr_t)*)address, memory_order_relaxed); + return atomic_load_explicit((const _Atomic(uintptr_t)*)address, + memory_order_relaxed); } static inline unsigned int _Py_atomic_load_uint_relaxed(const unsigned int *address) { _Py_USING_STD - return atomic_load_explicit((const _Atomic(unsigned int)*)address, memory_order_relaxed); + return atomic_load_explicit((const _Atomic(unsigned int)*)address, + memory_order_relaxed); } static inline Py_ssize_t _Py_atomic_load_ssize_relaxed(const Py_ssize_t *address) { _Py_USING_STD 
- return atomic_load_explicit((const _Atomic(Py_ssize_t)*)address, memory_order_relaxed); + return atomic_load_explicit((const _Atomic(Py_ssize_t)*)address, + memory_order_relaxed); } static inline void * _Py_atomic_load_ptr_relaxed(const void *address) { _Py_USING_STD - return atomic_load_explicit((const _Atomic(void*)*)address, memory_order_relaxed); + return atomic_load_explicit((const _Atomic(void*)*)address, + memory_order_relaxed); } static inline void @@ -676,112 +704,128 @@ static inline void _Py_atomic_store_int_relaxed(int *address, int value) { _Py_USING_STD - atomic_store_explicit((_Atomic(int)*)address, value, memory_order_relaxed); + atomic_store_explicit((_Atomic(int)*)address, value, + memory_order_relaxed); } static inline void _Py_atomic_store_int8_relaxed(int8_t *address, int8_t value) { _Py_USING_STD - atomic_store_explicit((_Atomic(int8_t)*)address, value, memory_order_relaxed); + atomic_store_explicit((_Atomic(int8_t)*)address, value, + memory_order_relaxed); } static inline void _Py_atomic_store_int16_relaxed(int16_t *address, int16_t value) { _Py_USING_STD - atomic_store_explicit((_Atomic(int16_t)*)address, value, memory_order_relaxed); + atomic_store_explicit((_Atomic(int16_t)*)address, value, + memory_order_relaxed); } static inline void _Py_atomic_store_int32_relaxed(int32_t *address, int32_t value) { _Py_USING_STD - atomic_store_explicit((_Atomic(int32_t)*)address, value, memory_order_relaxed); + atomic_store_explicit((_Atomic(int32_t)*)address, value, + memory_order_relaxed); } static inline void _Py_atomic_store_int64_relaxed(int64_t *address, int64_t value) { _Py_USING_STD - atomic_store_explicit((_Atomic(int64_t)*)address, value, memory_order_relaxed); + atomic_store_explicit((_Atomic(int64_t)*)address, value, + memory_order_relaxed); } static inline void _Py_atomic_store_intptr_relaxed(intptr_t *address, intptr_t value) { _Py_USING_STD - atomic_store_explicit((_Atomic(intptr_t)*)address, value, memory_order_relaxed); + atomic_store_explicit((_Atomic(intptr_t)*)address, value, + memory_order_relaxed); } static inline void _Py_atomic_store_uint8_relaxed(uint8_t *address, uint8_t value) { _Py_USING_STD - atomic_store_explicit((_Atomic(uint8_t)*)address, value, memory_order_relaxed); + atomic_store_explicit((_Atomic(uint8_t)*)address, value, + memory_order_relaxed); } static inline void _Py_atomic_store_uint16_relaxed(uint16_t *address, uint16_t value) { _Py_USING_STD - atomic_store_explicit((_Atomic(uint16_t)*)address, value, memory_order_relaxed); + atomic_store_explicit((_Atomic(uint16_t)*)address, value, + memory_order_relaxed); } static inline void _Py_atomic_store_uint32_relaxed(uint32_t *address, uint32_t value) { _Py_USING_STD - atomic_store_explicit((_Atomic(uint32_t)*)address, value, memory_order_relaxed); + atomic_store_explicit((_Atomic(uint32_t)*)address, value, + memory_order_relaxed); } static inline void _Py_atomic_store_uint64_relaxed(uint64_t *address, uint64_t value) { _Py_USING_STD - atomic_store_explicit((_Atomic(uint64_t)*)address, value, memory_order_relaxed); + atomic_store_explicit((_Atomic(uint64_t)*)address, value, + memory_order_relaxed); } static inline void _Py_atomic_store_uintptr_relaxed(uintptr_t *address, uintptr_t value) { _Py_USING_STD - atomic_store_explicit((_Atomic(uintptr_t)*)address, value, memory_order_relaxed); + atomic_store_explicit((_Atomic(uintptr_t)*)address, value, + memory_order_relaxed); } static inline void _Py_atomic_store_uint_relaxed(unsigned int *address, unsigned int value) { _Py_USING_STD - 
atomic_store_explicit((_Atomic(unsigned int)*)address, value, memory_order_relaxed); + atomic_store_explicit((_Atomic(unsigned int)*)address, value, + memory_order_relaxed); } static inline void _Py_atomic_store_ptr_relaxed(void *address, void *value) { _Py_USING_STD - atomic_store_explicit((_Atomic(void*)*)address, value, memory_order_relaxed); + atomic_store_explicit((_Atomic(void*)*)address, value, + memory_order_relaxed); } static inline void _Py_atomic_store_ssize_relaxed(Py_ssize_t *address, Py_ssize_t value) { _Py_USING_STD - atomic_store_explicit((_Atomic(Py_ssize_t)*)address, value, memory_order_relaxed); + atomic_store_explicit((_Atomic(Py_ssize_t)*)address, value, + memory_order_relaxed); } static inline void * _Py_atomic_load_ptr_acquire(const void *address) { _Py_USING_STD - return atomic_load_explicit((const _Atomic(void*)*)address, memory_order_acquire); + return atomic_load_explicit((const _Atomic(void*)*)address, + memory_order_acquire); } static inline void _Py_atomic_store_ptr_release(void *address, void *value) { _Py_USING_STD - atomic_store_explicit((_Atomic(void*)*)address, value, memory_order_release); + atomic_store_explicit((_Atomic(void*)*)address, value, + memory_order_release); } static inline void From 2568ad9e7e3b0fd55aaea5a56bd89ce352483b7e Mon Sep 17 00:00:00 2001 From: Sam Gross Date: Thu, 24 Aug 2023 11:05:39 -0700 Subject: [PATCH 17/24] Add note about atomics in whatsnew --- Doc/whatsnew/3.13.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/Doc/whatsnew/3.13.rst b/Doc/whatsnew/3.13.rst index bfab868d1c5b62..22f2bd2d1574b2 100644 --- a/Doc/whatsnew/3.13.rst +++ b/Doc/whatsnew/3.13.rst @@ -798,6 +798,8 @@ Build Changes * SQLite 3.15.2 or newer is required to build the :mod:`sqlite3` extension module. (Contributed by Erlend Aasland in :gh:`105875`.) +* Building CPython now requires a compiler with support for the C11 atomic + library, GCC built-in atomic functions, or MSVC interlocked intrinsics. C API Changes ============= From 2d082902da001e4b71d7935f66ff95b605688149 Mon Sep 17 00:00:00 2001 From: Sam Gross Date: Thu, 24 Aug 2023 11:24:51 -0700 Subject: [PATCH 18/24] Describe volatile meaning on MSVC. --- Include/cpython/pyatomic_msc.h | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/Include/cpython/pyatomic_msc.h b/Include/cpython/pyatomic_msc.h index 188c5c0a75b86e..819a7cf89c7830 100644 --- a/Include/cpython/pyatomic_msc.h +++ b/Include/cpython/pyatomic_msc.h @@ -2,7 +2,12 @@ // compiler does not support C11 or C++11 atomics. // // MSVC intrinsics are defined on char, short, long, __int64, and pointer -// types. Note that long and int are both 32-bits even on 64-bit Windows. +// types. Note that long and int are both 32-bits even on 64-bit Windows, +// so operations on int are cast to long. +// +// The volatile keyword has additional memory ordering semantics on MSVC. On +// x86 and x86-64, volatile accesses have acquire-release semantics. On ARM64, +// volatile accesses behave like C11's memory_order_relaxed. 
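For illustration only (a sketch, not taken from this patch): given these semantics, a relaxed load on MSVC can plausibly be a plain volatile read on both x86-64 and ARM64, while the sequentially consistent operations still need interlocked intrinsics or explicit barriers.

    static inline int
    example_load_int_relaxed(const int *address)
    {
        /* A volatile read is at least memory_order_relaxed on every MSVC
           target described above. */
        return *(volatile int *)address;
    }
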
#ifndef Py_ATOMIC_MSC_H # error "this header file must not be included directly" From 71d981ed0939408a66fd4a6365f5abc961dd361a Mon Sep 17 00:00:00 2001 From: Sam Gross Date: Thu, 24 Aug 2023 11:53:00 -0700 Subject: [PATCH 19/24] Add link to atomic_thread_fence documentation --- Include/cpython/pyatomic.h | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/Include/cpython/pyatomic.h b/Include/cpython/pyatomic.h index 96fd544b52a349..f23d999add367a 100644 --- a/Include/cpython/pyatomic.h +++ b/Include/cpython/pyatomic.h @@ -365,7 +365,10 @@ static inline void _Py_atomic_store_ptr_release(void *address, void *value); -// Sequential consistency fence +// Sequential consistency fence. C11 fences have complex semantics. When +// possible, use the atomic operations on variables defined above, which +// generally do not require explicit use of a fence. +// See https://en.cppreference.com/w/cpp/atomic/atomic_thread_fence static inline void _Py_atomic_fence_seq_cst(void); // Release fence From 457ce2106ba78109f5855e3b320bef6ef46d8c42 Mon Sep 17 00:00:00 2001 From: Sam Gross Date: Thu, 24 Aug 2023 12:57:31 -0700 Subject: [PATCH 20/24] Fix _Py_atomic_exchange_int64 on 32-bit Windows --- Include/cpython/pyatomic_msc.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Include/cpython/pyatomic_msc.h b/Include/cpython/pyatomic_msc.h index 819a7cf89c7830..c21499e5b715af 100644 --- a/Include/cpython/pyatomic_msc.h +++ b/Include/cpython/pyatomic_msc.h @@ -240,7 +240,7 @@ _Py_atomic_exchange_int64(int64_t *address, int64_t value) #else for (;;) { int64_t old_value = *(volatile int64_t*)address; - if (old_value == _Py_atomic_compare_exchange_int64(address, old_value, value)) { + if (_Py_atomic_compare_exchange_int64(address, old_value, value)) { return old_value; } } From 9dd0f0b9ce4d4737d3e9a7d70c8b92ab6521e31d Mon Sep 17 00:00:00 2001 From: Sam Gross Date: Tue, 29 Aug 2023 11:25:35 -0700 Subject: [PATCH 21/24] Add pyatomic_gcc/std.h to vcxproj filters --- PCbuild/pythoncore.vcxproj.filters | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/PCbuild/pythoncore.vcxproj.filters b/PCbuild/pythoncore.vcxproj.filters index b9d09716df42f3..d5667097da99cd 100644 --- a/PCbuild/pythoncore.vcxproj.filters +++ b/PCbuild/pythoncore.vcxproj.filters @@ -426,9 +426,15 @@ Include + + Include + Include + + Include + Include From 7611965f6c02f3123b0f96289e4f8a36ff7526f4 Mon Sep 17 00:00:00 2001 From: Sam Gross Date: Tue, 29 Aug 2023 11:45:47 -0700 Subject: [PATCH 22/24] Changes from review: - Add pyatomic_*.h headers to MSVC filters - Use a pointer for 2nd argument of compare_exchange functions - Rename address to ptr --- Include/cpython/pyatomic.h | 248 +++++++------- Include/cpython/pyatomic_gcc.h | 436 ++++++++++++------------- Include/cpython/pyatomic_msc.h | 571 ++++++++++++++++++--------------- Include/cpython/pyatomic_std.h | 464 +++++++++++++-------------- Modules/_testcapi/pyatomic.c | 9 +- 5 files changed, 903 insertions(+), 825 deletions(-) diff --git a/Include/cpython/pyatomic.h b/Include/cpython/pyatomic.h index f23d999add367a..8a95f7b49a7b7e 100644 --- a/Include/cpython/pyatomic.h +++ b/Include/cpython/pyatomic.h @@ -23,346 +23,350 @@ #ifndef Py_ATOMIC_H #define Py_ATOMIC_H -// Atomically adds `value` to `address` and returns the previous value +// Atomically adds `value` to `ptr` and returns the previous value static inline int -_Py_atomic_add_int(int *address, int value); +_Py_atomic_add_int(int *ptr, int value); static inline int8_t -_Py_atomic_add_int8(int8_t *address, 
int8_t value); +_Py_atomic_add_int8(int8_t *ptr, int8_t value); static inline int16_t -_Py_atomic_add_int16(int16_t *address, int16_t value); +_Py_atomic_add_int16(int16_t *ptr, int16_t value); static inline int32_t -_Py_atomic_add_int32(int32_t *address, int32_t value); +_Py_atomic_add_int32(int32_t *ptr, int32_t value); static inline int64_t -_Py_atomic_add_int64(int64_t *address, int64_t value); +_Py_atomic_add_int64(int64_t *ptr, int64_t value); static inline intptr_t -_Py_atomic_add_intptr(intptr_t *address, intptr_t value); +_Py_atomic_add_intptr(intptr_t *ptr, intptr_t value); static inline unsigned int -_Py_atomic_add_uint(unsigned int *address, unsigned int value); +_Py_atomic_add_uint(unsigned int *ptr, unsigned int value); static inline uint8_t -_Py_atomic_add_uint8(uint8_t *address, uint8_t value); +_Py_atomic_add_uint8(uint8_t *ptr, uint8_t value); static inline uint16_t -_Py_atomic_add_uint16(uint16_t *address, uint16_t value); +_Py_atomic_add_uint16(uint16_t *ptr, uint16_t value); static inline uint32_t -_Py_atomic_add_uint32(uint32_t *address, uint32_t value); +_Py_atomic_add_uint32(uint32_t *ptr, uint32_t value); static inline uint64_t -_Py_atomic_add_uint64(uint64_t *address, uint64_t value); +_Py_atomic_add_uint64(uint64_t *ptr, uint64_t value); static inline uintptr_t -_Py_atomic_add_uintptr(uintptr_t *address, uintptr_t value); +_Py_atomic_add_uintptr(uintptr_t *ptr, uintptr_t value); static inline Py_ssize_t -_Py_atomic_add_ssize(Py_ssize_t *address, Py_ssize_t value); +_Py_atomic_add_ssize(Py_ssize_t *ptr, Py_ssize_t value); -// Performs an atomic compare-and-exchange. If `*address` and `expected` are equal, -// then `value` is stored in `*address`. Returns 1 on success and 0 on failure. -// These correspond to the "strong" variations of the C11 atomic_compare_exchange_* functions. +// Performs an atomic compare-and-exchange. If `*ptr` and `*expected` are +// equal, then `desired` is stored in `*ptr`. Otherwise `*expected` is updated +// with the current value of `*ptr`. Returns 1 on success and 0 on failure. +// These correspond to the "strong" variations of the C11 +// atomic_compare_exchange_* functions. 
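A usage sketch (hypothetical helper, not part of the patch) showing why the pointer-based `expected` is convenient: on failure the local copy is refreshed with the current value of `*ptr`, so a retry loop needs no separate reload. The 32-bit Windows fallbacks in pyatomic_msc.h use the same pattern.

    static inline int64_t
    example_add_clamped(int64_t *obj, int64_t value, int64_t maximum)
    {
        int64_t old_value = _Py_atomic_load_int64_relaxed(obj);
        int64_t new_value;
        do {
            new_value = old_value + value;
            if (new_value > maximum) {
                new_value = maximum;
            }
            /* On failure, old_value is updated to the current contents of *obj. */
        } while (!_Py_atomic_compare_exchange_int64(obj, &old_value, new_value));
        return old_value;
    }
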
static inline int -_Py_atomic_compare_exchange_int(int *address, int expected, int value); +_Py_atomic_compare_exchange_int(int *ptr, int *expected, int desired); static inline int -_Py_atomic_compare_exchange_int8(int8_t *address, int8_t expected, int8_t value); +_Py_atomic_compare_exchange_int8(int8_t *ptr, int8_t *expected, int8_t desired); static inline int -_Py_atomic_compare_exchange_int16(int16_t *address, int16_t expected, int16_t value); +_Py_atomic_compare_exchange_int16(int16_t *ptr, int16_t *expected, int16_t desired); static inline int -_Py_atomic_compare_exchange_int32(int32_t *address, int32_t expected, int32_t value); +_Py_atomic_compare_exchange_int32(int32_t *ptr, int32_t *expected, int32_t desired); static inline int -_Py_atomic_compare_exchange_int64(int64_t *address, int64_t expected, int64_t value); +_Py_atomic_compare_exchange_int64(int64_t *ptr, int64_t *expected, int64_t desired); static inline int -_Py_atomic_compare_exchange_intptr(intptr_t *address, intptr_t expected, intptr_t value); +_Py_atomic_compare_exchange_intptr(intptr_t *ptr, intptr_t *expected, intptr_t desired); static inline int -_Py_atomic_compare_exchange_uint(unsigned int *address, unsigned int expected, unsigned int value); +_Py_atomic_compare_exchange_uint(unsigned int *ptr, unsigned int *expected, unsigned int desired); static inline int -_Py_atomic_compare_exchange_uint8(uint8_t *address, uint8_t expected, uint8_t value); +_Py_atomic_compare_exchange_uint8(uint8_t *ptr, uint8_t *expected, uint8_t desired); static inline int -_Py_atomic_compare_exchange_uint16(uint16_t *address, uint16_t expected, uint16_t value); +_Py_atomic_compare_exchange_uint16(uint16_t *ptr, uint16_t *expected, uint16_t desired); static inline int -_Py_atomic_compare_exchange_uint32(uint32_t *address, uint32_t expected, uint32_t value); +_Py_atomic_compare_exchange_uint32(uint32_t *ptr, uint32_t *expected, uint32_t desired); static inline int -_Py_atomic_compare_exchange_uint64(uint64_t *address, uint64_t expected, uint64_t value); +_Py_atomic_compare_exchange_uint64(uint64_t *ptr, uint64_t *expected, uint64_t desired); static inline int -_Py_atomic_compare_exchange_uintptr(uintptr_t *address, uintptr_t expected, uintptr_t value); +_Py_atomic_compare_exchange_uintptr(uintptr_t *ptr, uintptr_t *expected, uintptr_t desired); static inline int -_Py_atomic_compare_exchange_ssize(Py_ssize_t *address, Py_ssize_t expected, Py_ssize_t value); +_Py_atomic_compare_exchange_ssize(Py_ssize_t *ptr, Py_ssize_t *expected, Py_ssize_t desired); +// NOTE: `ptr` and `expected` are logically `void**` types, but we use `void*` +// so that we can pass types like `PyObject**` without a cast. static inline int -_Py_atomic_compare_exchange_ptr(void *address, void *expected, void *value); +_Py_atomic_compare_exchange_ptr(void *ptr, void *expected, void *value); -// Atomically replaces `*address` with `value` and returns the previous value of `*address`. +// Atomically replaces `*ptr` with `value` and returns the previous value of `*ptr`. 
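For instance (hypothetical helper, illustrative only), an exchange can hand off ownership of a pointer slot in a single step, using the same `void *` calling convention noted above:

    static PyObject *
    example_swap_slot(PyObject **slot, PyObject *new_obj)
    {
        /* Returns the previous contents of *slot; the caller now owns it. */
        return (PyObject *)_Py_atomic_exchange_ptr(slot, new_obj);
    }
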
static inline int -_Py_atomic_exchange_int(int *address, int value); +_Py_atomic_exchange_int(int *ptr, int value); static inline int8_t -_Py_atomic_exchange_int8(int8_t *address, int8_t value); +_Py_atomic_exchange_int8(int8_t *ptr, int8_t value); static inline int16_t -_Py_atomic_exchange_int16(int16_t *address, int16_t value); +_Py_atomic_exchange_int16(int16_t *ptr, int16_t value); static inline int32_t -_Py_atomic_exchange_int32(int32_t *address, int32_t value); +_Py_atomic_exchange_int32(int32_t *ptr, int32_t value); static inline int64_t -_Py_atomic_exchange_int64(int64_t *address, int64_t value); +_Py_atomic_exchange_int64(int64_t *ptr, int64_t value); static inline intptr_t -_Py_atomic_exchange_intptr(intptr_t *address, intptr_t value); +_Py_atomic_exchange_intptr(intptr_t *ptr, intptr_t value); static inline unsigned int -_Py_atomic_exchange_uint(unsigned int *address, unsigned int value); +_Py_atomic_exchange_uint(unsigned int *ptr, unsigned int value); static inline uint8_t -_Py_atomic_exchange_uint8(uint8_t *address, uint8_t value); +_Py_atomic_exchange_uint8(uint8_t *ptr, uint8_t value); static inline uint16_t -_Py_atomic_exchange_uint16(uint16_t *address, uint16_t value); +_Py_atomic_exchange_uint16(uint16_t *ptr, uint16_t value); static inline uint32_t -_Py_atomic_exchange_uint32(uint32_t *address, uint32_t value); +_Py_atomic_exchange_uint32(uint32_t *ptr, uint32_t value); static inline uint64_t -_Py_atomic_exchange_uint64(uint64_t *address, uint64_t value); +_Py_atomic_exchange_uint64(uint64_t *ptr, uint64_t value); static inline uintptr_t -_Py_atomic_exchange_uintptr(uintptr_t *address, uintptr_t value); +_Py_atomic_exchange_uintptr(uintptr_t *ptr, uintptr_t value); static inline Py_ssize_t -_Py_atomic_exchange_ssize(Py_ssize_t *address, Py_ssize_t value); +_Py_atomic_exchange_ssize(Py_ssize_t *ptr, Py_ssize_t value); static inline void * -_Py_atomic_exchange_ptr(void *address, void *value); +_Py_atomic_exchange_ptr(void *ptr, void *value); -// Performs `*address &= value` atomically and returns the previous value of `*address`. +// Performs `*ptr &= value` atomically and returns the previous value of `*ptr`. static inline uint8_t -_Py_atomic_and_uint8(uint8_t *address, uint8_t value); +_Py_atomic_and_uint8(uint8_t *ptr, uint8_t value); static inline uint16_t -_Py_atomic_and_uint16(uint16_t *address, uint16_t value); +_Py_atomic_and_uint16(uint16_t *ptr, uint16_t value); static inline uint32_t -_Py_atomic_and_uint32(uint32_t *address, uint32_t value); +_Py_atomic_and_uint32(uint32_t *ptr, uint32_t value); static inline uint64_t -_Py_atomic_and_uint64(uint64_t *address, uint64_t value); +_Py_atomic_and_uint64(uint64_t *ptr, uint64_t value); static inline uintptr_t -_Py_atomic_and_uintptr(uintptr_t *address, uintptr_t value); +_Py_atomic_and_uintptr(uintptr_t *ptr, uintptr_t value); -// Performs `*address |= value` atomically and returns the previous value of `*address`. +// Performs `*ptr |= value` atomically and returns the previous value of `*ptr`. 
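A sketch of the usual flag idiom that these fetch-and-and/fetch-and-or operations support (the flag name and helper are illustrative, not from the patch):

    #define EXAMPLE_FLAG_INITIALIZED 0x01

    /* Returns 1 if this call set the flag, 0 if it was already set. */
    static int
    example_set_flag_once(uint8_t *flags)
    {
        uint8_t old_flags = _Py_atomic_or_uint8(flags, EXAMPLE_FLAG_INITIALIZED);
        return (old_flags & EXAMPLE_FLAG_INITIALIZED) == 0;
    }
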
static inline uint8_t -_Py_atomic_or_uint8(uint8_t *address, uint8_t value); +_Py_atomic_or_uint8(uint8_t *ptr, uint8_t value); static inline uint16_t -_Py_atomic_or_uint16(uint16_t *address, uint16_t value); +_Py_atomic_or_uint16(uint16_t *ptr, uint16_t value); static inline uint32_t -_Py_atomic_or_uint32(uint32_t *address, uint32_t value); +_Py_atomic_or_uint32(uint32_t *ptr, uint32_t value); static inline uint64_t -_Py_atomic_or_uint64(uint64_t *address, uint64_t value); +_Py_atomic_or_uint64(uint64_t *ptr, uint64_t value); static inline uintptr_t -_Py_atomic_or_uintptr(uintptr_t *address, uintptr_t value); +_Py_atomic_or_uintptr(uintptr_t *ptr, uintptr_t value); -// Atomically loads `*address` (sequential consistency) +// Atomically loads `*ptr` (sequential consistency) static inline int -_Py_atomic_load_int(const int *address); +_Py_atomic_load_int(const int *ptr); static inline int8_t -_Py_atomic_load_int8(const int8_t *address); +_Py_atomic_load_int8(const int8_t *ptr); static inline int16_t -_Py_atomic_load_int16(const int16_t *address); +_Py_atomic_load_int16(const int16_t *ptr); static inline int32_t -_Py_atomic_load_int32(const int32_t *address); +_Py_atomic_load_int32(const int32_t *ptr); static inline int64_t -_Py_atomic_load_int64(const int64_t *address); +_Py_atomic_load_int64(const int64_t *ptr); static inline intptr_t -_Py_atomic_load_intptr(const intptr_t *address); +_Py_atomic_load_intptr(const intptr_t *ptr); static inline uint8_t -_Py_atomic_load_uint8(const uint8_t *address); +_Py_atomic_load_uint8(const uint8_t *ptr); static inline uint16_t -_Py_atomic_load_uint16(const uint16_t *address); +_Py_atomic_load_uint16(const uint16_t *ptr); static inline uint32_t -_Py_atomic_load_uint32(const uint32_t *address); +_Py_atomic_load_uint32(const uint32_t *ptr); static inline uint64_t -_Py_atomic_load_uint64(const uint64_t *address); +_Py_atomic_load_uint64(const uint64_t *ptr); static inline uintptr_t -_Py_atomic_load_uintptr(const uintptr_t *address); +_Py_atomic_load_uintptr(const uintptr_t *ptr); static inline unsigned int -_Py_atomic_load_uint(const unsigned int *address); +_Py_atomic_load_uint(const unsigned int *ptr); static inline Py_ssize_t -_Py_atomic_load_ssize(const Py_ssize_t *address); +_Py_atomic_load_ssize(const Py_ssize_t *ptr); static inline void * -_Py_atomic_load_ptr(const void *address); +_Py_atomic_load_ptr(const void *ptr); -// Loads `*address` (relaxed consistency, i.e., no ordering) +// Loads `*ptr` (relaxed consistency, i.e., no ordering) static inline int -_Py_atomic_load_int_relaxed(const int *address); +_Py_atomic_load_int_relaxed(const int *ptr); static inline int8_t -_Py_atomic_load_int8_relaxed(const int8_t *address); +_Py_atomic_load_int8_relaxed(const int8_t *ptr); static inline int16_t -_Py_atomic_load_int16_relaxed(const int16_t *address); +_Py_atomic_load_int16_relaxed(const int16_t *ptr); static inline int32_t -_Py_atomic_load_int32_relaxed(const int32_t *address); +_Py_atomic_load_int32_relaxed(const int32_t *ptr); static inline int64_t -_Py_atomic_load_int64_relaxed(const int64_t *address); +_Py_atomic_load_int64_relaxed(const int64_t *ptr); static inline intptr_t -_Py_atomic_load_intptr_relaxed(const intptr_t *address); +_Py_atomic_load_intptr_relaxed(const intptr_t *ptr); static inline uint8_t -_Py_atomic_load_uint8_relaxed(const uint8_t *address); +_Py_atomic_load_uint8_relaxed(const uint8_t *ptr); static inline uint16_t -_Py_atomic_load_uint16_relaxed(const uint16_t *address); +_Py_atomic_load_uint16_relaxed(const uint16_t *ptr); 
static inline uint32_t -_Py_atomic_load_uint32_relaxed(const uint32_t *address); +_Py_atomic_load_uint32_relaxed(const uint32_t *ptr); static inline uint64_t -_Py_atomic_load_uint64_relaxed(const uint64_t *address); +_Py_atomic_load_uint64_relaxed(const uint64_t *ptr); static inline uintptr_t -_Py_atomic_load_uintptr_relaxed(const uintptr_t *address); +_Py_atomic_load_uintptr_relaxed(const uintptr_t *ptr); static inline unsigned int -_Py_atomic_load_uint_relaxed(const unsigned int *address); +_Py_atomic_load_uint_relaxed(const unsigned int *ptr); static inline Py_ssize_t -_Py_atomic_load_ssize_relaxed(const Py_ssize_t *address); +_Py_atomic_load_ssize_relaxed(const Py_ssize_t *ptr); static inline void * -_Py_atomic_load_ptr_relaxed(const void *address); +_Py_atomic_load_ptr_relaxed(const void *ptr); -// Atomically performs `*address = value` (sequential consistency) +// Atomically performs `*ptr = value` (sequential consistency) static inline void -_Py_atomic_store_int(int *address, int value); +_Py_atomic_store_int(int *ptr, int value); static inline void -_Py_atomic_store_int8(int8_t *address, int8_t value); +_Py_atomic_store_int8(int8_t *ptr, int8_t value); static inline void -_Py_atomic_store_int16(int16_t *address, int16_t value); +_Py_atomic_store_int16(int16_t *ptr, int16_t value); static inline void -_Py_atomic_store_int32(int32_t *address, int32_t value); +_Py_atomic_store_int32(int32_t *ptr, int32_t value); static inline void -_Py_atomic_store_int64(int64_t *address, int64_t value); +_Py_atomic_store_int64(int64_t *ptr, int64_t value); static inline void -_Py_atomic_store_intptr(intptr_t *address, intptr_t value); +_Py_atomic_store_intptr(intptr_t *ptr, intptr_t value); static inline void -_Py_atomic_store_uint8(uint8_t *address, uint8_t value); +_Py_atomic_store_uint8(uint8_t *ptr, uint8_t value); static inline void -_Py_atomic_store_uint16(uint16_t *address, uint16_t value); +_Py_atomic_store_uint16(uint16_t *ptr, uint16_t value); static inline void -_Py_atomic_store_uint32(uint32_t *address, uint32_t value); +_Py_atomic_store_uint32(uint32_t *ptr, uint32_t value); static inline void -_Py_atomic_store_uint64(uint64_t *address, uint64_t value); +_Py_atomic_store_uint64(uint64_t *ptr, uint64_t value); static inline void -_Py_atomic_store_uintptr(uintptr_t *address, uintptr_t value); +_Py_atomic_store_uintptr(uintptr_t *ptr, uintptr_t value); static inline void -_Py_atomic_store_uint(unsigned int *address, unsigned int value); +_Py_atomic_store_uint(unsigned int *ptr, unsigned int value); static inline void -_Py_atomic_store_ptr(void *address, void *value); +_Py_atomic_store_ptr(void *ptr, void *value); static inline void -_Py_atomic_store_ssize(Py_ssize_t* address, Py_ssize_t value); +_Py_atomic_store_ssize(Py_ssize_t* ptr, Py_ssize_t value); -// Stores `*address = value` (relaxed consistency, i.e., no ordering) +// Stores `*ptr = value` (relaxed consistency, i.e., no ordering) static inline void -_Py_atomic_store_int_relaxed(int *address, int value); +_Py_atomic_store_int_relaxed(int *ptr, int value); static inline void -_Py_atomic_store_int8_relaxed(int8_t *address, int8_t value); +_Py_atomic_store_int8_relaxed(int8_t *ptr, int8_t value); static inline void -_Py_atomic_store_int16_relaxed(int16_t *address, int16_t value); +_Py_atomic_store_int16_relaxed(int16_t *ptr, int16_t value); static inline void -_Py_atomic_store_int32_relaxed(int32_t *address, int32_t value); +_Py_atomic_store_int32_relaxed(int32_t *ptr, int32_t value); static inline void 
-_Py_atomic_store_int64_relaxed(int64_t *address, int64_t value); +_Py_atomic_store_int64_relaxed(int64_t *ptr, int64_t value); static inline void -_Py_atomic_store_intptr_relaxed(intptr_t *address, intptr_t value); +_Py_atomic_store_intptr_relaxed(intptr_t *ptr, intptr_t value); static inline void -_Py_atomic_store_uint8_relaxed(uint8_t* address, uint8_t value); +_Py_atomic_store_uint8_relaxed(uint8_t* ptr, uint8_t value); static inline void -_Py_atomic_store_uint16_relaxed(uint16_t *address, uint16_t value); +_Py_atomic_store_uint16_relaxed(uint16_t *ptr, uint16_t value); static inline void -_Py_atomic_store_uint32_relaxed(uint32_t *address, uint32_t value); +_Py_atomic_store_uint32_relaxed(uint32_t *ptr, uint32_t value); static inline void -_Py_atomic_store_uint64_relaxed(uint64_t *address, uint64_t value); +_Py_atomic_store_uint64_relaxed(uint64_t *ptr, uint64_t value); static inline void -_Py_atomic_store_uintptr_relaxed(uintptr_t *address, uintptr_t value); +_Py_atomic_store_uintptr_relaxed(uintptr_t *ptr, uintptr_t value); static inline void -_Py_atomic_store_uint_relaxed(unsigned int *address, unsigned int value); +_Py_atomic_store_uint_relaxed(unsigned int *ptr, unsigned int value); static inline void -_Py_atomic_store_ptr_relaxed(void *address, void *value); +_Py_atomic_store_ptr_relaxed(void *ptr, void *value); static inline void -_Py_atomic_store_ssize_relaxed(Py_ssize_t *address, Py_ssize_t value); +_Py_atomic_store_ssize_relaxed(Py_ssize_t *ptr, Py_ssize_t value); -// Loads `*address` (acquire operation) +// Loads `*ptr` (acquire operation) static inline void * -_Py_atomic_load_ptr_acquire(const void *address); +_Py_atomic_load_ptr_acquire(const void *ptr); -// Stores `*address = value` (release operation) +// Stores `*ptr = value` (release operation) static inline void -_Py_atomic_store_ptr_release(void *address, void *value); +_Py_atomic_store_ptr_release(void *ptr, void *value); // Sequential consistency fence. C11 fences have complex semantics. 
When diff --git a/Include/cpython/pyatomic_gcc.h b/Include/cpython/pyatomic_gcc.h index b5292e5c12c8e3..356b62a70a598d 100644 --- a/Include/cpython/pyatomic_gcc.h +++ b/Include/cpython/pyatomic_gcc.h @@ -8,674 +8,674 @@ #endif static inline int -_Py_atomic_add_int(int *address, int value) +_Py_atomic_add_int(int *ptr, int value) { - return __atomic_fetch_add(address, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_add(ptr, value, __ATOMIC_SEQ_CST); } static inline int8_t -_Py_atomic_add_int8(int8_t *address, int8_t value) +_Py_atomic_add_int8(int8_t *ptr, int8_t value) { - return __atomic_fetch_add(address, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_add(ptr, value, __ATOMIC_SEQ_CST); } static inline int16_t -_Py_atomic_add_int16(int16_t *address, int16_t value) +_Py_atomic_add_int16(int16_t *ptr, int16_t value) { - return __atomic_fetch_add(address, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_add(ptr, value, __ATOMIC_SEQ_CST); } static inline int32_t -_Py_atomic_add_int32(int32_t *address, int32_t value) +_Py_atomic_add_int32(int32_t *ptr, int32_t value) { - return __atomic_fetch_add(address, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_add(ptr, value, __ATOMIC_SEQ_CST); } static inline int64_t -_Py_atomic_add_int64(int64_t *address, int64_t value) +_Py_atomic_add_int64(int64_t *ptr, int64_t value) { - return __atomic_fetch_add(address, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_add(ptr, value, __ATOMIC_SEQ_CST); } static inline intptr_t -_Py_atomic_add_intptr(intptr_t *address, intptr_t value) +_Py_atomic_add_intptr(intptr_t *ptr, intptr_t value) { - return __atomic_fetch_add(address, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_add(ptr, value, __ATOMIC_SEQ_CST); } static inline unsigned int -_Py_atomic_add_uint(unsigned int *address, unsigned int value) +_Py_atomic_add_uint(unsigned int *ptr, unsigned int value) { - return __atomic_fetch_add(address, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_add(ptr, value, __ATOMIC_SEQ_CST); } static inline uint8_t -_Py_atomic_add_uint8(uint8_t *address, uint8_t value) +_Py_atomic_add_uint8(uint8_t *ptr, uint8_t value) { - return __atomic_fetch_add(address, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_add(ptr, value, __ATOMIC_SEQ_CST); } static inline uint16_t -_Py_atomic_add_uint16(uint16_t *address, uint16_t value) +_Py_atomic_add_uint16(uint16_t *ptr, uint16_t value) { - return __atomic_fetch_add(address, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_add(ptr, value, __ATOMIC_SEQ_CST); } static inline uint32_t -_Py_atomic_add_uint32(uint32_t *address, uint32_t value) +_Py_atomic_add_uint32(uint32_t *ptr, uint32_t value) { - return __atomic_fetch_add(address, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_add(ptr, value, __ATOMIC_SEQ_CST); } static inline uint64_t -_Py_atomic_add_uint64(uint64_t *address, uint64_t value) +_Py_atomic_add_uint64(uint64_t *ptr, uint64_t value) { - return __atomic_fetch_add(address, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_add(ptr, value, __ATOMIC_SEQ_CST); } static inline uintptr_t -_Py_atomic_add_uintptr(uintptr_t *address, uintptr_t value) +_Py_atomic_add_uintptr(uintptr_t *ptr, uintptr_t value) { - return __atomic_fetch_add(address, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_add(ptr, value, __ATOMIC_SEQ_CST); } static inline Py_ssize_t -_Py_atomic_add_ssize(Py_ssize_t *address, Py_ssize_t value) +_Py_atomic_add_ssize(Py_ssize_t *ptr, Py_ssize_t value) { - return __atomic_fetch_add(address, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_add(ptr, 
value, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_compare_exchange_int(int *address, int expected, int value) +_Py_atomic_compare_exchange_int(int *ptr, int *expected, int desired) { - return __atomic_compare_exchange_n(address, &expected, value, 0, + return __atomic_compare_exchange_n(ptr, expected, desired, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_compare_exchange_int8(int8_t *address, int8_t expected, int8_t value) +_Py_atomic_compare_exchange_int8(int8_t *ptr, int8_t *expected, int8_t desired) { - return __atomic_compare_exchange_n(address, &expected, value, 0, + return __atomic_compare_exchange_n(ptr, expected, desired, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_compare_exchange_int16(int16_t *address, int16_t expected, int16_t value) +_Py_atomic_compare_exchange_int16(int16_t *ptr, int16_t *expected, int16_t desired) { - return __atomic_compare_exchange_n(address, &expected, value, 0, + return __atomic_compare_exchange_n(ptr, expected, desired, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_compare_exchange_int32(int32_t *address, int32_t expected, int32_t value) +_Py_atomic_compare_exchange_int32(int32_t *ptr, int32_t *expected, int32_t desired) { - return __atomic_compare_exchange_n(address, &expected, value, 0, + return __atomic_compare_exchange_n(ptr, expected, desired, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_compare_exchange_int64(int64_t *address, int64_t expected, int64_t value) +_Py_atomic_compare_exchange_int64(int64_t *ptr, int64_t *expected, int64_t desired) { - return __atomic_compare_exchange_n(address, &expected, value, 0, + return __atomic_compare_exchange_n(ptr, expected, desired, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_compare_exchange_intptr(intptr_t *address, intptr_t expected, intptr_t value) +_Py_atomic_compare_exchange_intptr(intptr_t *ptr, intptr_t *expected, intptr_t desired) { - return __atomic_compare_exchange_n(address, &expected, value, 0, + return __atomic_compare_exchange_n(ptr, expected, desired, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_compare_exchange_uint(unsigned int *address, unsigned int expected, unsigned int value) +_Py_atomic_compare_exchange_uint(unsigned int *ptr, unsigned int *expected, unsigned int desired) { - return __atomic_compare_exchange_n(address, &expected, value, 0, + return __atomic_compare_exchange_n(ptr, expected, desired, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_compare_exchange_uint8(uint8_t *address, uint8_t expected, uint8_t value) +_Py_atomic_compare_exchange_uint8(uint8_t *ptr, uint8_t *expected, uint8_t desired) { - return __atomic_compare_exchange_n(address, &expected, value, 0, + return __atomic_compare_exchange_n(ptr, expected, desired, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_compare_exchange_uint16(uint16_t *address, uint16_t expected, uint16_t value) +_Py_atomic_compare_exchange_uint16(uint16_t *ptr, uint16_t *expected, uint16_t desired) { - return __atomic_compare_exchange_n(address, &expected, value, 0, + return __atomic_compare_exchange_n(ptr, expected, desired, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_compare_exchange_uint32(uint32_t *address, uint32_t expected, uint32_t value) +_Py_atomic_compare_exchange_uint32(uint32_t *ptr, uint32_t *expected, uint32_t desired) { - return __atomic_compare_exchange_n(address, &expected, value, 0, + 
return __atomic_compare_exchange_n(ptr, expected, desired, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_compare_exchange_uint64(uint64_t *address, uint64_t expected, uint64_t value) +_Py_atomic_compare_exchange_uint64(uint64_t *ptr, uint64_t *expected, uint64_t desired) { - return __atomic_compare_exchange_n(address, &expected, value, 0, + return __atomic_compare_exchange_n(ptr, expected, desired, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_compare_exchange_uintptr(uintptr_t *address, uintptr_t expected, uintptr_t value) +_Py_atomic_compare_exchange_uintptr(uintptr_t *ptr, uintptr_t *expected, uintptr_t desired) { - return __atomic_compare_exchange_n(address, &expected, value, 0, + return __atomic_compare_exchange_n(ptr, expected, desired, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_compare_exchange_ssize(Py_ssize_t *address, Py_ssize_t expected, Py_ssize_t value) +_Py_atomic_compare_exchange_ssize(Py_ssize_t *ptr, Py_ssize_t *expected, Py_ssize_t desired) { - return __atomic_compare_exchange_n(address, &expected, value, 0, + return __atomic_compare_exchange_n(ptr, expected, desired, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_compare_exchange_ptr(void *address, void *expected, void *value) +_Py_atomic_compare_exchange_ptr(void *ptr, void *expected, void *desired) { - return __atomic_compare_exchange_n((void **)address, &expected, value, 0, + return __atomic_compare_exchange_n((void **)ptr, (void **)expected, desired, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_exchange_int(int *address, int value) +_Py_atomic_exchange_int(int *ptr, int value) { - return __atomic_exchange_n(address, value, __ATOMIC_SEQ_CST); + return __atomic_exchange_n(ptr, value, __ATOMIC_SEQ_CST); } static inline int8_t -_Py_atomic_exchange_int8(int8_t *address, int8_t value) +_Py_atomic_exchange_int8(int8_t *ptr, int8_t value) { - return __atomic_exchange_n(address, value, __ATOMIC_SEQ_CST); + return __atomic_exchange_n(ptr, value, __ATOMIC_SEQ_CST); } static inline int16_t -_Py_atomic_exchange_int16(int16_t *address, int16_t value) +_Py_atomic_exchange_int16(int16_t *ptr, int16_t value) { - return __atomic_exchange_n(address, value, __ATOMIC_SEQ_CST); + return __atomic_exchange_n(ptr, value, __ATOMIC_SEQ_CST); } static inline int32_t -_Py_atomic_exchange_int32(int32_t *address, int32_t value) +_Py_atomic_exchange_int32(int32_t *ptr, int32_t value) { - return __atomic_exchange_n(address, value, __ATOMIC_SEQ_CST); + return __atomic_exchange_n(ptr, value, __ATOMIC_SEQ_CST); } static inline int64_t -_Py_atomic_exchange_int64(int64_t *address, int64_t value) +_Py_atomic_exchange_int64(int64_t *ptr, int64_t value) { - return __atomic_exchange_n(address, value, __ATOMIC_SEQ_CST); + return __atomic_exchange_n(ptr, value, __ATOMIC_SEQ_CST); } static inline intptr_t -_Py_atomic_exchange_intptr(intptr_t *address, intptr_t value) +_Py_atomic_exchange_intptr(intptr_t *ptr, intptr_t value) { - return __atomic_exchange_n(address, value, __ATOMIC_SEQ_CST); + return __atomic_exchange_n(ptr, value, __ATOMIC_SEQ_CST); } static inline unsigned int -_Py_atomic_exchange_uint(unsigned int *address, unsigned int value) +_Py_atomic_exchange_uint(unsigned int *ptr, unsigned int value) { - return __atomic_exchange_n(address, value, __ATOMIC_SEQ_CST); + return __atomic_exchange_n(ptr, value, __ATOMIC_SEQ_CST); } static inline uint8_t -_Py_atomic_exchange_uint8(uint8_t *address, uint8_t value) 
+_Py_atomic_exchange_uint8(uint8_t *ptr, uint8_t value) { - return __atomic_exchange_n(address, value, __ATOMIC_SEQ_CST); + return __atomic_exchange_n(ptr, value, __ATOMIC_SEQ_CST); } static inline uint16_t -_Py_atomic_exchange_uint16(uint16_t *address, uint16_t value) +_Py_atomic_exchange_uint16(uint16_t *ptr, uint16_t value) { - return __atomic_exchange_n(address, value, __ATOMIC_SEQ_CST); + return __atomic_exchange_n(ptr, value, __ATOMIC_SEQ_CST); } static inline uint32_t -_Py_atomic_exchange_uint32(uint32_t *address, uint32_t value) +_Py_atomic_exchange_uint32(uint32_t *ptr, uint32_t value) { - return __atomic_exchange_n(address, value, __ATOMIC_SEQ_CST); + return __atomic_exchange_n(ptr, value, __ATOMIC_SEQ_CST); } static inline uint64_t -_Py_atomic_exchange_uint64(uint64_t *address, uint64_t value) +_Py_atomic_exchange_uint64(uint64_t *ptr, uint64_t value) { - return __atomic_exchange_n(address, value, __ATOMIC_SEQ_CST); + return __atomic_exchange_n(ptr, value, __ATOMIC_SEQ_CST); } static inline uintptr_t -_Py_atomic_exchange_uintptr(uintptr_t *address, uintptr_t value) +_Py_atomic_exchange_uintptr(uintptr_t *ptr, uintptr_t value) { - return __atomic_exchange_n(address, value, __ATOMIC_SEQ_CST); + return __atomic_exchange_n(ptr, value, __ATOMIC_SEQ_CST); } static inline Py_ssize_t -_Py_atomic_exchange_ssize(Py_ssize_t *address, Py_ssize_t value) +_Py_atomic_exchange_ssize(Py_ssize_t *ptr, Py_ssize_t value) { - return __atomic_exchange_n(address, value, __ATOMIC_SEQ_CST); + return __atomic_exchange_n(ptr, value, __ATOMIC_SEQ_CST); } static inline void * -_Py_atomic_exchange_ptr(void *address, void *value) +_Py_atomic_exchange_ptr(void *ptr, void *value) { - return __atomic_exchange_n((void **)address, value, __ATOMIC_SEQ_CST); + return __atomic_exchange_n((void **)ptr, value, __ATOMIC_SEQ_CST); } static inline uint8_t -_Py_atomic_and_uint8(uint8_t *address, uint8_t value) +_Py_atomic_and_uint8(uint8_t *ptr, uint8_t value) { - return __atomic_fetch_and(address, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_and(ptr, value, __ATOMIC_SEQ_CST); } static inline uint16_t -_Py_atomic_and_uint16(uint16_t *address, uint16_t value) +_Py_atomic_and_uint16(uint16_t *ptr, uint16_t value) { - return __atomic_fetch_and(address, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_and(ptr, value, __ATOMIC_SEQ_CST); } static inline uint32_t -_Py_atomic_and_uint32(uint32_t *address, uint32_t value) +_Py_atomic_and_uint32(uint32_t *ptr, uint32_t value) { - return __atomic_fetch_and(address, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_and(ptr, value, __ATOMIC_SEQ_CST); } static inline uint64_t -_Py_atomic_and_uint64(uint64_t *address, uint64_t value) +_Py_atomic_and_uint64(uint64_t *ptr, uint64_t value) { - return __atomic_fetch_and(address, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_and(ptr, value, __ATOMIC_SEQ_CST); } static inline uintptr_t -_Py_atomic_and_uintptr(uintptr_t *address, uintptr_t value) +_Py_atomic_and_uintptr(uintptr_t *ptr, uintptr_t value) { - return __atomic_fetch_and(address, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_and(ptr, value, __ATOMIC_SEQ_CST); } static inline uint8_t -_Py_atomic_or_uint8(uint8_t *address, uint8_t value) +_Py_atomic_or_uint8(uint8_t *ptr, uint8_t value) { - return __atomic_fetch_or(address, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_or(ptr, value, __ATOMIC_SEQ_CST); } static inline uint16_t -_Py_atomic_or_uint16(uint16_t *address, uint16_t value) +_Py_atomic_or_uint16(uint16_t *ptr, uint16_t value) { - return 
__atomic_fetch_or(address, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_or(ptr, value, __ATOMIC_SEQ_CST); } static inline uint32_t -_Py_atomic_or_uint32(uint32_t *address, uint32_t value) +_Py_atomic_or_uint32(uint32_t *ptr, uint32_t value) { - return __atomic_fetch_or(address, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_or(ptr, value, __ATOMIC_SEQ_CST); } static inline uint64_t -_Py_atomic_or_uint64(uint64_t *address, uint64_t value) +_Py_atomic_or_uint64(uint64_t *ptr, uint64_t value) { - return __atomic_fetch_or(address, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_or(ptr, value, __ATOMIC_SEQ_CST); } static inline uintptr_t -_Py_atomic_or_uintptr(uintptr_t *address, uintptr_t value) +_Py_atomic_or_uintptr(uintptr_t *ptr, uintptr_t value) { - return __atomic_fetch_or(address, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_or(ptr, value, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_load_int(const int *address) +_Py_atomic_load_int(const int *ptr) { - return __atomic_load_n(address, __ATOMIC_SEQ_CST); + return __atomic_load_n(ptr, __ATOMIC_SEQ_CST); } static inline int8_t -_Py_atomic_load_int8(const int8_t *address) +_Py_atomic_load_int8(const int8_t *ptr) { - return __atomic_load_n(address, __ATOMIC_SEQ_CST); + return __atomic_load_n(ptr, __ATOMIC_SEQ_CST); } static inline int16_t -_Py_atomic_load_int16(const int16_t *address) +_Py_atomic_load_int16(const int16_t *ptr) { - return __atomic_load_n(address, __ATOMIC_SEQ_CST); + return __atomic_load_n(ptr, __ATOMIC_SEQ_CST); } static inline int32_t -_Py_atomic_load_int32(const int32_t *address) +_Py_atomic_load_int32(const int32_t *ptr) { - return __atomic_load_n(address, __ATOMIC_SEQ_CST); + return __atomic_load_n(ptr, __ATOMIC_SEQ_CST); } static inline int64_t -_Py_atomic_load_int64(const int64_t *address) +_Py_atomic_load_int64(const int64_t *ptr) { - return __atomic_load_n(address, __ATOMIC_SEQ_CST); + return __atomic_load_n(ptr, __ATOMIC_SEQ_CST); } static inline intptr_t -_Py_atomic_load_intptr(const intptr_t *address) +_Py_atomic_load_intptr(const intptr_t *ptr) { - return __atomic_load_n(address, __ATOMIC_SEQ_CST); + return __atomic_load_n(ptr, __ATOMIC_SEQ_CST); } static inline uint8_t -_Py_atomic_load_uint8(const uint8_t *address) +_Py_atomic_load_uint8(const uint8_t *ptr) { - return __atomic_load_n(address, __ATOMIC_SEQ_CST); + return __atomic_load_n(ptr, __ATOMIC_SEQ_CST); } static inline uint16_t -_Py_atomic_load_uint16(const uint16_t *address) +_Py_atomic_load_uint16(const uint16_t *ptr) { - return __atomic_load_n(address, __ATOMIC_SEQ_CST); + return __atomic_load_n(ptr, __ATOMIC_SEQ_CST); } static inline uint32_t -_Py_atomic_load_uint32(const uint32_t *address) +_Py_atomic_load_uint32(const uint32_t *ptr) { - return __atomic_load_n(address, __ATOMIC_SEQ_CST); + return __atomic_load_n(ptr, __ATOMIC_SEQ_CST); } static inline uint64_t -_Py_atomic_load_uint64(const uint64_t *address) +_Py_atomic_load_uint64(const uint64_t *ptr) { - return __atomic_load_n(address, __ATOMIC_SEQ_CST); + return __atomic_load_n(ptr, __ATOMIC_SEQ_CST); } static inline uintptr_t -_Py_atomic_load_uintptr(const uintptr_t *address) +_Py_atomic_load_uintptr(const uintptr_t *ptr) { - return __atomic_load_n(address, __ATOMIC_SEQ_CST); + return __atomic_load_n(ptr, __ATOMIC_SEQ_CST); } static inline unsigned int -_Py_atomic_load_uint(const unsigned int *address) +_Py_atomic_load_uint(const unsigned int *ptr) { - return __atomic_load_n(address, __ATOMIC_SEQ_CST); + return __atomic_load_n(ptr, __ATOMIC_SEQ_CST); } static inline Py_ssize_t 
-_Py_atomic_load_ssize(const Py_ssize_t *address) +_Py_atomic_load_ssize(const Py_ssize_t *ptr) { - return __atomic_load_n(address, __ATOMIC_SEQ_CST); + return __atomic_load_n(ptr, __ATOMIC_SEQ_CST); } static inline void * -_Py_atomic_load_ptr(const void *address) +_Py_atomic_load_ptr(const void *ptr) { - return (void *)__atomic_load_n((void **)address, __ATOMIC_SEQ_CST); + return (void *)__atomic_load_n((void **)ptr, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_load_int_relaxed(const int *address) +_Py_atomic_load_int_relaxed(const int *ptr) { - return __atomic_load_n(address, __ATOMIC_RELAXED); + return __atomic_load_n(ptr, __ATOMIC_RELAXED); } static inline int8_t -_Py_atomic_load_int8_relaxed(const int8_t *address) +_Py_atomic_load_int8_relaxed(const int8_t *ptr) { - return __atomic_load_n(address, __ATOMIC_RELAXED); + return __atomic_load_n(ptr, __ATOMIC_RELAXED); } static inline int16_t -_Py_atomic_load_int16_relaxed(const int16_t *address) +_Py_atomic_load_int16_relaxed(const int16_t *ptr) { - return __atomic_load_n(address, __ATOMIC_RELAXED); + return __atomic_load_n(ptr, __ATOMIC_RELAXED); } static inline int32_t -_Py_atomic_load_int32_relaxed(const int32_t *address) +_Py_atomic_load_int32_relaxed(const int32_t *ptr) { - return __atomic_load_n(address, __ATOMIC_RELAXED); + return __atomic_load_n(ptr, __ATOMIC_RELAXED); } static inline int64_t -_Py_atomic_load_int64_relaxed(const int64_t *address) +_Py_atomic_load_int64_relaxed(const int64_t *ptr) { - return __atomic_load_n(address, __ATOMIC_RELAXED); + return __atomic_load_n(ptr, __ATOMIC_RELAXED); } static inline intptr_t -_Py_atomic_load_intptr_relaxed(const intptr_t *address) +_Py_atomic_load_intptr_relaxed(const intptr_t *ptr) { - return __atomic_load_n(address, __ATOMIC_RELAXED); + return __atomic_load_n(ptr, __ATOMIC_RELAXED); } static inline uint8_t -_Py_atomic_load_uint8_relaxed(const uint8_t *address) +_Py_atomic_load_uint8_relaxed(const uint8_t *ptr) { - return __atomic_load_n(address, __ATOMIC_RELAXED); + return __atomic_load_n(ptr, __ATOMIC_RELAXED); } static inline uint16_t -_Py_atomic_load_uint16_relaxed(const uint16_t *address) +_Py_atomic_load_uint16_relaxed(const uint16_t *ptr) { - return __atomic_load_n(address, __ATOMIC_RELAXED); + return __atomic_load_n(ptr, __ATOMIC_RELAXED); } static inline uint32_t -_Py_atomic_load_uint32_relaxed(const uint32_t *address) +_Py_atomic_load_uint32_relaxed(const uint32_t *ptr) { - return __atomic_load_n(address, __ATOMIC_RELAXED); + return __atomic_load_n(ptr, __ATOMIC_RELAXED); } static inline uint64_t -_Py_atomic_load_uint64_relaxed(const uint64_t *address) +_Py_atomic_load_uint64_relaxed(const uint64_t *ptr) { - return __atomic_load_n(address, __ATOMIC_RELAXED); + return __atomic_load_n(ptr, __ATOMIC_RELAXED); } static inline uintptr_t -_Py_atomic_load_uintptr_relaxed(const uintptr_t *address) +_Py_atomic_load_uintptr_relaxed(const uintptr_t *ptr) { - return __atomic_load_n(address, __ATOMIC_RELAXED); + return __atomic_load_n(ptr, __ATOMIC_RELAXED); } static inline unsigned int -_Py_atomic_load_uint_relaxed(const unsigned int *address) +_Py_atomic_load_uint_relaxed(const unsigned int *ptr) { - return __atomic_load_n(address, __ATOMIC_RELAXED); + return __atomic_load_n(ptr, __ATOMIC_RELAXED); } static inline Py_ssize_t -_Py_atomic_load_ssize_relaxed(const Py_ssize_t *address) +_Py_atomic_load_ssize_relaxed(const Py_ssize_t *ptr) { - return __atomic_load_n(address, __ATOMIC_RELAXED); + return __atomic_load_n(ptr, __ATOMIC_RELAXED); } static inline void * 
-_Py_atomic_load_ptr_relaxed(const void *address) +_Py_atomic_load_ptr_relaxed(const void *ptr) { - return (void *)__atomic_load_n((const void **)address, __ATOMIC_RELAXED); + return (void *)__atomic_load_n((const void **)ptr, __ATOMIC_RELAXED); } static inline void -_Py_atomic_store_int(int *address, int value) +_Py_atomic_store_int(int *ptr, int value) { - __atomic_store_n(address, value, __ATOMIC_SEQ_CST); + __atomic_store_n(ptr, value, __ATOMIC_SEQ_CST); } static inline void -_Py_atomic_store_int8(int8_t *address, int8_t value) +_Py_atomic_store_int8(int8_t *ptr, int8_t value) { - __atomic_store_n(address, value, __ATOMIC_SEQ_CST); + __atomic_store_n(ptr, value, __ATOMIC_SEQ_CST); } static inline void -_Py_atomic_store_int16(int16_t *address, int16_t value) +_Py_atomic_store_int16(int16_t *ptr, int16_t value) { - __atomic_store_n(address, value, __ATOMIC_SEQ_CST); + __atomic_store_n(ptr, value, __ATOMIC_SEQ_CST); } static inline void -_Py_atomic_store_int32(int32_t *address, int32_t value) +_Py_atomic_store_int32(int32_t *ptr, int32_t value) { - __atomic_store_n(address, value, __ATOMIC_SEQ_CST); + __atomic_store_n(ptr, value, __ATOMIC_SEQ_CST); } static inline void -_Py_atomic_store_int64(int64_t *address, int64_t value) +_Py_atomic_store_int64(int64_t *ptr, int64_t value) { - __atomic_store_n(address, value, __ATOMIC_SEQ_CST); + __atomic_store_n(ptr, value, __ATOMIC_SEQ_CST); } static inline void -_Py_atomic_store_intptr(intptr_t *address, intptr_t value) +_Py_atomic_store_intptr(intptr_t *ptr, intptr_t value) { - __atomic_store_n(address, value, __ATOMIC_SEQ_CST); + __atomic_store_n(ptr, value, __ATOMIC_SEQ_CST); } static inline void -_Py_atomic_store_uint8(uint8_t *address, uint8_t value) +_Py_atomic_store_uint8(uint8_t *ptr, uint8_t value) { - __atomic_store_n(address, value, __ATOMIC_SEQ_CST); + __atomic_store_n(ptr, value, __ATOMIC_SEQ_CST); } static inline void -_Py_atomic_store_uint16(uint16_t *address, uint16_t value) +_Py_atomic_store_uint16(uint16_t *ptr, uint16_t value) { - __atomic_store_n(address, value, __ATOMIC_SEQ_CST); + __atomic_store_n(ptr, value, __ATOMIC_SEQ_CST); } static inline void -_Py_atomic_store_uint32(uint32_t *address, uint32_t value) +_Py_atomic_store_uint32(uint32_t *ptr, uint32_t value) { - __atomic_store_n(address, value, __ATOMIC_SEQ_CST); + __atomic_store_n(ptr, value, __ATOMIC_SEQ_CST); } static inline void -_Py_atomic_store_uint64(uint64_t *address, uint64_t value) +_Py_atomic_store_uint64(uint64_t *ptr, uint64_t value) { - __atomic_store_n(address, value, __ATOMIC_SEQ_CST); + __atomic_store_n(ptr, value, __ATOMIC_SEQ_CST); } static inline void -_Py_atomic_store_uintptr(uintptr_t *address, uintptr_t value) +_Py_atomic_store_uintptr(uintptr_t *ptr, uintptr_t value) { - __atomic_store_n(address, value, __ATOMIC_SEQ_CST); + __atomic_store_n(ptr, value, __ATOMIC_SEQ_CST); } static inline void -_Py_atomic_store_uint(unsigned int *address, unsigned int value) +_Py_atomic_store_uint(unsigned int *ptr, unsigned int value) { - __atomic_store_n(address, value, __ATOMIC_SEQ_CST); + __atomic_store_n(ptr, value, __ATOMIC_SEQ_CST); } static inline void -_Py_atomic_store_ptr(void *address, void *value) +_Py_atomic_store_ptr(void *ptr, void *value) { - __atomic_store_n((void **)address, value, __ATOMIC_SEQ_CST); + __atomic_store_n((void **)ptr, value, __ATOMIC_SEQ_CST); } static inline void -_Py_atomic_store_ssize(Py_ssize_t *address, Py_ssize_t value) +_Py_atomic_store_ssize(Py_ssize_t *ptr, Py_ssize_t value) { - __atomic_store_n(address, value, 
__ATOMIC_SEQ_CST); + __atomic_store_n(ptr, value, __ATOMIC_SEQ_CST); } static inline void -_Py_atomic_store_int_relaxed(int *address, int value) +_Py_atomic_store_int_relaxed(int *ptr, int value) { - __atomic_store_n(address, value, __ATOMIC_RELAXED); + __atomic_store_n(ptr, value, __ATOMIC_RELAXED); } static inline void -_Py_atomic_store_int8_relaxed(int8_t *address, int8_t value) +_Py_atomic_store_int8_relaxed(int8_t *ptr, int8_t value) { - __atomic_store_n(address, value, __ATOMIC_RELAXED); + __atomic_store_n(ptr, value, __ATOMIC_RELAXED); } static inline void -_Py_atomic_store_int16_relaxed(int16_t *address, int16_t value) +_Py_atomic_store_int16_relaxed(int16_t *ptr, int16_t value) { - __atomic_store_n(address, value, __ATOMIC_RELAXED); + __atomic_store_n(ptr, value, __ATOMIC_RELAXED); } static inline void -_Py_atomic_store_int32_relaxed(int32_t *address, int32_t value) +_Py_atomic_store_int32_relaxed(int32_t *ptr, int32_t value) { - __atomic_store_n(address, value, __ATOMIC_RELAXED); + __atomic_store_n(ptr, value, __ATOMIC_RELAXED); } static inline void -_Py_atomic_store_int64_relaxed(int64_t *address, int64_t value) +_Py_atomic_store_int64_relaxed(int64_t *ptr, int64_t value) { - __atomic_store_n(address, value, __ATOMIC_RELAXED); + __atomic_store_n(ptr, value, __ATOMIC_RELAXED); } static inline void -_Py_atomic_store_intptr_relaxed(intptr_t *address, intptr_t value) +_Py_atomic_store_intptr_relaxed(intptr_t *ptr, intptr_t value) { - __atomic_store_n(address, value, __ATOMIC_RELAXED); + __atomic_store_n(ptr, value, __ATOMIC_RELAXED); } static inline void -_Py_atomic_store_uint8_relaxed(uint8_t *address, uint8_t value) +_Py_atomic_store_uint8_relaxed(uint8_t *ptr, uint8_t value) { - __atomic_store_n(address, value, __ATOMIC_RELAXED); + __atomic_store_n(ptr, value, __ATOMIC_RELAXED); } static inline void -_Py_atomic_store_uint16_relaxed(uint16_t *address, uint16_t value) +_Py_atomic_store_uint16_relaxed(uint16_t *ptr, uint16_t value) { - __atomic_store_n(address, value, __ATOMIC_RELAXED); + __atomic_store_n(ptr, value, __ATOMIC_RELAXED); } static inline void -_Py_atomic_store_uint32_relaxed(uint32_t *address, uint32_t value) +_Py_atomic_store_uint32_relaxed(uint32_t *ptr, uint32_t value) { - __atomic_store_n(address, value, __ATOMIC_RELAXED); + __atomic_store_n(ptr, value, __ATOMIC_RELAXED); } static inline void -_Py_atomic_store_uint64_relaxed(uint64_t *address, uint64_t value) +_Py_atomic_store_uint64_relaxed(uint64_t *ptr, uint64_t value) { - __atomic_store_n(address, value, __ATOMIC_RELAXED); + __atomic_store_n(ptr, value, __ATOMIC_RELAXED); } static inline void -_Py_atomic_store_uintptr_relaxed(uintptr_t *address, uintptr_t value) +_Py_atomic_store_uintptr_relaxed(uintptr_t *ptr, uintptr_t value) { - __atomic_store_n(address, value, __ATOMIC_RELAXED); + __atomic_store_n(ptr, value, __ATOMIC_RELAXED); } static inline void -_Py_atomic_store_uint_relaxed(unsigned int *address, unsigned int value) +_Py_atomic_store_uint_relaxed(unsigned int *ptr, unsigned int value) { - __atomic_store_n(address, value, __ATOMIC_RELAXED); + __atomic_store_n(ptr, value, __ATOMIC_RELAXED); } static inline void -_Py_atomic_store_ptr_relaxed(void *address, void *value) +_Py_atomic_store_ptr_relaxed(void *ptr, void *value) { - __atomic_store_n((void **)address, value, __ATOMIC_RELAXED); + __atomic_store_n((void **)ptr, value, __ATOMIC_RELAXED); } static inline void -_Py_atomic_store_ssize_relaxed(Py_ssize_t *address, Py_ssize_t value) +_Py_atomic_store_ssize_relaxed(Py_ssize_t *ptr, Py_ssize_t value) { - 
__atomic_store_n(address, value, __ATOMIC_RELAXED); + __atomic_store_n(ptr, value, __ATOMIC_RELAXED); } static inline void * -_Py_atomic_load_ptr_acquire(const void *address) +_Py_atomic_load_ptr_acquire(const void *ptr) { - return (void *)__atomic_load_n((void **)address, __ATOMIC_ACQUIRE); + return (void *)__atomic_load_n((void **)ptr, __ATOMIC_ACQUIRE); } static inline void -_Py_atomic_store_ptr_release(void *address, void *value) +_Py_atomic_store_ptr_release(void *ptr, void *value) { - __atomic_store_n((void **)address, value, __ATOMIC_RELEASE); + __atomic_store_n((void **)ptr, value, __ATOMIC_RELEASE); } static inline void diff --git a/Include/cpython/pyatomic_msc.h b/Include/cpython/pyatomic_msc.h index c21499e5b715af..77da14efd9dd25 100644 --- a/Include/cpython/pyatomic_msc.h +++ b/Include/cpython/pyatomic_msc.h @@ -16,43 +16,43 @@ #include static inline int -_Py_atomic_add_int(int *address, int value) +_Py_atomic_add_int(int *ptr, int value) { Py_BUILD_ASSERT(sizeof(int) == sizeof(long)); - return (int)_InterlockedExchangeAdd((volatile long*)address, (long)value); + return (int)_InterlockedExchangeAdd((volatile long*)ptr, (long)value); } static inline int8_t -_Py_atomic_add_int8(int8_t *address, int8_t value) +_Py_atomic_add_int8(int8_t *ptr, int8_t value) { Py_BUILD_ASSERT(sizeof(int8_t) == sizeof(char)); - return (int8_t)_InterlockedExchangeAdd8((volatile char*)address, (char)value); + return (int8_t)_InterlockedExchangeAdd8((volatile char*)ptr, (char)value); } static inline int16_t -_Py_atomic_add_int16(int16_t *address, int16_t value) +_Py_atomic_add_int16(int16_t *ptr, int16_t value) { Py_BUILD_ASSERT(sizeof(int16_t) == sizeof(short)); - return (int16_t)_InterlockedExchangeAdd16((volatile short*)address, (short)value); + return (int16_t)_InterlockedExchangeAdd16((volatile short*)ptr, (short)value); } static inline int32_t -_Py_atomic_add_int32(int32_t *address, int32_t value) +_Py_atomic_add_int32(int32_t *ptr, int32_t value) { Py_BUILD_ASSERT(sizeof(int32_t) == sizeof(long)); - return (int32_t)_InterlockedExchangeAdd((volatile long*)address, (long)value); + return (int32_t)_InterlockedExchangeAdd((volatile long*)ptr, (long)value); } static inline int64_t -_Py_atomic_add_int64(int64_t *address, int64_t value) +_Py_atomic_add_int64(int64_t *ptr, int64_t value) { #if defined(_M_X64) || defined(_M_ARM64) - return (int64_t)_InterlockedExchangeAdd64((volatile __int64*)address, (__int64)value); + return (int64_t)_InterlockedExchangeAdd64((volatile __int64*)ptr, (__int64)value); #else + int64_t old_value = *(volatile int64_t*)ptr; for (;;) { - int64_t old_value = *(volatile int64_t*)address; int64_t new_value = old_value + value; - if (_Py_atomic_compare_exchange_int64(address, old_value, new_value)) { + if (_Py_atomic_compare_exchange_int64(ptr, &old_value, new_value)) { return old_value; } } @@ -60,187 +60,258 @@ _Py_atomic_add_int64(int64_t *address, int64_t value) } static inline intptr_t -_Py_atomic_add_intptr(intptr_t *address, intptr_t value) +_Py_atomic_add_intptr(intptr_t *ptr, intptr_t value) { #if SIZEOF_VOID_P == 8 - return (intptr_t)_InterlockedExchangeAdd64((volatile __int64*)address, (__int64)value); + return (intptr_t)_InterlockedExchangeAdd64((volatile __int64*)ptr, (__int64)value); #else - return (intptr_t)_InterlockedExchangeAdd((volatile long*)address, (long)value); + return (intptr_t)_InterlockedExchangeAdd((volatile long*)ptr, (long)value); #endif } static inline unsigned int -_Py_atomic_add_uint(unsigned int *address, unsigned int value) 
+_Py_atomic_add_uint(unsigned int *ptr, unsigned int value) { - return (unsigned int)_InterlockedExchangeAdd((volatile long*)address, (long)value); + return (unsigned int)_InterlockedExchangeAdd((volatile long*)ptr, (long)value); } static inline uint8_t -_Py_atomic_add_uint8(uint8_t *address, uint8_t value) +_Py_atomic_add_uint8(uint8_t *ptr, uint8_t value) { - return (uint8_t)_InterlockedExchangeAdd8((volatile char*)address, (char)value); + return (uint8_t)_InterlockedExchangeAdd8((volatile char*)ptr, (char)value); } static inline uint16_t -_Py_atomic_add_uint16(uint16_t *address, uint16_t value) +_Py_atomic_add_uint16(uint16_t *ptr, uint16_t value) { - return (uint16_t)_InterlockedExchangeAdd16((volatile short*)address, (short)value); + return (uint16_t)_InterlockedExchangeAdd16((volatile short*)ptr, (short)value); } static inline uint32_t -_Py_atomic_add_uint32(uint32_t *address, uint32_t value) +_Py_atomic_add_uint32(uint32_t *ptr, uint32_t value) { - return (uint32_t)_InterlockedExchangeAdd((volatile long*)address, (long)value); + return (uint32_t)_InterlockedExchangeAdd((volatile long*)ptr, (long)value); } static inline uint64_t -_Py_atomic_add_uint64(uint64_t *address, uint64_t value) +_Py_atomic_add_uint64(uint64_t *ptr, uint64_t value) { - return (uint64_t)_Py_atomic_add_int64((int64_t*)address, (int64_t)value); + return (uint64_t)_Py_atomic_add_int64((int64_t*)ptr, (int64_t)value); } static inline uintptr_t -_Py_atomic_add_uintptr(uintptr_t *address, uintptr_t value) +_Py_atomic_add_uintptr(uintptr_t *ptr, uintptr_t value) { #if SIZEOF_VOID_P == 8 - return (uintptr_t)_InterlockedExchangeAdd64((volatile __int64*)address, (__int64)value); + return (uintptr_t)_InterlockedExchangeAdd64((volatile __int64*)ptr, (__int64)value); #else - return (uintptr_t)_InterlockedExchangeAdd((volatile long*)address, (long)value); + return (uintptr_t)_InterlockedExchangeAdd((volatile long*)ptr, (long)value); #endif } static inline Py_ssize_t -_Py_atomic_add_ssize(Py_ssize_t *address, Py_ssize_t value) +_Py_atomic_add_ssize(Py_ssize_t *ptr, Py_ssize_t value) { #if SIZEOF_SIZE_T == 8 - return (Py_ssize_t)_InterlockedExchangeAdd64((volatile __int64*)address, (__int64)value); + return (Py_ssize_t)_InterlockedExchangeAdd64((volatile __int64*)ptr, (__int64)value); #else - return (Py_ssize_t)_InterlockedExchangeAdd((volatile long*)address, (long)value); + return (Py_ssize_t)_InterlockedExchangeAdd((volatile long*)ptr, (long)value); #endif } static inline int -_Py_atomic_compare_exchange_int(int *address, int expected, int value) +_Py_atomic_compare_exchange_int(int *ptr, int *expected, int value) { - return (long)expected == _InterlockedCompareExchange((volatile long*)address, (long)value, (long)expected); + int initial = (int)_InterlockedCompareExchange((volatile long*)ptr, (long)value, (long)*expected); + if (initial == *expected) { + return 1; + } + *expected = initial; + return 0; } static inline int -_Py_atomic_compare_exchange_int8(int8_t *address, int8_t expected, int8_t value) +_Py_atomic_compare_exchange_int8(int8_t *ptr, int8_t *expected, int8_t value) { - return (char)expected == _InterlockedCompareExchange8((volatile char*)address, (char)value, (char)expected); + int8_t initial = (int8_t)_InterlockedCompareExchange8((volatile char*)ptr, (char)value, (char)*expected); + if (initial == *expected) { + return 1; + } + *expected = initial; + return 0; } static inline int -_Py_atomic_compare_exchange_int16(int16_t *address, int16_t expected, int16_t value) +_Py_atomic_compare_exchange_int16(int16_t 
*address, int16_t expected, int16_t value) +_Py_atomic_compare_exchange_int16(int16_t
*ptr, int16_t *expected, int16_t value) { - return (short)expected == _InterlockedCompareExchange16((volatile short*)address, (short)value, (short)expected); + int16_t initial = (int16_t)_InterlockedCompareExchange16((volatile short*)ptr, (short)value, (short)*expected); + if (initial == *expected) { + return 1; + } + *expected = initial; + return 0; } static inline int -_Py_atomic_compare_exchange_int32(int32_t *address, int32_t expected, int32_t value) +_Py_atomic_compare_exchange_int32(int32_t *ptr, int32_t *expected, int32_t value) { - return (long)expected == _InterlockedCompareExchange((volatile long*)address, (long)value, (long)expected); + int32_t initial = (int32_t)_InterlockedCompareExchange((volatile long*)ptr, (long)value, (long)*expected); + if (initial == *expected) { + return 1; + } + *expected = initial; + return 0; } static inline int -_Py_atomic_compare_exchange_int64(int64_t *address, int64_t expected, int64_t value) +_Py_atomic_compare_exchange_int64(int64_t *ptr, int64_t *expected, int64_t value) { - return (__int64)expected == _InterlockedCompareExchange64((volatile __int64*)address, (__int64)value, (__int64)expected); + int64_t initial = (int64_t)_InterlockedCompareExchange64((volatile __int64*)ptr, (__int64)value, (__int64)*expected); + if (initial == *expected) { + return 1; + } + *expected = initial; + return 0; } static inline int -_Py_atomic_compare_exchange_intptr(intptr_t *address, intptr_t expected, intptr_t value) +_Py_atomic_compare_exchange_intptr(intptr_t *ptr, intptr_t *expected, intptr_t value) { - return (void *)expected == _InterlockedCompareExchangePointer((void * volatile *)address, (void *)value, (void *)expected); + intptr_t initial = (intptr_t)_InterlockedCompareExchangePointer((void * volatile *)ptr, (void *)value, (void *)*expected); + if (initial == *expected) { + return 1; + } + *expected = initial; + return 0; } static inline int -_Py_atomic_compare_exchange_uint8(uint8_t *address, uint8_t expected, uint8_t value) +_Py_atomic_compare_exchange_uint(unsigned int *ptr, unsigned int *expected, unsigned int value) { - return (char)expected == _InterlockedCompareExchange8((volatile char*)address, (char)value, (char)expected); + unsigned int initial = (unsigned int)_InterlockedCompareExchange((volatile long*)ptr, (long)value, (long)*expected); + if (initial == *expected) { + return 1; + } + *expected = initial; + return 0; } static inline int -_Py_atomic_compare_exchange_uint16(uint16_t *address, uint16_t expected, uint16_t value) +_Py_atomic_compare_exchange_uint8(uint8_t *ptr, uint8_t *expected, uint8_t value) { - return (short)expected == _InterlockedCompareExchange16((volatile short*)address, (short)value, (short)expected); + uint8_t initial = (uint8_t)_InterlockedCompareExchange8((volatile char*)ptr, (char)value, (char)*expected); + if (initial == *expected) { + return 1; + } + *expected = initial; + return 0; } static inline int -_Py_atomic_compare_exchange_uint(unsigned int *address, unsigned int expected, unsigned int value) +_Py_atomic_compare_exchange_uint16(uint16_t *ptr, uint16_t *expected, uint16_t value) { - return (long)expected == _InterlockedCompareExchange((volatile long*)address, (long)value, (long)expected); + uint16_t initial = (uint16_t)_InterlockedCompareExchange16((volatile short*)ptr, (short)value, (short)*expected); + if (initial == *expected) { + return 1; + } + *expected = initial; + return 0; } static inline int -_Py_atomic_compare_exchange_uint32(uint32_t *address, uint32_t expected, uint32_t value) 
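+// Clarifying note (applies to every _Py_atomic_compare_exchange_* helper in this
+// header): `expected` is now passed by pointer, following the strong
+// compare-exchange contract. On success the value at `ptr` is replaced with
+// `value` and 1 is returned; on failure the value actually observed at `ptr`
+// is written back into `*expected` and 0 is returned, so callers can retry
+// in a loop without issuing an extra load.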
+_Py_atomic_compare_exchange_uint32(uint32_t *ptr, uint32_t *expected, uint32_t value) { - return (long)expected == _InterlockedCompareExchange((volatile long*)address, (long)value, (long)expected); + uint32_t initial = (uint32_t)_InterlockedCompareExchange((volatile long*)ptr, (long)value, (long)*expected); + if (initial == *expected) { + return 1; + } + *expected = initial; + return 0; } static inline int -_Py_atomic_compare_exchange_uint64(uint64_t *address, uint64_t expected, uint64_t value) +_Py_atomic_compare_exchange_uint64(uint64_t *ptr, uint64_t *expected, uint64_t value) { - return (__int64)expected == _InterlockedCompareExchange64((volatile __int64*)address, (__int64)value, (__int64)expected); + uint64_t initial = (uint64_t)_InterlockedCompareExchange64((volatile __int64*)ptr, (__int64)value, (__int64)*expected); + if (initial == *expected) { + return 1; + } + *expected = initial; + return 0; } static inline int -_Py_atomic_compare_exchange_uintptr(uintptr_t *address, uintptr_t expected, uintptr_t value) +_Py_atomic_compare_exchange_uintptr(uintptr_t *ptr, uintptr_t *expected, uintptr_t value) { - return (void *)expected == _InterlockedCompareExchangePointer((void * volatile *)address, (void *)value, (void *)expected); + uintptr_t initial = (uintptr_t)_InterlockedCompareExchangePointer((void * volatile *)ptr, (void *)value, (void *)*expected); + if (initial == *expected) { + return 1; + } + *expected = initial; + return 0; } static inline int -_Py_atomic_compare_exchange_ssize(Py_ssize_t *address, Py_ssize_t expected, Py_ssize_t value) +_Py_atomic_compare_exchange_ssize(Py_ssize_t *ptr, Py_ssize_t *expected, Py_ssize_t value) { + Py_ssize_t initial = #if SIZEOF_SIZE_T == 8 - return (__int64)expected == _InterlockedCompareExchange64((volatile __int64*)address, (__int64)value, (__int64)expected); + _InterlockedCompareExchange64((volatile __int64*)ptr, (__int64)value, (__int64)*expected); #else - return (long)expected == _InterlockedCompareExchange((volatile long*)address, (long)value, (long)expected); + _InterlockedCompareExchange((volatile long*)ptr, (long)value, (long)*expected); #endif + if (initial == *expected) { + return 1; + } + *expected = initial; + return 0; } static inline int -_Py_atomic_compare_exchange_ptr(void *address, void *expected, void *value) +_Py_atomic_compare_exchange_ptr(void *ptr, void *expected, void *value) { - return (void *)expected == _InterlockedCompareExchangePointer((void * volatile *)address, (void *)value, (void *)expected); + void *initial = _InterlockedCompareExchangePointer((void **)ptr, value, *(void **)expected); + if (initial == *(void **)expected) { + return 1; + } + *(void **)expected = initial; + return 0; } static inline int -_Py_atomic_exchange_int(int *address, int value) +_Py_atomic_exchange_int(int *ptr, int value) { - return (int)_InterlockedExchange((volatile long*)address, (long)value); + return (int)_InterlockedExchange((volatile long*)ptr, (long)value); } static inline int8_t -_Py_atomic_exchange_int8(int8_t *address, int8_t value) +_Py_atomic_exchange_int8(int8_t *ptr, int8_t value) { - return (int8_t)_InterlockedExchange8((volatile char*)address, (char)value); + return (int8_t)_InterlockedExchange8((volatile char*)ptr, (char)value); } static inline int16_t -_Py_atomic_exchange_int16(int16_t *address, int16_t value) +_Py_atomic_exchange_int16(int16_t *ptr, int16_t value) { - return (int16_t)_InterlockedExchange16((volatile short*)address, (short)value); + return (int16_t)_InterlockedExchange16((volatile short*)ptr, 
(short)value); } static inline int32_t -_Py_atomic_exchange_int32(int32_t *address, int32_t value) +_Py_atomic_exchange_int32(int32_t *ptr, int32_t value) { - return (int32_t)_InterlockedExchange((volatile long*)address, (long)value); + return (int32_t)_InterlockedExchange((volatile long*)ptr, (long)value); } static inline int64_t -_Py_atomic_exchange_int64(int64_t *address, int64_t value) +_Py_atomic_exchange_int64(int64_t *ptr, int64_t value) { #if defined(_M_X64) || defined(_M_ARM64) - return (int64_t)_InterlockedExchange64((volatile __int64*)address, (__int64)value); + return (int64_t)_InterlockedExchange64((volatile __int64*)ptr, (__int64)value); #else + int64_t old_value = *(volatile int64_t*)ptr; for (;;) { - int64_t old_value = *(volatile int64_t*)address; - if (_Py_atomic_compare_exchange_int64(address, old_value, value)) { + if (_Py_atomic_compare_exchange_int64(ptr, &old_value, value)) { return old_value; } } @@ -248,91 +319,91 @@ _Py_atomic_exchange_int64(int64_t *address, int64_t value) } static inline intptr_t -_Py_atomic_exchange_intptr(intptr_t *address, intptr_t value) +_Py_atomic_exchange_intptr(intptr_t *ptr, intptr_t value) { - return (intptr_t)_InterlockedExchangePointer((void * volatile *)address, (void *)value); + return (intptr_t)_InterlockedExchangePointer((void * volatile *)ptr, (void *)value); } static inline unsigned int -_Py_atomic_exchange_uint(unsigned int *address, unsigned int value) +_Py_atomic_exchange_uint(unsigned int *ptr, unsigned int value) { - return (unsigned int)_InterlockedExchange((volatile long*)address, (long)value); + return (unsigned int)_InterlockedExchange((volatile long*)ptr, (long)value); } static inline uint8_t -_Py_atomic_exchange_uint8(uint8_t *address, uint8_t value) +_Py_atomic_exchange_uint8(uint8_t *ptr, uint8_t value) { - return (uint8_t)_InterlockedExchange8((volatile char*)address, (char)value); + return (uint8_t)_InterlockedExchange8((volatile char*)ptr, (char)value); } static inline uint16_t -_Py_atomic_exchange_uint16(uint16_t *address, uint16_t value) +_Py_atomic_exchange_uint16(uint16_t *ptr, uint16_t value) { - return (uint16_t)_InterlockedExchange16((volatile short*)address, (short)value); + return (uint16_t)_InterlockedExchange16((volatile short*)ptr, (short)value); } static inline uint32_t -_Py_atomic_exchange_uint32(uint32_t *address, uint32_t value) +_Py_atomic_exchange_uint32(uint32_t *ptr, uint32_t value) { - return (uint32_t)_InterlockedExchange((volatile long*)address, (long)value); + return (uint32_t)_InterlockedExchange((volatile long*)ptr, (long)value); } static inline uint64_t -_Py_atomic_exchange_uint64(uint64_t *address, uint64_t value) +_Py_atomic_exchange_uint64(uint64_t *ptr, uint64_t value) { - return (uint64_t)_Py_atomic_exchange_int64((int64_t *)address, (int64_t)value); + return (uint64_t)_Py_atomic_exchange_int64((int64_t *)ptr, (int64_t)value); } static inline uintptr_t -_Py_atomic_exchange_uintptr(uintptr_t *address, uintptr_t value) +_Py_atomic_exchange_uintptr(uintptr_t *ptr, uintptr_t value) { - return (uintptr_t)_InterlockedExchangePointer((void * volatile *)address, (void *)value); + return (uintptr_t)_InterlockedExchangePointer((void * volatile *)ptr, (void *)value); } static inline Py_ssize_t -_Py_atomic_exchange_ssize(Py_ssize_t *address, Py_ssize_t value) +_Py_atomic_exchange_ssize(Py_ssize_t *ptr, Py_ssize_t value) { #if SIZEOF_SIZE_T == 8 - return (Py_ssize_t)_InterlockedExchange64((volatile __int64*)address, (__int64)value); + return (Py_ssize_t)_InterlockedExchange64((volatile 
__int64*)address, (__int64)value); + return (Py_ssize_t)_InterlockedExchange64((volatile
__int64*)ptr, (__int64)value); #else - return (Py_ssize_t)_InterlockedExchange((volatile long*)address, (long)value); + return (Py_ssize_t)_InterlockedExchange((volatile long*)ptr, (long)value); #endif } static inline void * -_Py_atomic_exchange_ptr(void *address, void *value) +_Py_atomic_exchange_ptr(void *ptr, void *value) { - return (void *)_InterlockedExchangePointer((void * volatile *)address, (void *)value); + return (void *)_InterlockedExchangePointer((void * volatile *)ptr, (void *)value); } static inline uint8_t -_Py_atomic_and_uint8(uint8_t *address, uint8_t value) +_Py_atomic_and_uint8(uint8_t *ptr, uint8_t value) { - return (uint8_t)_InterlockedAnd8((volatile char*)address, (char)value); + return (uint8_t)_InterlockedAnd8((volatile char*)ptr, (char)value); } static inline uint16_t -_Py_atomic_and_uint16(uint16_t *address, uint16_t value) +_Py_atomic_and_uint16(uint16_t *ptr, uint16_t value) { - return (uint16_t)_InterlockedAnd16((volatile short*)address, (short)value); + return (uint16_t)_InterlockedAnd16((volatile short*)ptr, (short)value); } static inline uint32_t -_Py_atomic_and_uint32(uint32_t *address, uint32_t value) +_Py_atomic_and_uint32(uint32_t *ptr, uint32_t value) { - return (uint32_t)_InterlockedAnd((volatile long*)address, (long)value); + return (uint32_t)_InterlockedAnd((volatile long*)ptr, (long)value); } static inline uint64_t -_Py_atomic_and_uint64(uint64_t *address, uint64_t value) +_Py_atomic_and_uint64(uint64_t *ptr, uint64_t value) { #if defined(_M_X64) || defined(_M_ARM64) - return (uint64_t)_InterlockedAnd64((volatile __int64*)address, (__int64)value); + return (uint64_t)_InterlockedAnd64((volatile __int64*)ptr, (__int64)value); #else + uint64_t old_value = *(volatile uint64_t*)ptr; for (;;) { - uint64_t old_value = *(volatile uint64_t*)address; uint64_t new_value = old_value & value; - if (_Py_atomic_compare_exchange_uint64(address, old_value, new_value)) { + if (_Py_atomic_compare_exchange_uint64(ptr, &old_value, new_value)) { return old_value; } } @@ -340,43 +411,43 @@ _Py_atomic_and_uint64(uint64_t *address, uint64_t value) } static inline uintptr_t -_Py_atomic_and_uintptr(uintptr_t *address, uintptr_t value) +_Py_atomic_and_uintptr(uintptr_t *ptr, uintptr_t value) { #if SIZEOF_VOID_P == 8 - return (uintptr_t)_InterlockedAnd64((volatile __int64*)address, (__int64)value); + return (uintptr_t)_InterlockedAnd64((volatile __int64*)ptr, (__int64)value); #else - return (uintptr_t)_InterlockedAnd((volatile long*)address, (long)value); + return (uintptr_t)_InterlockedAnd((volatile long*)ptr, (long)value); #endif } static inline uint8_t -_Py_atomic_or_uint8(uint8_t *address, uint8_t value) +_Py_atomic_or_uint8(uint8_t *ptr, uint8_t value) { - return (uint8_t)_InterlockedOr8((volatile char*)address, (char)value); + return (uint8_t)_InterlockedOr8((volatile char*)ptr, (char)value); } static inline uint16_t -_Py_atomic_or_uint16(uint16_t *address, uint16_t value) +_Py_atomic_or_uint16(uint16_t *ptr, uint16_t value) { - return (uint16_t)_InterlockedOr16((volatile short*)address, (short)value); + return (uint16_t)_InterlockedOr16((volatile short*)ptr, (short)value); } static inline uint32_t -_Py_atomic_or_uint32(uint32_t *address, uint32_t value) +_Py_atomic_or_uint32(uint32_t *ptr, uint32_t value) { - return (uint32_t)_InterlockedOr((volatile long*)address, (long)value); + return (uint32_t)_InterlockedOr((volatile long*)ptr, (long)value); } static inline uint64_t -_Py_atomic_or_uint64(uint64_t *address, uint64_t value) +_Py_atomic_or_uint64(uint64_t *ptr, 
uint64_t value) { #if defined(_M_X64) || defined(_M_ARM64) - return (uint64_t)_InterlockedOr64((volatile __int64*)address, (__int64)value); + return (uint64_t)_InterlockedOr64((volatile __int64*)ptr, (__int64)value); #else + uint64_t old_value = *(volatile uint64_t *)ptr; for (;;) { - uint64_t old_value = *(volatile uint64_t *)address; uint64_t new_value = old_value | value; - if (_Py_atomic_compare_exchange_uint64(address, old_value, new_value)) { + if (_Py_atomic_compare_exchange_uint64(ptr, &old_value, new_value)) { return old_value; } } @@ -384,460 +455,460 @@ _Py_atomic_or_uint64(uint64_t *address, uint64_t value) } static inline uintptr_t -_Py_atomic_or_uintptr(uintptr_t *address, uintptr_t value) +_Py_atomic_or_uintptr(uintptr_t *ptr, uintptr_t value) { #if SIZEOF_VOID_P == 8 - return (uintptr_t)_InterlockedOr64((volatile __int64*)address, (__int64)value); + return (uintptr_t)_InterlockedOr64((volatile __int64*)ptr, (__int64)value); #else - return (uintptr_t)_InterlockedOr((volatile long*)address, (long)value); + return (uintptr_t)_InterlockedOr((volatile long*)ptr, (long)value); #endif } static inline int -_Py_atomic_load_int(const int *address) +_Py_atomic_load_int(const int *ptr) { #if defined(_M_X64) || defined(_M_IX86) - return *(volatile int *)address; + return *(volatile int *)ptr; #elif defined(_M_ARM64) - return (int)__ldar32((unsigned __int32 volatile*)address); + return (int)__ldar32((unsigned __int32 volatile*)ptr); #else #error no implementation of _Py_atomic_load_int #endif } static inline int8_t -_Py_atomic_load_int8(const int8_t *address) +_Py_atomic_load_int8(const int8_t *ptr) { #if defined(_M_X64) || defined(_M_IX86) - return *(volatile int8_t *)address; + return *(volatile int8_t *)ptr; #elif defined(_M_ARM64) - return (int8_t)__ldar8((unsigned __int8 volatile*)address); + return (int8_t)__ldar8((unsigned __int8 volatile*)ptr); #else #error no implementation of _Py_atomic_load_int8 #endif } static inline int16_t -_Py_atomic_load_int16(const int16_t *address) +_Py_atomic_load_int16(const int16_t *ptr) { #if defined(_M_X64) || defined(_M_IX86) - return *(volatile int16_t *)address; + return *(volatile int16_t *)ptr; #elif defined(_M_ARM64) - return (int16_t)__ldar16((unsigned __int16 volatile*)address); + return (int16_t)__ldar16((unsigned __int16 volatile*)ptr); #else #error no implementation of _Py_atomic_load_int16 #endif } static inline int32_t -_Py_atomic_load_int32(const int32_t *address) +_Py_atomic_load_int32(const int32_t *ptr) { #if defined(_M_X64) || defined(_M_IX86) - return *(volatile int32_t *)address; + return *(volatile int32_t *)ptr; #elif defined(_M_ARM64) - return (int32_t)__ldar32((unsigned __int32 volatile*)address); + return (int32_t)__ldar32((unsigned __int32 volatile*)ptr); #else #error no implementation of _Py_atomic_load_int32 #endif } static inline int64_t -_Py_atomic_load_int64(const int64_t *address) +_Py_atomic_load_int64(const int64_t *ptr) { #if defined(_M_X64) || defined(_M_IX86) - return *(volatile int64_t *)address; + return *(volatile int64_t *)ptr; #elif defined(_M_ARM64) - return __ldar64((unsigned __int64 volatile*)address); + return __ldar64((unsigned __int64 volatile*)ptr); #else #error no implementation of _Py_atomic_load_int64 #endif } static inline intptr_t -_Py_atomic_load_intptr(const intptr_t *address) +_Py_atomic_load_intptr(const intptr_t *ptr) { #if defined(_M_X64) || defined(_M_IX86) - return *(volatile intptr_t *)address; + return *(volatile intptr_t *)ptr; #elif defined(_M_ARM64) - return __ldar64((unsigned 
__int64 volatile*)address); + return __ldar64((unsigned __int64 volatile*)ptr); #else #error no implementation of _Py_atomic_load_intptr #endif } static inline uint8_t -_Py_atomic_load_uint8(const uint8_t *address) +_Py_atomic_load_uint8(const uint8_t *ptr) { #if defined(_M_X64) || defined(_M_IX86) - return *(volatile uint8_t *)address; + return *(volatile uint8_t *)ptr; #elif defined(_M_ARM64) - return __ldar8((unsigned __int8 volatile*)address); + return __ldar8((unsigned __int8 volatile*)ptr); #else #error no implementation of _Py_atomic_load_uint8 #endif } static inline uint16_t -_Py_atomic_load_uint16(const uint16_t *address) +_Py_atomic_load_uint16(const uint16_t *ptr) { #if defined(_M_X64) || defined(_M_IX86) - return *(volatile uint16_t *)address; + return *(volatile uint16_t *)ptr; #elif defined(_M_ARM64) - return __ldar16((unsigned __int16 volatile*)address); + return __ldar16((unsigned __int16 volatile*)ptr); #else #error no implementation of _Py_atomic_load_uint16 #endif } static inline uint32_t -_Py_atomic_load_uint32(const uint32_t *address) +_Py_atomic_load_uint32(const uint32_t *ptr) { #if defined(_M_X64) || defined(_M_IX86) - return *(volatile uint32_t *)address; + return *(volatile uint32_t *)ptr; #elif defined(_M_ARM64) - return __ldar32((unsigned __int32 volatile*)address); + return __ldar32((unsigned __int32 volatile*)ptr); #else #error no implementation of _Py_atomic_load_uint32 #endif } static inline uint64_t -_Py_atomic_load_uint64(const uint64_t *address) +_Py_atomic_load_uint64(const uint64_t *ptr) { #if defined(_M_X64) || defined(_M_IX86) - return *(volatile uint64_t *)address; + return *(volatile uint64_t *)ptr; #elif defined(_M_ARM64) - return __ldar64((unsigned __int64 volatile*)address); + return __ldar64((unsigned __int64 volatile*)ptr); #else #error no implementation of _Py_atomic_load_uint64 #endif } static inline uintptr_t -_Py_atomic_load_uintptr(const uintptr_t *address) +_Py_atomic_load_uintptr(const uintptr_t *ptr) { #if defined(_M_X64) || defined(_M_IX86) - return *(volatile uintptr_t *)address; + return *(volatile uintptr_t *)ptr; #elif defined(_M_ARM64) - return __ldar64((unsigned __int64 volatile*)address); + return __ldar64((unsigned __int64 volatile*)ptr); #else #error no implementation of _Py_atomic_load_uintptr #endif } static inline unsigned int -_Py_atomic_load_uint(const unsigned int *address) +_Py_atomic_load_uint(const unsigned int *ptr) { #if defined(_M_X64) || defined(_M_IX86) - return *(volatile unsigned int *)address; + return *(volatile unsigned int *)ptr; #elif defined(_M_ARM64) - return __ldar32((unsigned __int32 volatile*)address); + return __ldar32((unsigned __int32 volatile*)ptr); #else #error no implementation of _Py_atomic_load_uint #endif } static inline Py_ssize_t -_Py_atomic_load_ssize(const Py_ssize_t *address) +_Py_atomic_load_ssize(const Py_ssize_t *ptr) { #if defined(_M_X64) || defined(_M_IX86) - return *(volatile Py_ssize_t *)address; + return *(volatile Py_ssize_t *)ptr; #elif defined(_M_ARM64) - return __ldar64((unsigned __int64 volatile*)address); + return __ldar64((unsigned __int64 volatile*)ptr); #else #error no implementation of _Py_atomic_load_ssize #endif } static inline void * -_Py_atomic_load_ptr(const void *address) +_Py_atomic_load_ptr(const void *ptr) { #if defined(_M_X64) || defined(_M_IX86) - return *(void * volatile *)address; + return *(void * volatile *)ptr; #elif defined(_M_ARM64) - return (void *)__ldar64((unsigned __int64 volatile*)address); + return (void *)__ldar64((unsigned __int64 
volatile*)address); + return (void *)__ldar64((unsigned __int64
volatile*)ptr); #else #error no implementation of _Py_atomic_load_ptr #endif } static inline int -_Py_atomic_load_int_relaxed(const int *address) +_Py_atomic_load_int_relaxed(const int *ptr) { - return *(volatile int *)address; + return *(volatile int *)ptr; } static inline int8_t -_Py_atomic_load_int8_relaxed(const int8_t *address) +_Py_atomic_load_int8_relaxed(const int8_t *ptr) { - return *(volatile int8_t *)address; + return *(volatile int8_t *)ptr; } static inline int16_t -_Py_atomic_load_int16_relaxed(const int16_t *address) +_Py_atomic_load_int16_relaxed(const int16_t *ptr) { - return *(volatile int16_t *)address; + return *(volatile int16_t *)ptr; } static inline int32_t -_Py_atomic_load_int32_relaxed(const int32_t *address) +_Py_atomic_load_int32_relaxed(const int32_t *ptr) { - return *(volatile int32_t *)address; + return *(volatile int32_t *)ptr; } static inline int64_t -_Py_atomic_load_int64_relaxed(const int64_t *address) +_Py_atomic_load_int64_relaxed(const int64_t *ptr) { - return *(volatile int64_t *)address; + return *(volatile int64_t *)ptr; } static inline intptr_t -_Py_atomic_load_intptr_relaxed(const intptr_t *address) +_Py_atomic_load_intptr_relaxed(const intptr_t *ptr) { - return *(volatile intptr_t *)address; + return *(volatile intptr_t *)ptr; } static inline uint8_t -_Py_atomic_load_uint8_relaxed(const uint8_t *address) +_Py_atomic_load_uint8_relaxed(const uint8_t *ptr) { - return *(volatile uint8_t *)address; + return *(volatile uint8_t *)ptr; } static inline uint16_t -_Py_atomic_load_uint16_relaxed(const uint16_t *address) +_Py_atomic_load_uint16_relaxed(const uint16_t *ptr) { - return *(volatile uint16_t *)address; + return *(volatile uint16_t *)ptr; } static inline uint32_t -_Py_atomic_load_uint32_relaxed(const uint32_t *address) +_Py_atomic_load_uint32_relaxed(const uint32_t *ptr) { - return *(volatile uint32_t *)address; + return *(volatile uint32_t *)ptr; } static inline uint64_t -_Py_atomic_load_uint64_relaxed(const uint64_t *address) +_Py_atomic_load_uint64_relaxed(const uint64_t *ptr) { - return *(volatile uint64_t *)address; + return *(volatile uint64_t *)ptr; } static inline uintptr_t -_Py_atomic_load_uintptr_relaxed(const uintptr_t *address) +_Py_atomic_load_uintptr_relaxed(const uintptr_t *ptr) { - return *(volatile uintptr_t *)address; + return *(volatile uintptr_t *)ptr; } static inline unsigned int -_Py_atomic_load_uint_relaxed(const unsigned int *address) +_Py_atomic_load_uint_relaxed(const unsigned int *ptr) { - return *(volatile unsigned int *)address; + return *(volatile unsigned int *)ptr; } static inline Py_ssize_t -_Py_atomic_load_ssize_relaxed(const Py_ssize_t *address) +_Py_atomic_load_ssize_relaxed(const Py_ssize_t *ptr) { - return *(volatile Py_ssize_t *)address; + return *(volatile Py_ssize_t *)ptr; } static inline void* -_Py_atomic_load_ptr_relaxed(const void *address) +_Py_atomic_load_ptr_relaxed(const void *ptr) { - return *(void * volatile *)address; + return *(void * volatile *)ptr; } static inline void -_Py_atomic_store_int(int *address, int value) +_Py_atomic_store_int(int *ptr, int value) { - _InterlockedExchange((volatile long*)address, (long)value); + _InterlockedExchange((volatile long*)ptr, (long)value); } static inline void -_Py_atomic_store_int8(int8_t *address, int8_t value) +_Py_atomic_store_int8(int8_t *ptr, int8_t value) { - _InterlockedExchange8((volatile char*)address, (char)value); + _InterlockedExchange8((volatile char*)ptr, (char)value); } static inline void -_Py_atomic_store_int16(int16_t *address, int16_t 
value) +_Py_atomic_store_int16(int16_t *ptr, int16_t value) { - _InterlockedExchange16((volatile short*)address, (short)value); + _InterlockedExchange16((volatile short*)ptr, (short)value); } static inline void -_Py_atomic_store_int32(int32_t *address, int32_t value) +_Py_atomic_store_int32(int32_t *ptr, int32_t value) { - _InterlockedExchange((volatile long*)address, (long)value); + _InterlockedExchange((volatile long*)ptr, (long)value); } static inline void -_Py_atomic_store_int64(int64_t *address, int64_t value) +_Py_atomic_store_int64(int64_t *ptr, int64_t value) { - _Py_atomic_exchange_int64(address, value); + _Py_atomic_exchange_int64(ptr, value); } static inline void -_Py_atomic_store_intptr(intptr_t *address, intptr_t value) +_Py_atomic_store_intptr(intptr_t *ptr, intptr_t value) { - _InterlockedExchangePointer((void * volatile *)address, (void *)value); + _InterlockedExchangePointer((void * volatile *)ptr, (void *)value); } static inline void -_Py_atomic_store_uint8(uint8_t *address, uint8_t value) +_Py_atomic_store_uint8(uint8_t *ptr, uint8_t value) { - _InterlockedExchange8((volatile char*)address, (char)value); + _InterlockedExchange8((volatile char*)ptr, (char)value); } static inline void -_Py_atomic_store_uint16(uint16_t *address, uint16_t value) +_Py_atomic_store_uint16(uint16_t *ptr, uint16_t value) { - _InterlockedExchange16((volatile short*)address, (short)value); + _InterlockedExchange16((volatile short*)ptr, (short)value); } static inline void -_Py_atomic_store_uint32(uint32_t *address, uint32_t value) +_Py_atomic_store_uint32(uint32_t *ptr, uint32_t value) { - _InterlockedExchange((volatile long*)address, (long)value); + _InterlockedExchange((volatile long*)ptr, (long)value); } static inline void -_Py_atomic_store_uint64(uint64_t *address, uint64_t value) +_Py_atomic_store_uint64(uint64_t *ptr, uint64_t value) { - _Py_atomic_exchange_int64((int64_t *)address, (int64_t)value); + _Py_atomic_exchange_int64((int64_t *)ptr, (int64_t)value); } static inline void -_Py_atomic_store_uintptr(uintptr_t *address, uintptr_t value) +_Py_atomic_store_uintptr(uintptr_t *ptr, uintptr_t value) { - _InterlockedExchangePointer((void * volatile *)address, (void *)value); + _InterlockedExchangePointer((void * volatile *)ptr, (void *)value); } static inline void -_Py_atomic_store_uint(unsigned int *address, unsigned int value) +_Py_atomic_store_uint(unsigned int *ptr, unsigned int value) { - _InterlockedExchange((volatile long*)address, (long)value); + _InterlockedExchange((volatile long*)ptr, (long)value); } static inline void -_Py_atomic_store_ptr(void *address, void *value) +_Py_atomic_store_ptr(void *ptr, void *value) { - _InterlockedExchangePointer((void * volatile *)address, (void *)value); + _InterlockedExchangePointer((void * volatile *)ptr, (void *)value); } static inline void -_Py_atomic_store_ssize(Py_ssize_t *address, Py_ssize_t value) +_Py_atomic_store_ssize(Py_ssize_t *ptr, Py_ssize_t value) { #if SIZEOF_SIZE_T == 8 - _InterlockedExchange64((volatile __int64 *)address, (__int64)value); + _InterlockedExchange64((volatile __int64 *)ptr, (__int64)value); #else - _InterlockedExchange((volatile long*)address, (long)value); + _InterlockedExchange((volatile long*)ptr, (long)value); #endif } static inline void -_Py_atomic_store_int_relaxed(int *address, int value) +_Py_atomic_store_int_relaxed(int *ptr, int value) { - *(volatile int *)address = value; + *(volatile int *)ptr = value; } static inline void -_Py_atomic_store_int8_relaxed(int8_t *address, int8_t value) 
+_Py_atomic_store_int8_relaxed(int8_t *ptr, int8_t value) { - *(volatile int8_t *)address = value; + *(volatile int8_t *)ptr = value; } static inline void -_Py_atomic_store_int16_relaxed(int16_t *address, int16_t value) +_Py_atomic_store_int16_relaxed(int16_t *ptr, int16_t value) { - *(volatile int16_t *)address = value; + *(volatile int16_t *)ptr = value; } static inline void -_Py_atomic_store_int32_relaxed(int32_t *address, int32_t value) +_Py_atomic_store_int32_relaxed(int32_t *ptr, int32_t value) { - *(volatile int32_t *)address = value; + *(volatile int32_t *)ptr = value; } static inline void -_Py_atomic_store_int64_relaxed(int64_t *address, int64_t value) +_Py_atomic_store_int64_relaxed(int64_t *ptr, int64_t value) { - *(volatile int64_t *)address = value; + *(volatile int64_t *)ptr = value; } static inline void -_Py_atomic_store_intptr_relaxed(intptr_t *address, intptr_t value) +_Py_atomic_store_intptr_relaxed(intptr_t *ptr, intptr_t value) { - *(volatile intptr_t *)address = value; + *(volatile intptr_t *)ptr = value; } static inline void -_Py_atomic_store_uint8_relaxed(uint8_t *address, uint8_t value) +_Py_atomic_store_uint8_relaxed(uint8_t *ptr, uint8_t value) { - *(volatile uint8_t *)address = value; + *(volatile uint8_t *)ptr = value; } static inline void -_Py_atomic_store_uint16_relaxed(uint16_t *address, uint16_t value) +_Py_atomic_store_uint16_relaxed(uint16_t *ptr, uint16_t value) { - *(volatile uint16_t *)address = value; + *(volatile uint16_t *)ptr = value; } static inline void -_Py_atomic_store_uint32_relaxed(uint32_t *address, uint32_t value) +_Py_atomic_store_uint32_relaxed(uint32_t *ptr, uint32_t value) { - *(volatile uint32_t *)address = value; + *(volatile uint32_t *)ptr = value; } static inline void -_Py_atomic_store_uint64_relaxed(uint64_t *address, uint64_t value) +_Py_atomic_store_uint64_relaxed(uint64_t *ptr, uint64_t value) { - *(volatile uint64_t *)address = value; + *(volatile uint64_t *)ptr = value; } static inline void -_Py_atomic_store_uintptr_relaxed(uintptr_t *address, uintptr_t value) +_Py_atomic_store_uintptr_relaxed(uintptr_t *ptr, uintptr_t value) { - *(volatile uintptr_t *)address = value; + *(volatile uintptr_t *)ptr = value; } static inline void -_Py_atomic_store_uint_relaxed(unsigned int *address, unsigned int value) +_Py_atomic_store_uint_relaxed(unsigned int *ptr, unsigned int value) { - *(volatile unsigned int *)address = value; + *(volatile unsigned int *)ptr = value; } static inline void -_Py_atomic_store_ptr_relaxed(void *address, void* value) +_Py_atomic_store_ptr_relaxed(void *ptr, void* value) { - *(void * volatile *)address = value; + *(void * volatile *)ptr = value; } static inline void -_Py_atomic_store_ssize_relaxed(Py_ssize_t *address, Py_ssize_t value) +_Py_atomic_store_ssize_relaxed(Py_ssize_t *ptr, Py_ssize_t value) { - *(volatile Py_ssize_t *)address = value; + *(volatile Py_ssize_t *)ptr = value; } static inline void * -_Py_atomic_load_ptr_acquire(const void *address) +_Py_atomic_load_ptr_acquire(const void *ptr) { #if defined(_M_X64) || defined(_M_IX86) - return *(void * volatile *)address; + return *(void * volatile *)ptr; #elif defined(_M_ARM64) - return (void *)__ldar64((unsigned __int64 volatile*)address); + return (void *)__ldar64((unsigned __int64 volatile*)ptr); #else #error no implementation of _Py_atomic_load_ptr_acquire #endif } static inline void -_Py_atomic_store_ptr_release(void *address, void *value) +_Py_atomic_store_ptr_release(void *ptr, void *value) { #if defined(_M_X64) || defined(_M_IX86) - *(void * 
volatile *)address = value; + *(void * volatile *)ptr = value; #elif defined(_M_ARM64) - __stlr64(address, (uintptr_t)value); + __stlr64(ptr, (uintptr_t)value); #else #error no implementation of _Py_atomic_store_ptr_release #endif diff --git a/Include/cpython/pyatomic_std.h b/Include/cpython/pyatomic_std.h index ddabb009ea49ab..b1416a2950f1a4 100644 --- a/Include/cpython/pyatomic_std.h +++ b/Include/cpython/pyatomic_std.h @@ -19,812 +19,812 @@ extern "C++" { static inline int -_Py_atomic_add_int(int *address, int value) +_Py_atomic_add_int(int *ptr, int value) { _Py_USING_STD - return atomic_fetch_add((_Atomic(int)*)address, value); + return atomic_fetch_add((_Atomic(int)*)ptr, value); } static inline int8_t -_Py_atomic_add_int8(int8_t *address, int8_t value) +_Py_atomic_add_int8(int8_t *ptr, int8_t value) { _Py_USING_STD - return atomic_fetch_add((_Atomic(int8_t)*)address, value); + return atomic_fetch_add((_Atomic(int8_t)*)ptr, value); } static inline int16_t -_Py_atomic_add_int16(int16_t *address, int16_t value) +_Py_atomic_add_int16(int16_t *ptr, int16_t value) { _Py_USING_STD - return atomic_fetch_add((_Atomic(int16_t)*)address, value); + return atomic_fetch_add((_Atomic(int16_t)*)ptr, value); } static inline int32_t -_Py_atomic_add_int32(int32_t *address, int32_t value) +_Py_atomic_add_int32(int32_t *ptr, int32_t value) { _Py_USING_STD - return atomic_fetch_add((_Atomic(int32_t)*)address, value); + return atomic_fetch_add((_Atomic(int32_t)*)ptr, value); } static inline int64_t -_Py_atomic_add_int64(int64_t *address, int64_t value) +_Py_atomic_add_int64(int64_t *ptr, int64_t value) { _Py_USING_STD - return atomic_fetch_add((_Atomic(int64_t)*)address, value); + return atomic_fetch_add((_Atomic(int64_t)*)ptr, value); } static inline intptr_t -_Py_atomic_add_intptr(intptr_t *address, intptr_t value) +_Py_atomic_add_intptr(intptr_t *ptr, intptr_t value) { _Py_USING_STD - return atomic_fetch_add((_Atomic(intptr_t)*)address, value); + return atomic_fetch_add((_Atomic(intptr_t)*)ptr, value); } static inline unsigned int -_Py_atomic_add_uint(unsigned int *address, unsigned int value) +_Py_atomic_add_uint(unsigned int *ptr, unsigned int value) { _Py_USING_STD - return atomic_fetch_add((_Atomic(unsigned int)*)address, value); + return atomic_fetch_add((_Atomic(unsigned int)*)ptr, value); } static inline uint8_t -_Py_atomic_add_uint8(uint8_t *address, uint8_t value) +_Py_atomic_add_uint8(uint8_t *ptr, uint8_t value) { _Py_USING_STD - return atomic_fetch_add((_Atomic(uint8_t)*)address, value); + return atomic_fetch_add((_Atomic(uint8_t)*)ptr, value); } static inline uint16_t -_Py_atomic_add_uint16(uint16_t *address, uint16_t value) +_Py_atomic_add_uint16(uint16_t *ptr, uint16_t value) { _Py_USING_STD - return atomic_fetch_add((_Atomic(uint16_t)*)address, value); + return atomic_fetch_add((_Atomic(uint16_t)*)ptr, value); } static inline uint32_t -_Py_atomic_add_uint32(uint32_t *address, uint32_t value) +_Py_atomic_add_uint32(uint32_t *ptr, uint32_t value) { _Py_USING_STD - return atomic_fetch_add((_Atomic(uint32_t)*)address, value); + return atomic_fetch_add((_Atomic(uint32_t)*)ptr, value); } static inline uint64_t -_Py_atomic_add_uint64(uint64_t *address, uint64_t value) +_Py_atomic_add_uint64(uint64_t *ptr, uint64_t value) { _Py_USING_STD - return atomic_fetch_add((_Atomic(uint64_t)*)address, value); + return atomic_fetch_add((_Atomic(uint64_t)*)ptr, value); } static inline uintptr_t -_Py_atomic_add_uintptr(uintptr_t *address, uintptr_t value) +_Py_atomic_add_uintptr(uintptr_t *ptr, uintptr_t 
value) { _Py_USING_STD - return atomic_fetch_add((_Atomic(uintptr_t)*)address, value); + return atomic_fetch_add((_Atomic(uintptr_t)*)ptr, value); } static inline Py_ssize_t -_Py_atomic_add_ssize(Py_ssize_t *address, Py_ssize_t value) +_Py_atomic_add_ssize(Py_ssize_t *ptr, Py_ssize_t value) { _Py_USING_STD - return atomic_fetch_add((_Atomic(Py_ssize_t)*)address, value); + return atomic_fetch_add((_Atomic(Py_ssize_t)*)ptr, value); } static inline int -_Py_atomic_compare_exchange_int(int *address, int expected, int value) +_Py_atomic_compare_exchange_int(int *ptr, int *expected, int desired) { _Py_USING_STD - return atomic_compare_exchange_strong((_Atomic(int)*)address, - &expected, value); + return atomic_compare_exchange_strong((_Atomic(int)*)ptr, + expected, desired); } static inline int -_Py_atomic_compare_exchange_int8(int8_t *address, int8_t expected, int8_t value) +_Py_atomic_compare_exchange_int8(int8_t *ptr, int8_t *expected, int8_t desired) { _Py_USING_STD - return atomic_compare_exchange_strong((_Atomic(int8_t)*)address, - &expected, value); + return atomic_compare_exchange_strong((_Atomic(int8_t)*)ptr, + expected, desired); } static inline int -_Py_atomic_compare_exchange_int16(int16_t *address, int16_t expected, int16_t value) +_Py_atomic_compare_exchange_int16(int16_t *ptr, int16_t *expected, int16_t desired) { _Py_USING_STD - return atomic_compare_exchange_strong((_Atomic(int16_t)*)address, - &expected, value); + return atomic_compare_exchange_strong((_Atomic(int16_t)*)ptr, + expected, desired); } static inline int -_Py_atomic_compare_exchange_int32(int32_t *address, int32_t expected, int32_t value) +_Py_atomic_compare_exchange_int32(int32_t *ptr, int32_t *expected, int32_t desired) { _Py_USING_STD - return atomic_compare_exchange_strong((_Atomic(int32_t)*)address, - &expected, value); + return atomic_compare_exchange_strong((_Atomic(int32_t)*)ptr, + expected, desired); } static inline int -_Py_atomic_compare_exchange_int64(int64_t *address, int64_t expected, int64_t value) +_Py_atomic_compare_exchange_int64(int64_t *ptr, int64_t *expected, int64_t desired) { _Py_USING_STD - return atomic_compare_exchange_strong((_Atomic(int64_t)*)address, - &expected, value); + return atomic_compare_exchange_strong((_Atomic(int64_t)*)ptr, + expected, desired); } static inline int -_Py_atomic_compare_exchange_intptr(intptr_t *address, intptr_t expected, intptr_t value) +_Py_atomic_compare_exchange_intptr(intptr_t *ptr, intptr_t *expected, intptr_t desired) { _Py_USING_STD - return atomic_compare_exchange_strong((_Atomic(intptr_t)*)address, - &expected, value); + return atomic_compare_exchange_strong((_Atomic(intptr_t)*)ptr, + expected, desired); } static inline int -_Py_atomic_compare_exchange_uint(unsigned int *address, unsigned int expected, unsigned int value) +_Py_atomic_compare_exchange_uint(unsigned int *ptr, unsigned int *expected, unsigned int desired) { _Py_USING_STD - return atomic_compare_exchange_strong((_Atomic(unsigned int)*)address, - &expected, value); + return atomic_compare_exchange_strong((_Atomic(unsigned int)*)ptr, + expected, desired); } static inline int -_Py_atomic_compare_exchange_uint8(uint8_t *address, uint8_t expected, uint8_t value) +_Py_atomic_compare_exchange_uint8(uint8_t *ptr, uint8_t *expected, uint8_t desired) { _Py_USING_STD - return atomic_compare_exchange_strong((_Atomic(uint8_t)*)address, - &expected, value); + return atomic_compare_exchange_strong((_Atomic(uint8_t)*)ptr, + expected, desired); } static inline int 
-_Py_atomic_compare_exchange_uint16(uint16_t *address, uint16_t expected, uint16_t value) +_Py_atomic_compare_exchange_uint16(uint16_t *ptr, uint16_t *expected, uint16_t desired) { _Py_USING_STD - return atomic_compare_exchange_strong((_Atomic(uint16_t)*)address, - &expected, value); + return atomic_compare_exchange_strong((_Atomic(uint16_t)*)ptr, + expected, desired); } static inline int -_Py_atomic_compare_exchange_uint32(uint32_t *address, uint32_t expected, uint32_t value) +_Py_atomic_compare_exchange_uint32(uint32_t *ptr, uint32_t *expected, uint32_t desired) { _Py_USING_STD - return atomic_compare_exchange_strong((_Atomic(uint32_t)*)address, - &expected, value); + return atomic_compare_exchange_strong((_Atomic(uint32_t)*)ptr, + expected, desired); } static inline int -_Py_atomic_compare_exchange_uint64(uint64_t *address, uint64_t expected, uint64_t value) +_Py_atomic_compare_exchange_uint64(uint64_t *ptr, uint64_t *expected, uint64_t desired) { _Py_USING_STD - return atomic_compare_exchange_strong((_Atomic(uint64_t)*)address, - &expected, value); + return atomic_compare_exchange_strong((_Atomic(uint64_t)*)ptr, + expected, desired); } static inline int -_Py_atomic_compare_exchange_uintptr(uintptr_t *address, uintptr_t expected, uintptr_t value) +_Py_atomic_compare_exchange_uintptr(uintptr_t *ptr, uintptr_t *expected, uintptr_t desired) { _Py_USING_STD - return atomic_compare_exchange_strong((_Atomic(uintptr_t)*)address, - &expected, value); + return atomic_compare_exchange_strong((_Atomic(uintptr_t)*)ptr, + expected, desired); } static inline int -_Py_atomic_compare_exchange_ssize(Py_ssize_t *address, Py_ssize_t expected, Py_ssize_t value) +_Py_atomic_compare_exchange_ssize(Py_ssize_t *ptr, Py_ssize_t *expected, Py_ssize_t desired) { _Py_USING_STD - return atomic_compare_exchange_strong((_Atomic(Py_ssize_t)*)address, - &expected, value); + return atomic_compare_exchange_strong((_Atomic(Py_ssize_t)*)ptr, + expected, desired); } static inline int -_Py_atomic_compare_exchange_ptr(void *address, void *expected, void *value) +_Py_atomic_compare_exchange_ptr(void *ptr, void *expected, void *desired) { _Py_USING_STD - return atomic_compare_exchange_strong((_Atomic(void *)*)address, - &expected, value); + return atomic_compare_exchange_strong((_Atomic(void *)*)ptr, + (void **)expected, desired); } static inline int -_Py_atomic_exchange_int(int *address, int value) +_Py_atomic_exchange_int(int *ptr, int value) { _Py_USING_STD - return atomic_exchange((_Atomic(int)*)address, value); + return atomic_exchange((_Atomic(int)*)ptr, value); } static inline int8_t -_Py_atomic_exchange_int8(int8_t *address, int8_t value) +_Py_atomic_exchange_int8(int8_t *ptr, int8_t value) { _Py_USING_STD - return atomic_exchange((_Atomic(int8_t)*)address, value); + return atomic_exchange((_Atomic(int8_t)*)ptr, value); } static inline int16_t -_Py_atomic_exchange_int16(int16_t *address, int16_t value) +_Py_atomic_exchange_int16(int16_t *ptr, int16_t value) { _Py_USING_STD - return atomic_exchange((_Atomic(int16_t)*)address, value); + return atomic_exchange((_Atomic(int16_t)*)ptr, value); } static inline int32_t -_Py_atomic_exchange_int32(int32_t *address, int32_t value) +_Py_atomic_exchange_int32(int32_t *ptr, int32_t value) { _Py_USING_STD - return atomic_exchange((_Atomic(int32_t)*)address, value); + return atomic_exchange((_Atomic(int32_t)*)ptr, value); } static inline int64_t -_Py_atomic_exchange_int64(int64_t *address, int64_t value) +_Py_atomic_exchange_int64(int64_t *ptr, int64_t value) { _Py_USING_STD - return 
atomic_exchange((_Atomic(int64_t)*)address, value); + return atomic_exchange((_Atomic(int64_t)*)ptr, value); } static inline intptr_t -_Py_atomic_exchange_intptr(intptr_t *address, intptr_t value) +_Py_atomic_exchange_intptr(intptr_t *ptr, intptr_t value) { _Py_USING_STD - return atomic_exchange((_Atomic(intptr_t)*)address, value); + return atomic_exchange((_Atomic(intptr_t)*)ptr, value); } static inline unsigned int -_Py_atomic_exchange_uint(unsigned int *address, unsigned int value) +_Py_atomic_exchange_uint(unsigned int *ptr, unsigned int value) { _Py_USING_STD - return atomic_exchange((_Atomic(unsigned int)*)address, value); + return atomic_exchange((_Atomic(unsigned int)*)ptr, value); } static inline uint8_t -_Py_atomic_exchange_uint8(uint8_t *address, uint8_t value) +_Py_atomic_exchange_uint8(uint8_t *ptr, uint8_t value) { _Py_USING_STD - return atomic_exchange((_Atomic(uint8_t)*)address, value); + return atomic_exchange((_Atomic(uint8_t)*)ptr, value); } static inline uint16_t -_Py_atomic_exchange_uint16(uint16_t *address, uint16_t value) +_Py_atomic_exchange_uint16(uint16_t *ptr, uint16_t value) { _Py_USING_STD - return atomic_exchange((_Atomic(uint16_t)*)address, value); + return atomic_exchange((_Atomic(uint16_t)*)ptr, value); } static inline uint32_t -_Py_atomic_exchange_uint32(uint32_t *address, uint32_t value) +_Py_atomic_exchange_uint32(uint32_t *ptr, uint32_t value) { _Py_USING_STD - return atomic_exchange((_Atomic(uint32_t)*)address, value); + return atomic_exchange((_Atomic(uint32_t)*)ptr, value); } static inline uint64_t -_Py_atomic_exchange_uint64(uint64_t *address, uint64_t value) +_Py_atomic_exchange_uint64(uint64_t *ptr, uint64_t value) { _Py_USING_STD - return atomic_exchange((_Atomic(uint64_t)*)address, value); + return atomic_exchange((_Atomic(uint64_t)*)ptr, value); } static inline uintptr_t -_Py_atomic_exchange_uintptr(uintptr_t *address, uintptr_t value) +_Py_atomic_exchange_uintptr(uintptr_t *ptr, uintptr_t value) { _Py_USING_STD - return atomic_exchange((_Atomic(uintptr_t)*)address, value); + return atomic_exchange((_Atomic(uintptr_t)*)ptr, value); } static inline Py_ssize_t -_Py_atomic_exchange_ssize(Py_ssize_t *address, Py_ssize_t value) +_Py_atomic_exchange_ssize(Py_ssize_t *ptr, Py_ssize_t value) { _Py_USING_STD - return atomic_exchange((_Atomic(Py_ssize_t)*)address, value); + return atomic_exchange((_Atomic(Py_ssize_t)*)ptr, value); } static inline void * -_Py_atomic_exchange_ptr(void *address, void *value) +_Py_atomic_exchange_ptr(void *ptr, void *value) { _Py_USING_STD - return atomic_exchange((_Atomic(void *)*)address, value); + return atomic_exchange((_Atomic(void *)*)ptr, value); } static inline uint8_t -_Py_atomic_and_uint8(uint8_t *address, uint8_t value) +_Py_atomic_and_uint8(uint8_t *ptr, uint8_t value) { _Py_USING_STD - return atomic_fetch_and((_Atomic(uint8_t)*)address, value); + return atomic_fetch_and((_Atomic(uint8_t)*)ptr, value); } static inline uint16_t -_Py_atomic_and_uint16(uint16_t *address, uint16_t value) +_Py_atomic_and_uint16(uint16_t *ptr, uint16_t value) { _Py_USING_STD - return atomic_fetch_and((_Atomic(uint16_t)*)address, value); + return atomic_fetch_and((_Atomic(uint16_t)*)ptr, value); } static inline uint32_t -_Py_atomic_and_uint32(uint32_t *address, uint32_t value) +_Py_atomic_and_uint32(uint32_t *ptr, uint32_t value) { _Py_USING_STD - return atomic_fetch_and((_Atomic(uint32_t)*)address, value); + return atomic_fetch_and((_Atomic(uint32_t)*)ptr, value); } static inline uint64_t -_Py_atomic_and_uint64(uint64_t *address, 
uint64_t value) +_Py_atomic_and_uint64(uint64_t *ptr, uint64_t value) { _Py_USING_STD - return atomic_fetch_and((_Atomic(uint64_t)*)address, value); + return atomic_fetch_and((_Atomic(uint64_t)*)ptr, value); } static inline uintptr_t -_Py_atomic_and_uintptr(uintptr_t *address, uintptr_t value) +_Py_atomic_and_uintptr(uintptr_t *ptr, uintptr_t value) { _Py_USING_STD - return atomic_fetch_and((_Atomic(uintptr_t)*)address, value); + return atomic_fetch_and((_Atomic(uintptr_t)*)ptr, value); } static inline uint8_t -_Py_atomic_or_uint8(uint8_t *address, uint8_t value) +_Py_atomic_or_uint8(uint8_t *ptr, uint8_t value) { _Py_USING_STD - return atomic_fetch_or((_Atomic(uint8_t)*)address, value); + return atomic_fetch_or((_Atomic(uint8_t)*)ptr, value); } static inline uint16_t -_Py_atomic_or_uint16(uint16_t *address, uint16_t value) +_Py_atomic_or_uint16(uint16_t *ptr, uint16_t value) { _Py_USING_STD - return atomic_fetch_or((_Atomic(uint16_t)*)address, value); + return atomic_fetch_or((_Atomic(uint16_t)*)ptr, value); } static inline uint32_t -_Py_atomic_or_uint32(uint32_t *address, uint32_t value) +_Py_atomic_or_uint32(uint32_t *ptr, uint32_t value) { _Py_USING_STD - return atomic_fetch_or((_Atomic(uint32_t)*)address, value); + return atomic_fetch_or((_Atomic(uint32_t)*)ptr, value); } static inline uint64_t -_Py_atomic_or_uint64(uint64_t *address, uint64_t value) +_Py_atomic_or_uint64(uint64_t *ptr, uint64_t value) { _Py_USING_STD - return atomic_fetch_or((_Atomic(uint64_t)*)address, value); + return atomic_fetch_or((_Atomic(uint64_t)*)ptr, value); } static inline uintptr_t -_Py_atomic_or_uintptr(uintptr_t *address, uintptr_t value) +_Py_atomic_or_uintptr(uintptr_t *ptr, uintptr_t value) { _Py_USING_STD - return atomic_fetch_or((_Atomic(uintptr_t)*)address, value); + return atomic_fetch_or((_Atomic(uintptr_t)*)ptr, value); } static inline int -_Py_atomic_load_int(const int *address) +_Py_atomic_load_int(const int *ptr) { _Py_USING_STD - return atomic_load((const _Atomic(int)*)address); + return atomic_load((const _Atomic(int)*)ptr); } static inline int8_t -_Py_atomic_load_int8(const int8_t *address) +_Py_atomic_load_int8(const int8_t *ptr) { _Py_USING_STD - return atomic_load((const _Atomic(int8_t)*)address); + return atomic_load((const _Atomic(int8_t)*)ptr); } static inline int16_t -_Py_atomic_load_int16(const int16_t *address) +_Py_atomic_load_int16(const int16_t *ptr) { _Py_USING_STD - return atomic_load((const _Atomic(int16_t)*)address); + return atomic_load((const _Atomic(int16_t)*)ptr); } static inline int32_t -_Py_atomic_load_int32(const int32_t *address) +_Py_atomic_load_int32(const int32_t *ptr) { _Py_USING_STD - return atomic_load((const _Atomic(int32_t)*)address); + return atomic_load((const _Atomic(int32_t)*)ptr); } static inline int64_t -_Py_atomic_load_int64(const int64_t *address) +_Py_atomic_load_int64(const int64_t *ptr) { _Py_USING_STD - return atomic_load((const _Atomic(int64_t)*)address); + return atomic_load((const _Atomic(int64_t)*)ptr); } static inline intptr_t -_Py_atomic_load_intptr(const intptr_t *address) +_Py_atomic_load_intptr(const intptr_t *ptr) { _Py_USING_STD - return atomic_load((const _Atomic(intptr_t)*)address); + return atomic_load((const _Atomic(intptr_t)*)ptr); } static inline uint8_t -_Py_atomic_load_uint8(const uint8_t *address) +_Py_atomic_load_uint8(const uint8_t *ptr) { _Py_USING_STD - return atomic_load((const _Atomic(uint8_t)*)address); + return atomic_load((const _Atomic(uint8_t)*)ptr); } static inline uint16_t -_Py_atomic_load_uint16(const uint16_t 
*address) +_Py_atomic_load_uint16(const uint16_t *ptr) { _Py_USING_STD - return atomic_load((const _Atomic(uint32_t)*)address); + return atomic_load((const _Atomic(uint16_t)*)ptr); } static inline uint32_t -_Py_atomic_load_uint32(const uint32_t *address) +_Py_atomic_load_uint32(const uint32_t *ptr) { _Py_USING_STD - return atomic_load((const _Atomic(uint32_t)*)address); + return atomic_load((const _Atomic(uint32_t)*)ptr); } static inline uint64_t -_Py_atomic_load_uint64(const uint64_t *address) +_Py_atomic_load_uint64(const uint64_t *ptr) { _Py_USING_STD - return atomic_load((const _Atomic(uint64_t)*)address); + return atomic_load((const _Atomic(uint64_t)*)ptr); } static inline uintptr_t -_Py_atomic_load_uintptr(const uintptr_t *address) +_Py_atomic_load_uintptr(const uintptr_t *ptr) { _Py_USING_STD - return atomic_load((const _Atomic(uintptr_t)*)address); + return atomic_load((const _Atomic(uintptr_t)*)ptr); } static inline unsigned int -_Py_atomic_load_uint(const unsigned int *address) +_Py_atomic_load_uint(const unsigned int *ptr) { _Py_USING_STD - return atomic_load((const _Atomic(unsigned int)*)address); + return atomic_load((const _Atomic(unsigned int)*)ptr); } static inline Py_ssize_t -_Py_atomic_load_ssize(const Py_ssize_t *address) +_Py_atomic_load_ssize(const Py_ssize_t *ptr) { _Py_USING_STD - return atomic_load((const _Atomic(Py_ssize_t)*)address); + return atomic_load((const _Atomic(Py_ssize_t)*)ptr); } static inline void * -_Py_atomic_load_ptr(const void *address) +_Py_atomic_load_ptr(const void *ptr) { _Py_USING_STD - return atomic_load((const _Atomic(void*)*)address); + return atomic_load((const _Atomic(void*)*)ptr); } static inline int -_Py_atomic_load_int_relaxed(const int *address) +_Py_atomic_load_int_relaxed(const int *ptr) { _Py_USING_STD - return atomic_load_explicit((const _Atomic(int)*)address, + return atomic_load_explicit((const _Atomic(int)*)ptr, memory_order_relaxed); } static inline int8_t -_Py_atomic_load_int8_relaxed(const int8_t *address) +_Py_atomic_load_int8_relaxed(const int8_t *ptr) { _Py_USING_STD - return atomic_load_explicit((const _Atomic(int8_t)*)address, + return atomic_load_explicit((const _Atomic(int8_t)*)ptr, memory_order_relaxed); } static inline int16_t -_Py_atomic_load_int16_relaxed(const int16_t *address) +_Py_atomic_load_int16_relaxed(const int16_t *ptr) { _Py_USING_STD - return atomic_load_explicit((const _Atomic(int16_t)*)address, + return atomic_load_explicit((const _Atomic(int16_t)*)ptr, memory_order_relaxed); } static inline int32_t -_Py_atomic_load_int32_relaxed(const int32_t *address) +_Py_atomic_load_int32_relaxed(const int32_t *ptr) { _Py_USING_STD - return atomic_load_explicit((const _Atomic(int32_t)*)address, + return atomic_load_explicit((const _Atomic(int32_t)*)ptr, memory_order_relaxed); } static inline int64_t -_Py_atomic_load_int64_relaxed(const int64_t *address) +_Py_atomic_load_int64_relaxed(const int64_t *ptr) { _Py_USING_STD - return atomic_load_explicit((const _Atomic(int64_t)*)address, + return atomic_load_explicit((const _Atomic(int64_t)*)ptr, memory_order_relaxed); } static inline intptr_t -_Py_atomic_load_intptr_relaxed(const intptr_t *address) +_Py_atomic_load_intptr_relaxed(const intptr_t *ptr) { _Py_USING_STD - return atomic_load_explicit((const _Atomic(intptr_t)*)address, + return atomic_load_explicit((const _Atomic(intptr_t)*)ptr, memory_order_relaxed); } static inline uint8_t -_Py_atomic_load_uint8_relaxed(const uint8_t *address) +_Py_atomic_load_uint8_relaxed(const uint8_t *ptr) { _Py_USING_STD - return
atomic_load_explicit((const _Atomic(uint8_t)*)address, + return atomic_load_explicit((const _Atomic(uint8_t)*)ptr, memory_order_relaxed); } static inline uint16_t -_Py_atomic_load_uint16_relaxed(const uint16_t *address) +_Py_atomic_load_uint16_relaxed(const uint16_t *ptr) { _Py_USING_STD - return atomic_load_explicit((const _Atomic(uint16_t)*)address, + return atomic_load_explicit((const _Atomic(uint16_t)*)ptr, memory_order_relaxed); } static inline uint32_t -_Py_atomic_load_uint32_relaxed(const uint32_t *address) +_Py_atomic_load_uint32_relaxed(const uint32_t *ptr) { _Py_USING_STD - return atomic_load_explicit((const _Atomic(uint32_t)*)address, + return atomic_load_explicit((const _Atomic(uint32_t)*)ptr, memory_order_relaxed); } static inline uint64_t -_Py_atomic_load_uint64_relaxed(const uint64_t *address) +_Py_atomic_load_uint64_relaxed(const uint64_t *ptr) { _Py_USING_STD - return atomic_load_explicit((const _Atomic(uint64_t)*)address, + return atomic_load_explicit((const _Atomic(uint64_t)*)ptr, memory_order_relaxed); } static inline uintptr_t -_Py_atomic_load_uintptr_relaxed(const uintptr_t *address) +_Py_atomic_load_uintptr_relaxed(const uintptr_t *ptr) { _Py_USING_STD - return atomic_load_explicit((const _Atomic(uintptr_t)*)address, + return atomic_load_explicit((const _Atomic(uintptr_t)*)ptr, memory_order_relaxed); } static inline unsigned int -_Py_atomic_load_uint_relaxed(const unsigned int *address) +_Py_atomic_load_uint_relaxed(const unsigned int *ptr) { _Py_USING_STD - return atomic_load_explicit((const _Atomic(unsigned int)*)address, + return atomic_load_explicit((const _Atomic(unsigned int)*)ptr, memory_order_relaxed); } static inline Py_ssize_t -_Py_atomic_load_ssize_relaxed(const Py_ssize_t *address) +_Py_atomic_load_ssize_relaxed(const Py_ssize_t *ptr) { _Py_USING_STD - return atomic_load_explicit((const _Atomic(Py_ssize_t)*)address, + return atomic_load_explicit((const _Atomic(Py_ssize_t)*)ptr, memory_order_relaxed); } static inline void * -_Py_atomic_load_ptr_relaxed(const void *address) +_Py_atomic_load_ptr_relaxed(const void *ptr) { _Py_USING_STD - return atomic_load_explicit((const _Atomic(void*)*)address, + return atomic_load_explicit((const _Atomic(void*)*)ptr, memory_order_relaxed); } static inline void -_Py_atomic_store_int(int *address, int value) +_Py_atomic_store_int(int *ptr, int value) { _Py_USING_STD - atomic_store((_Atomic(int)*)address, value); + atomic_store((_Atomic(int)*)ptr, value); } static inline void -_Py_atomic_store_int8(int8_t *address, int8_t value) +_Py_atomic_store_int8(int8_t *ptr, int8_t value) { _Py_USING_STD - atomic_store((_Atomic(int8_t)*)address, value); + atomic_store((_Atomic(int8_t)*)ptr, value); } static inline void -_Py_atomic_store_int16(int16_t *address, int16_t value) +_Py_atomic_store_int16(int16_t *ptr, int16_t value) { _Py_USING_STD - atomic_store((_Atomic(int16_t)*)address, value); + atomic_store((_Atomic(int16_t)*)ptr, value); } static inline void -_Py_atomic_store_int32(int32_t *address, int32_t value) +_Py_atomic_store_int32(int32_t *ptr, int32_t value) { _Py_USING_STD - atomic_store((_Atomic(int32_t)*)address, value); + atomic_store((_Atomic(int32_t)*)ptr, value); } static inline void -_Py_atomic_store_int64(int64_t *address, int64_t value) +_Py_atomic_store_int64(int64_t *ptr, int64_t value) { _Py_USING_STD - atomic_store((_Atomic(int64_t)*)address, value); + atomic_store((_Atomic(int64_t)*)ptr, value); } static inline void -_Py_atomic_store_intptr(intptr_t *address, intptr_t value) +_Py_atomic_store_intptr(intptr_t 
*ptr, intptr_t value) { _Py_USING_STD - atomic_store((_Atomic(intptr_t)*)address, value); + atomic_store((_Atomic(intptr_t)*)ptr, value); } static inline void -_Py_atomic_store_uint8(uint8_t *address, uint8_t value) +_Py_atomic_store_uint8(uint8_t *ptr, uint8_t value) { _Py_USING_STD - atomic_store((_Atomic(uint8_t)*)address, value); + atomic_store((_Atomic(uint8_t)*)ptr, value); } static inline void -_Py_atomic_store_uint16(uint16_t *address, uint16_t value) +_Py_atomic_store_uint16(uint16_t *ptr, uint16_t value) { _Py_USING_STD - atomic_store((_Atomic(uint16_t)*)address, value); + atomic_store((_Atomic(uint16_t)*)ptr, value); } static inline void -_Py_atomic_store_uint32(uint32_t *address, uint32_t value) +_Py_atomic_store_uint32(uint32_t *ptr, uint32_t value) { _Py_USING_STD - atomic_store((_Atomic(uint32_t)*)address, value); + atomic_store((_Atomic(uint32_t)*)ptr, value); } static inline void -_Py_atomic_store_uint64(uint64_t *address, uint64_t value) +_Py_atomic_store_uint64(uint64_t *ptr, uint64_t value) { _Py_USING_STD - atomic_store((_Atomic(uint64_t)*)address, value); + atomic_store((_Atomic(uint64_t)*)ptr, value); } static inline void -_Py_atomic_store_uintptr(uintptr_t *address, uintptr_t value) +_Py_atomic_store_uintptr(uintptr_t *ptr, uintptr_t value) { _Py_USING_STD - atomic_store((_Atomic(uintptr_t)*)address, value); + atomic_store((_Atomic(uintptr_t)*)ptr, value); } static inline void -_Py_atomic_store_uint(unsigned int *address, unsigned int value) +_Py_atomic_store_uint(unsigned int *ptr, unsigned int value) { _Py_USING_STD - atomic_store((_Atomic(unsigned int)*)address, value); + atomic_store((_Atomic(unsigned int)*)ptr, value); } static inline void -_Py_atomic_store_ptr(void *address, void *value) +_Py_atomic_store_ptr(void *ptr, void *value) { _Py_USING_STD - atomic_store((_Atomic(void*)*)address, value); + atomic_store((_Atomic(void*)*)ptr, value); } static inline void -_Py_atomic_store_ssize(Py_ssize_t *address, Py_ssize_t value) +_Py_atomic_store_ssize(Py_ssize_t *ptr, Py_ssize_t value) { _Py_USING_STD - atomic_store((_Atomic(Py_ssize_t)*)address, value); + atomic_store((_Atomic(Py_ssize_t)*)ptr, value); } static inline void -_Py_atomic_store_int_relaxed(int *address, int value) +_Py_atomic_store_int_relaxed(int *ptr, int value) { _Py_USING_STD - atomic_store_explicit((_Atomic(int)*)address, value, + atomic_store_explicit((_Atomic(int)*)ptr, value, memory_order_relaxed); } static inline void -_Py_atomic_store_int8_relaxed(int8_t *address, int8_t value) +_Py_atomic_store_int8_relaxed(int8_t *ptr, int8_t value) { _Py_USING_STD - atomic_store_explicit((_Atomic(int8_t)*)address, value, + atomic_store_explicit((_Atomic(int8_t)*)ptr, value, memory_order_relaxed); } static inline void -_Py_atomic_store_int16_relaxed(int16_t *address, int16_t value) +_Py_atomic_store_int16_relaxed(int16_t *ptr, int16_t value) { _Py_USING_STD - atomic_store_explicit((_Atomic(int16_t)*)address, value, + atomic_store_explicit((_Atomic(int16_t)*)ptr, value, memory_order_relaxed); } static inline void -_Py_atomic_store_int32_relaxed(int32_t *address, int32_t value) +_Py_atomic_store_int32_relaxed(int32_t *ptr, int32_t value) { _Py_USING_STD - atomic_store_explicit((_Atomic(int32_t)*)address, value, + atomic_store_explicit((_Atomic(int32_t)*)ptr, value, memory_order_relaxed); } static inline void -_Py_atomic_store_int64_relaxed(int64_t *address, int64_t value) +_Py_atomic_store_int64_relaxed(int64_t *ptr, int64_t value) { _Py_USING_STD - atomic_store_explicit((_Atomic(int64_t)*)address, value, + 
atomic_store_explicit((_Atomic(int64_t)*)ptr, value, memory_order_relaxed); } static inline void -_Py_atomic_store_intptr_relaxed(intptr_t *address, intptr_t value) +_Py_atomic_store_intptr_relaxed(intptr_t *ptr, intptr_t value) { _Py_USING_STD - atomic_store_explicit((_Atomic(intptr_t)*)address, value, + atomic_store_explicit((_Atomic(intptr_t)*)ptr, value, memory_order_relaxed); } static inline void -_Py_atomic_store_uint8_relaxed(uint8_t *address, uint8_t value) +_Py_atomic_store_uint8_relaxed(uint8_t *ptr, uint8_t value) { _Py_USING_STD - atomic_store_explicit((_Atomic(uint8_t)*)address, value, + atomic_store_explicit((_Atomic(uint8_t)*)ptr, value, memory_order_relaxed); } static inline void -_Py_atomic_store_uint16_relaxed(uint16_t *address, uint16_t value) +_Py_atomic_store_uint16_relaxed(uint16_t *ptr, uint16_t value) { _Py_USING_STD - atomic_store_explicit((_Atomic(uint16_t)*)address, value, + atomic_store_explicit((_Atomic(uint16_t)*)ptr, value, memory_order_relaxed); } static inline void -_Py_atomic_store_uint32_relaxed(uint32_t *address, uint32_t value) +_Py_atomic_store_uint32_relaxed(uint32_t *ptr, uint32_t value) { _Py_USING_STD - atomic_store_explicit((_Atomic(uint32_t)*)address, value, + atomic_store_explicit((_Atomic(uint32_t)*)ptr, value, memory_order_relaxed); } static inline void -_Py_atomic_store_uint64_relaxed(uint64_t *address, uint64_t value) +_Py_atomic_store_uint64_relaxed(uint64_t *ptr, uint64_t value) { _Py_USING_STD - atomic_store_explicit((_Atomic(uint64_t)*)address, value, + atomic_store_explicit((_Atomic(uint64_t)*)ptr, value, memory_order_relaxed); } static inline void -_Py_atomic_store_uintptr_relaxed(uintptr_t *address, uintptr_t value) +_Py_atomic_store_uintptr_relaxed(uintptr_t *ptr, uintptr_t value) { _Py_USING_STD - atomic_store_explicit((_Atomic(uintptr_t)*)address, value, + atomic_store_explicit((_Atomic(uintptr_t)*)ptr, value, memory_order_relaxed); } static inline void -_Py_atomic_store_uint_relaxed(unsigned int *address, unsigned int value) +_Py_atomic_store_uint_relaxed(unsigned int *ptr, unsigned int value) { _Py_USING_STD - atomic_store_explicit((_Atomic(unsigned int)*)address, value, + atomic_store_explicit((_Atomic(unsigned int)*)ptr, value, memory_order_relaxed); } static inline void -_Py_atomic_store_ptr_relaxed(void *address, void *value) +_Py_atomic_store_ptr_relaxed(void *ptr, void *value) { _Py_USING_STD - atomic_store_explicit((_Atomic(void*)*)address, value, + atomic_store_explicit((_Atomic(void*)*)ptr, value, memory_order_relaxed); } static inline void -_Py_atomic_store_ssize_relaxed(Py_ssize_t *address, Py_ssize_t value) +_Py_atomic_store_ssize_relaxed(Py_ssize_t *ptr, Py_ssize_t value) { _Py_USING_STD - atomic_store_explicit((_Atomic(Py_ssize_t)*)address, value, + atomic_store_explicit((_Atomic(Py_ssize_t)*)ptr, value, memory_order_relaxed); } static inline void * -_Py_atomic_load_ptr_acquire(const void *address) +_Py_atomic_load_ptr_acquire(const void *ptr) { _Py_USING_STD - return atomic_load_explicit((const _Atomic(void*)*)address, + return atomic_load_explicit((const _Atomic(void*)*)ptr, memory_order_acquire); } static inline void -_Py_atomic_store_ptr_release(void *address, void *value) +_Py_atomic_store_ptr_release(void *ptr, void *value) { _Py_USING_STD - atomic_store_explicit((_Atomic(void*)*)address, value, + atomic_store_explicit((_Atomic(void*)*)ptr, value, memory_order_release); } diff --git a/Modules/_testcapi/pyatomic.c b/Modules/_testcapi/pyatomic.c index 08bd6c91b87114..15602ce3f4ab3b 100644 --- 
a/Modules/_testcapi/pyatomic.c +++ b/Modules/_testcapi/pyatomic.c @@ -64,12 +64,15 @@ test_atomic_compare_exchange_##suffix(PyObject *self, PyObject *obj) { \ dtype x = (dtype)0; \ dtype y = (dtype)1; \ dtype z = (dtype)2; \ - assert(_Py_atomic_compare_exchange_##suffix(&x, y, z) == 0); \ + assert(_Py_atomic_compare_exchange_##suffix(&x, &y, z) == 0); \ assert(x == 0); \ - assert(_Py_atomic_compare_exchange_##suffix(&x, 0, z) == 1); \ + assert(y == 0); \ + assert(_Py_atomic_compare_exchange_##suffix(&x, &y, z) == 1); \ assert(x == z); \ - assert(_Py_atomic_compare_exchange_##suffix(&x, y, z) == 0); \ + assert(y == 0); \ + assert(_Py_atomic_compare_exchange_##suffix(&x, &y, z) == 0); \ assert(x == z); \ + assert(y == z); \ Py_RETURN_NONE; \ } FOR_ALL_TYPES(IMPL_TEST_COMPARE_EXCHANGE) From a8e25383da585c6bd0a8da167d971b2a4cc75975 Mon Sep 17 00:00:00 2001 From: Sam Gross Date: Wed, 30 Aug 2023 12:01:32 -0400 Subject: [PATCH 23/24] Update Include/cpython/pyatomic.h Co-authored-by: Victor Stinner --- Include/cpython/pyatomic.h | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Include/cpython/pyatomic.h b/Include/cpython/pyatomic.h index 8a95f7b49a7b7e..258c907fd10200 100644 --- a/Include/cpython/pyatomic.h +++ b/Include/cpython/pyatomic.h @@ -66,8 +66,8 @@ _Py_atomic_add_ssize(Py_ssize_t *ptr, Py_ssize_t value); // Performs an atomic compare-and-exchange. If `*ptr` and `*expected` are // equal, then `desired` is stored in `*ptr`. Otherwise `*expected` is updated // with the current value of `*ptr`. Returns 1 on success and 0 on failure. -// These correspond to the "strong" variations of the C11 -// atomic_compare_exchange_* functions. +// These correspond to the C11 +// atomic_compare_exchange_strong() function. static inline int _Py_atomic_compare_exchange_int(int *ptr, int *expected, int desired); From 433319fc38fd8e3e1b2db1a31dc1769c1c814b81 Mon Sep 17 00:00:00 2001 From: Sam Gross Date: Wed, 30 Aug 2023 09:10:30 -0700 Subject: [PATCH 24/24] Rename ptr to obj --- Include/cpython/pyatomic.h | 246 ++++++++-------- Include/cpython/pyatomic_gcc.h | 436 ++++++++++++++-------------- Include/cpython/pyatomic_msc.h | 500 ++++++++++++++++----------------- Include/cpython/pyatomic_std.h | 436 ++++++++++++++-------------- 4 files changed, 809 insertions(+), 809 deletions(-) diff --git a/Include/cpython/pyatomic.h b/Include/cpython/pyatomic.h index 258c907fd10200..70669159cf5b1a 100644 --- a/Include/cpython/pyatomic.h +++ b/Include/cpython/pyatomic.h @@ -23,350 +23,350 @@ #ifndef Py_ATOMIC_H #define Py_ATOMIC_H -// Atomically adds `value` to `ptr` and returns the previous value +// Atomically adds `value` to `obj` and returns the previous value static inline int -_Py_atomic_add_int(int *ptr, int value); +_Py_atomic_add_int(int *obj, int value); static inline int8_t -_Py_atomic_add_int8(int8_t *ptr, int8_t value); +_Py_atomic_add_int8(int8_t *obj, int8_t value); static inline int16_t -_Py_atomic_add_int16(int16_t *ptr, int16_t value); +_Py_atomic_add_int16(int16_t *obj, int16_t value); static inline int32_t -_Py_atomic_add_int32(int32_t *ptr, int32_t value); +_Py_atomic_add_int32(int32_t *obj, int32_t value); static inline int64_t -_Py_atomic_add_int64(int64_t *ptr, int64_t value); +_Py_atomic_add_int64(int64_t *obj, int64_t value); static inline intptr_t -_Py_atomic_add_intptr(intptr_t *ptr, intptr_t value); +_Py_atomic_add_intptr(intptr_t *obj, intptr_t value); static inline unsigned int -_Py_atomic_add_uint(unsigned int *ptr, unsigned int value); +_Py_atomic_add_uint(unsigned int 
*obj, unsigned int value); static inline uint8_t -_Py_atomic_add_uint8(uint8_t *ptr, uint8_t value); +_Py_atomic_add_uint8(uint8_t *obj, uint8_t value); static inline uint16_t -_Py_atomic_add_uint16(uint16_t *ptr, uint16_t value); +_Py_atomic_add_uint16(uint16_t *obj, uint16_t value); static inline uint32_t -_Py_atomic_add_uint32(uint32_t *ptr, uint32_t value); +_Py_atomic_add_uint32(uint32_t *obj, uint32_t value); static inline uint64_t -_Py_atomic_add_uint64(uint64_t *ptr, uint64_t value); +_Py_atomic_add_uint64(uint64_t *obj, uint64_t value); static inline uintptr_t -_Py_atomic_add_uintptr(uintptr_t *ptr, uintptr_t value); +_Py_atomic_add_uintptr(uintptr_t *obj, uintptr_t value); static inline Py_ssize_t -_Py_atomic_add_ssize(Py_ssize_t *ptr, Py_ssize_t value); +_Py_atomic_add_ssize(Py_ssize_t *obj, Py_ssize_t value); -// Performs an atomic compare-and-exchange. If `*ptr` and `*expected` are -// equal, then `desired` is stored in `*ptr`. Otherwise `*expected` is updated -// with the current value of `*ptr`. Returns 1 on success and 0 on failure. +// Performs an atomic compare-and-exchange. If `*obj` and `*expected` are +// equal, then `desired` is stored in `*obj`. Otherwise `*expected` is updated +// with the current value of `*obj`. Returns 1 on success and 0 on failure. // These correspond to the C11 // atomic_compare_exchange_strong() function. static inline int -_Py_atomic_compare_exchange_int(int *ptr, int *expected, int desired); +_Py_atomic_compare_exchange_int(int *obj, int *expected, int desired); static inline int -_Py_atomic_compare_exchange_int8(int8_t *ptr, int8_t *expected, int8_t desired); +_Py_atomic_compare_exchange_int8(int8_t *obj, int8_t *expected, int8_t desired); static inline int -_Py_atomic_compare_exchange_int16(int16_t *ptr, int16_t *expected, int16_t desired); +_Py_atomic_compare_exchange_int16(int16_t *obj, int16_t *expected, int16_t desired); static inline int -_Py_atomic_compare_exchange_int32(int32_t *ptr, int32_t *expected, int32_t desired); +_Py_atomic_compare_exchange_int32(int32_t *obj, int32_t *expected, int32_t desired); static inline int -_Py_atomic_compare_exchange_int64(int64_t *ptr, int64_t *expected, int64_t desired); +_Py_atomic_compare_exchange_int64(int64_t *obj, int64_t *expected, int64_t desired); static inline int -_Py_atomic_compare_exchange_intptr(intptr_t *ptr, intptr_t *expected, intptr_t desired); +_Py_atomic_compare_exchange_intptr(intptr_t *obj, intptr_t *expected, intptr_t desired); static inline int -_Py_atomic_compare_exchange_uint(unsigned int *ptr, unsigned int *expected, unsigned int desired); +_Py_atomic_compare_exchange_uint(unsigned int *obj, unsigned int *expected, unsigned int desired); static inline int -_Py_atomic_compare_exchange_uint8(uint8_t *ptr, uint8_t *expected, uint8_t desired); +_Py_atomic_compare_exchange_uint8(uint8_t *obj, uint8_t *expected, uint8_t desired); static inline int -_Py_atomic_compare_exchange_uint16(uint16_t *ptr, uint16_t *expected, uint16_t desired); +_Py_atomic_compare_exchange_uint16(uint16_t *obj, uint16_t *expected, uint16_t desired); static inline int -_Py_atomic_compare_exchange_uint32(uint32_t *ptr, uint32_t *expected, uint32_t desired); +_Py_atomic_compare_exchange_uint32(uint32_t *obj, uint32_t *expected, uint32_t desired); static inline int -_Py_atomic_compare_exchange_uint64(uint64_t *ptr, uint64_t *expected, uint64_t desired); +_Py_atomic_compare_exchange_uint64(uint64_t *obj, uint64_t *expected, uint64_t desired); static inline int -_Py_atomic_compare_exchange_uintptr(uintptr_t 
*ptr, uintptr_t *expected, uintptr_t desired); +_Py_atomic_compare_exchange_uintptr(uintptr_t *obj, uintptr_t *expected, uintptr_t desired); static inline int -_Py_atomic_compare_exchange_ssize(Py_ssize_t *ptr, Py_ssize_t *expected, Py_ssize_t desired); +_Py_atomic_compare_exchange_ssize(Py_ssize_t *obj, Py_ssize_t *expected, Py_ssize_t desired); -// NOTE: `ptr` and `expected` are logically `void**` types, but we use `void*` +// NOTE: `obj` and `expected` are logically `void**` types, but we use `void*` // so that we can pass types like `PyObject**` without a cast. static inline int -_Py_atomic_compare_exchange_ptr(void *ptr, void *expected, void *value); +_Py_atomic_compare_exchange_ptr(void *obj, void *expected, void *value); -// Atomically replaces `*ptr` with `value` and returns the previous value of `*ptr`. +// Atomically replaces `*obj` with `value` and returns the previous value of `*obj`. static inline int -_Py_atomic_exchange_int(int *ptr, int value); +_Py_atomic_exchange_int(int *obj, int value); static inline int8_t -_Py_atomic_exchange_int8(int8_t *ptr, int8_t value); +_Py_atomic_exchange_int8(int8_t *obj, int8_t value); static inline int16_t -_Py_atomic_exchange_int16(int16_t *ptr, int16_t value); +_Py_atomic_exchange_int16(int16_t *obj, int16_t value); static inline int32_t -_Py_atomic_exchange_int32(int32_t *ptr, int32_t value); +_Py_atomic_exchange_int32(int32_t *obj, int32_t value); static inline int64_t -_Py_atomic_exchange_int64(int64_t *ptr, int64_t value); +_Py_atomic_exchange_int64(int64_t *obj, int64_t value); static inline intptr_t -_Py_atomic_exchange_intptr(intptr_t *ptr, intptr_t value); +_Py_atomic_exchange_intptr(intptr_t *obj, intptr_t value); static inline unsigned int -_Py_atomic_exchange_uint(unsigned int *ptr, unsigned int value); +_Py_atomic_exchange_uint(unsigned int *obj, unsigned int value); static inline uint8_t -_Py_atomic_exchange_uint8(uint8_t *ptr, uint8_t value); +_Py_atomic_exchange_uint8(uint8_t *obj, uint8_t value); static inline uint16_t -_Py_atomic_exchange_uint16(uint16_t *ptr, uint16_t value); +_Py_atomic_exchange_uint16(uint16_t *obj, uint16_t value); static inline uint32_t -_Py_atomic_exchange_uint32(uint32_t *ptr, uint32_t value); +_Py_atomic_exchange_uint32(uint32_t *obj, uint32_t value); static inline uint64_t -_Py_atomic_exchange_uint64(uint64_t *ptr, uint64_t value); +_Py_atomic_exchange_uint64(uint64_t *obj, uint64_t value); static inline uintptr_t -_Py_atomic_exchange_uintptr(uintptr_t *ptr, uintptr_t value); +_Py_atomic_exchange_uintptr(uintptr_t *obj, uintptr_t value); static inline Py_ssize_t -_Py_atomic_exchange_ssize(Py_ssize_t *ptr, Py_ssize_t value); +_Py_atomic_exchange_ssize(Py_ssize_t *obj, Py_ssize_t value); static inline void * -_Py_atomic_exchange_ptr(void *ptr, void *value); +_Py_atomic_exchange_ptr(void *obj, void *value); -// Performs `*ptr &= value` atomically and returns the previous value of `*ptr`. +// Performs `*obj &= value` atomically and returns the previous value of `*obj`. 
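/*
 * Editor's illustration, not part of the patch: a minimal sketch of how the
 * compare-exchange contract documented above is typically used as a retry
 * loop. `add_saturating`, `counter`, `delta` and `limit` are hypothetical
 * names for this example only; the _Py_atomic_* calls are the ones declared
 * in this header.
 */
static inline void
add_saturating(int *counter, int delta, int limit)
{
    int old = _Py_atomic_load_int(counter);
    while (old < limit) {
        /* On failure the call writes the current value of *counter back
         * into `old`, so the loop re-checks the limit and retries. */
        if (_Py_atomic_compare_exchange_int(counter, &old, old + delta)) {
            break;
        }
    }
}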
static inline uint8_t -_Py_atomic_and_uint8(uint8_t *ptr, uint8_t value); +_Py_atomic_and_uint8(uint8_t *obj, uint8_t value); static inline uint16_t -_Py_atomic_and_uint16(uint16_t *ptr, uint16_t value); +_Py_atomic_and_uint16(uint16_t *obj, uint16_t value); static inline uint32_t -_Py_atomic_and_uint32(uint32_t *ptr, uint32_t value); +_Py_atomic_and_uint32(uint32_t *obj, uint32_t value); static inline uint64_t -_Py_atomic_and_uint64(uint64_t *ptr, uint64_t value); +_Py_atomic_and_uint64(uint64_t *obj, uint64_t value); static inline uintptr_t -_Py_atomic_and_uintptr(uintptr_t *ptr, uintptr_t value); +_Py_atomic_and_uintptr(uintptr_t *obj, uintptr_t value); -// Performs `*ptr |= value` atomically and returns the previous value of `*ptr`. +// Performs `*obj |= value` atomically and returns the previous value of `*obj`. static inline uint8_t -_Py_atomic_or_uint8(uint8_t *ptr, uint8_t value); +_Py_atomic_or_uint8(uint8_t *obj, uint8_t value); static inline uint16_t -_Py_atomic_or_uint16(uint16_t *ptr, uint16_t value); +_Py_atomic_or_uint16(uint16_t *obj, uint16_t value); static inline uint32_t -_Py_atomic_or_uint32(uint32_t *ptr, uint32_t value); +_Py_atomic_or_uint32(uint32_t *obj, uint32_t value); static inline uint64_t -_Py_atomic_or_uint64(uint64_t *ptr, uint64_t value); +_Py_atomic_or_uint64(uint64_t *obj, uint64_t value); static inline uintptr_t -_Py_atomic_or_uintptr(uintptr_t *ptr, uintptr_t value); +_Py_atomic_or_uintptr(uintptr_t *obj, uintptr_t value); -// Atomically loads `*ptr` (sequential consistency) +// Atomically loads `*obj` (sequential consistency) static inline int -_Py_atomic_load_int(const int *ptr); +_Py_atomic_load_int(const int *obj); static inline int8_t -_Py_atomic_load_int8(const int8_t *ptr); +_Py_atomic_load_int8(const int8_t *obj); static inline int16_t -_Py_atomic_load_int16(const int16_t *ptr); +_Py_atomic_load_int16(const int16_t *obj); static inline int32_t -_Py_atomic_load_int32(const int32_t *ptr); +_Py_atomic_load_int32(const int32_t *obj); static inline int64_t -_Py_atomic_load_int64(const int64_t *ptr); +_Py_atomic_load_int64(const int64_t *obj); static inline intptr_t -_Py_atomic_load_intptr(const intptr_t *ptr); +_Py_atomic_load_intptr(const intptr_t *obj); static inline uint8_t -_Py_atomic_load_uint8(const uint8_t *ptr); +_Py_atomic_load_uint8(const uint8_t *obj); static inline uint16_t -_Py_atomic_load_uint16(const uint16_t *ptr); +_Py_atomic_load_uint16(const uint16_t *obj); static inline uint32_t -_Py_atomic_load_uint32(const uint32_t *ptr); +_Py_atomic_load_uint32(const uint32_t *obj); static inline uint64_t -_Py_atomic_load_uint64(const uint64_t *ptr); +_Py_atomic_load_uint64(const uint64_t *obj); static inline uintptr_t -_Py_atomic_load_uintptr(const uintptr_t *ptr); +_Py_atomic_load_uintptr(const uintptr_t *obj); static inline unsigned int -_Py_atomic_load_uint(const unsigned int *ptr); +_Py_atomic_load_uint(const unsigned int *obj); static inline Py_ssize_t -_Py_atomic_load_ssize(const Py_ssize_t *ptr); +_Py_atomic_load_ssize(const Py_ssize_t *obj); static inline void * -_Py_atomic_load_ptr(const void *ptr); +_Py_atomic_load_ptr(const void *obj); -// Loads `*ptr` (relaxed consistency, i.e., no ordering) +// Loads `*obj` (relaxed consistency, i.e., no ordering) static inline int -_Py_atomic_load_int_relaxed(const int *ptr); +_Py_atomic_load_int_relaxed(const int *obj); static inline int8_t -_Py_atomic_load_int8_relaxed(const int8_t *ptr); +_Py_atomic_load_int8_relaxed(const int8_t *obj); static inline int16_t 
-_Py_atomic_load_int16_relaxed(const int16_t *ptr); +_Py_atomic_load_int16_relaxed(const int16_t *obj); static inline int32_t -_Py_atomic_load_int32_relaxed(const int32_t *ptr); +_Py_atomic_load_int32_relaxed(const int32_t *obj); static inline int64_t -_Py_atomic_load_int64_relaxed(const int64_t *ptr); +_Py_atomic_load_int64_relaxed(const int64_t *obj); static inline intptr_t -_Py_atomic_load_intptr_relaxed(const intptr_t *ptr); +_Py_atomic_load_intptr_relaxed(const intptr_t *obj); static inline uint8_t -_Py_atomic_load_uint8_relaxed(const uint8_t *ptr); +_Py_atomic_load_uint8_relaxed(const uint8_t *obj); static inline uint16_t -_Py_atomic_load_uint16_relaxed(const uint16_t *ptr); +_Py_atomic_load_uint16_relaxed(const uint16_t *obj); static inline uint32_t -_Py_atomic_load_uint32_relaxed(const uint32_t *ptr); +_Py_atomic_load_uint32_relaxed(const uint32_t *obj); static inline uint64_t -_Py_atomic_load_uint64_relaxed(const uint64_t *ptr); +_Py_atomic_load_uint64_relaxed(const uint64_t *obj); static inline uintptr_t -_Py_atomic_load_uintptr_relaxed(const uintptr_t *ptr); +_Py_atomic_load_uintptr_relaxed(const uintptr_t *obj); static inline unsigned int -_Py_atomic_load_uint_relaxed(const unsigned int *ptr); +_Py_atomic_load_uint_relaxed(const unsigned int *obj); static inline Py_ssize_t -_Py_atomic_load_ssize_relaxed(const Py_ssize_t *ptr); +_Py_atomic_load_ssize_relaxed(const Py_ssize_t *obj); static inline void * -_Py_atomic_load_ptr_relaxed(const void *ptr); +_Py_atomic_load_ptr_relaxed(const void *obj); -// Atomically performs `*ptr = value` (sequential consistency) +// Atomically performs `*obj = value` (sequential consistency) static inline void -_Py_atomic_store_int(int *ptr, int value); +_Py_atomic_store_int(int *obj, int value); static inline void -_Py_atomic_store_int8(int8_t *ptr, int8_t value); +_Py_atomic_store_int8(int8_t *obj, int8_t value); static inline void -_Py_atomic_store_int16(int16_t *ptr, int16_t value); +_Py_atomic_store_int16(int16_t *obj, int16_t value); static inline void -_Py_atomic_store_int32(int32_t *ptr, int32_t value); +_Py_atomic_store_int32(int32_t *obj, int32_t value); static inline void -_Py_atomic_store_int64(int64_t *ptr, int64_t value); +_Py_atomic_store_int64(int64_t *obj, int64_t value); static inline void -_Py_atomic_store_intptr(intptr_t *ptr, intptr_t value); +_Py_atomic_store_intptr(intptr_t *obj, intptr_t value); static inline void -_Py_atomic_store_uint8(uint8_t *ptr, uint8_t value); +_Py_atomic_store_uint8(uint8_t *obj, uint8_t value); static inline void -_Py_atomic_store_uint16(uint16_t *ptr, uint16_t value); +_Py_atomic_store_uint16(uint16_t *obj, uint16_t value); static inline void -_Py_atomic_store_uint32(uint32_t *ptr, uint32_t value); +_Py_atomic_store_uint32(uint32_t *obj, uint32_t value); static inline void -_Py_atomic_store_uint64(uint64_t *ptr, uint64_t value); +_Py_atomic_store_uint64(uint64_t *obj, uint64_t value); static inline void -_Py_atomic_store_uintptr(uintptr_t *ptr, uintptr_t value); +_Py_atomic_store_uintptr(uintptr_t *obj, uintptr_t value); static inline void -_Py_atomic_store_uint(unsigned int *ptr, unsigned int value); +_Py_atomic_store_uint(unsigned int *obj, unsigned int value); static inline void -_Py_atomic_store_ptr(void *ptr, void *value); +_Py_atomic_store_ptr(void *obj, void *value); static inline void -_Py_atomic_store_ssize(Py_ssize_t* ptr, Py_ssize_t value); +_Py_atomic_store_ssize(Py_ssize_t* obj, Py_ssize_t value); -// Stores `*ptr = value` (relaxed consistency, i.e., no ordering) +// Stores `*obj = 
value` (relaxed consistency, i.e., no ordering) static inline void -_Py_atomic_store_int_relaxed(int *ptr, int value); +_Py_atomic_store_int_relaxed(int *obj, int value); static inline void -_Py_atomic_store_int8_relaxed(int8_t *ptr, int8_t value); +_Py_atomic_store_int8_relaxed(int8_t *obj, int8_t value); static inline void -_Py_atomic_store_int16_relaxed(int16_t *ptr, int16_t value); +_Py_atomic_store_int16_relaxed(int16_t *obj, int16_t value); static inline void -_Py_atomic_store_int32_relaxed(int32_t *ptr, int32_t value); +_Py_atomic_store_int32_relaxed(int32_t *obj, int32_t value); static inline void -_Py_atomic_store_int64_relaxed(int64_t *ptr, int64_t value); +_Py_atomic_store_int64_relaxed(int64_t *obj, int64_t value); static inline void -_Py_atomic_store_intptr_relaxed(intptr_t *ptr, intptr_t value); +_Py_atomic_store_intptr_relaxed(intptr_t *obj, intptr_t value); static inline void -_Py_atomic_store_uint8_relaxed(uint8_t* ptr, uint8_t value); +_Py_atomic_store_uint8_relaxed(uint8_t* obj, uint8_t value); static inline void -_Py_atomic_store_uint16_relaxed(uint16_t *ptr, uint16_t value); +_Py_atomic_store_uint16_relaxed(uint16_t *obj, uint16_t value); static inline void -_Py_atomic_store_uint32_relaxed(uint32_t *ptr, uint32_t value); +_Py_atomic_store_uint32_relaxed(uint32_t *obj, uint32_t value); static inline void -_Py_atomic_store_uint64_relaxed(uint64_t *ptr, uint64_t value); +_Py_atomic_store_uint64_relaxed(uint64_t *obj, uint64_t value); static inline void -_Py_atomic_store_uintptr_relaxed(uintptr_t *ptr, uintptr_t value); +_Py_atomic_store_uintptr_relaxed(uintptr_t *obj, uintptr_t value); static inline void -_Py_atomic_store_uint_relaxed(unsigned int *ptr, unsigned int value); +_Py_atomic_store_uint_relaxed(unsigned int *obj, unsigned int value); static inline void -_Py_atomic_store_ptr_relaxed(void *ptr, void *value); +_Py_atomic_store_ptr_relaxed(void *obj, void *value); static inline void -_Py_atomic_store_ssize_relaxed(Py_ssize_t *ptr, Py_ssize_t value); +_Py_atomic_store_ssize_relaxed(Py_ssize_t *obj, Py_ssize_t value); -// Loads `*ptr` (acquire operation) +// Loads `*obj` (acquire operation) static inline void * -_Py_atomic_load_ptr_acquire(const void *ptr); +_Py_atomic_load_ptr_acquire(const void *obj); -// Stores `*ptr = value` (release operation) +// Stores `*obj = value` (release operation) static inline void -_Py_atomic_store_ptr_release(void *ptr, void *value); +_Py_atomic_store_ptr_release(void *obj, void *value); // Sequential consistency fence. C11 fences have complex semantics. 
When diff --git a/Include/cpython/pyatomic_gcc.h b/Include/cpython/pyatomic_gcc.h index 356b62a70a598d..3ae2719d10de5c 100644 --- a/Include/cpython/pyatomic_gcc.h +++ b/Include/cpython/pyatomic_gcc.h @@ -8,674 +8,674 @@ #endif static inline int -_Py_atomic_add_int(int *ptr, int value) +_Py_atomic_add_int(int *obj, int value) { - return __atomic_fetch_add(ptr, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_add(obj, value, __ATOMIC_SEQ_CST); } static inline int8_t -_Py_atomic_add_int8(int8_t *ptr, int8_t value) +_Py_atomic_add_int8(int8_t *obj, int8_t value) { - return __atomic_fetch_add(ptr, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_add(obj, value, __ATOMIC_SEQ_CST); } static inline int16_t -_Py_atomic_add_int16(int16_t *ptr, int16_t value) +_Py_atomic_add_int16(int16_t *obj, int16_t value) { - return __atomic_fetch_add(ptr, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_add(obj, value, __ATOMIC_SEQ_CST); } static inline int32_t -_Py_atomic_add_int32(int32_t *ptr, int32_t value) +_Py_atomic_add_int32(int32_t *obj, int32_t value) { - return __atomic_fetch_add(ptr, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_add(obj, value, __ATOMIC_SEQ_CST); } static inline int64_t -_Py_atomic_add_int64(int64_t *ptr, int64_t value) +_Py_atomic_add_int64(int64_t *obj, int64_t value) { - return __atomic_fetch_add(ptr, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_add(obj, value, __ATOMIC_SEQ_CST); } static inline intptr_t -_Py_atomic_add_intptr(intptr_t *ptr, intptr_t value) +_Py_atomic_add_intptr(intptr_t *obj, intptr_t value) { - return __atomic_fetch_add(ptr, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_add(obj, value, __ATOMIC_SEQ_CST); } static inline unsigned int -_Py_atomic_add_uint(unsigned int *ptr, unsigned int value) +_Py_atomic_add_uint(unsigned int *obj, unsigned int value) { - return __atomic_fetch_add(ptr, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_add(obj, value, __ATOMIC_SEQ_CST); } static inline uint8_t -_Py_atomic_add_uint8(uint8_t *ptr, uint8_t value) +_Py_atomic_add_uint8(uint8_t *obj, uint8_t value) { - return __atomic_fetch_add(ptr, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_add(obj, value, __ATOMIC_SEQ_CST); } static inline uint16_t -_Py_atomic_add_uint16(uint16_t *ptr, uint16_t value) +_Py_atomic_add_uint16(uint16_t *obj, uint16_t value) { - return __atomic_fetch_add(ptr, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_add(obj, value, __ATOMIC_SEQ_CST); } static inline uint32_t -_Py_atomic_add_uint32(uint32_t *ptr, uint32_t value) +_Py_atomic_add_uint32(uint32_t *obj, uint32_t value) { - return __atomic_fetch_add(ptr, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_add(obj, value, __ATOMIC_SEQ_CST); } static inline uint64_t -_Py_atomic_add_uint64(uint64_t *ptr, uint64_t value) +_Py_atomic_add_uint64(uint64_t *obj, uint64_t value) { - return __atomic_fetch_add(ptr, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_add(obj, value, __ATOMIC_SEQ_CST); } static inline uintptr_t -_Py_atomic_add_uintptr(uintptr_t *ptr, uintptr_t value) +_Py_atomic_add_uintptr(uintptr_t *obj, uintptr_t value) { - return __atomic_fetch_add(ptr, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_add(obj, value, __ATOMIC_SEQ_CST); } static inline Py_ssize_t -_Py_atomic_add_ssize(Py_ssize_t *ptr, Py_ssize_t value) +_Py_atomic_add_ssize(Py_ssize_t *obj, Py_ssize_t value) { - return __atomic_fetch_add(ptr, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_add(obj, value, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_compare_exchange_int(int *ptr, int *expected, int 
desired) +_Py_atomic_compare_exchange_int(int *obj, int *expected, int desired) { - return __atomic_compare_exchange_n(ptr, expected, desired, 0, + return __atomic_compare_exchange_n(obj, expected, desired, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_compare_exchange_int8(int8_t *ptr, int8_t *expected, int8_t desired) +_Py_atomic_compare_exchange_int8(int8_t *obj, int8_t *expected, int8_t desired) { - return __atomic_compare_exchange_n(ptr, expected, desired, 0, + return __atomic_compare_exchange_n(obj, expected, desired, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_compare_exchange_int16(int16_t *ptr, int16_t *expected, int16_t desired) +_Py_atomic_compare_exchange_int16(int16_t *obj, int16_t *expected, int16_t desired) { - return __atomic_compare_exchange_n(ptr, expected, desired, 0, + return __atomic_compare_exchange_n(obj, expected, desired, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_compare_exchange_int32(int32_t *ptr, int32_t *expected, int32_t desired) +_Py_atomic_compare_exchange_int32(int32_t *obj, int32_t *expected, int32_t desired) { - return __atomic_compare_exchange_n(ptr, expected, desired, 0, + return __atomic_compare_exchange_n(obj, expected, desired, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_compare_exchange_int64(int64_t *ptr, int64_t *expected, int64_t desired) +_Py_atomic_compare_exchange_int64(int64_t *obj, int64_t *expected, int64_t desired) { - return __atomic_compare_exchange_n(ptr, expected, desired, 0, + return __atomic_compare_exchange_n(obj, expected, desired, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_compare_exchange_intptr(intptr_t *ptr, intptr_t *expected, intptr_t desired) +_Py_atomic_compare_exchange_intptr(intptr_t *obj, intptr_t *expected, intptr_t desired) { - return __atomic_compare_exchange_n(ptr, expected, desired, 0, + return __atomic_compare_exchange_n(obj, expected, desired, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_compare_exchange_uint(unsigned int *ptr, unsigned int *expected, unsigned int desired) +_Py_atomic_compare_exchange_uint(unsigned int *obj, unsigned int *expected, unsigned int desired) { - return __atomic_compare_exchange_n(ptr, expected, desired, 0, + return __atomic_compare_exchange_n(obj, expected, desired, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_compare_exchange_uint8(uint8_t *ptr, uint8_t *expected, uint8_t desired) +_Py_atomic_compare_exchange_uint8(uint8_t *obj, uint8_t *expected, uint8_t desired) { - return __atomic_compare_exchange_n(ptr, expected, desired, 0, + return __atomic_compare_exchange_n(obj, expected, desired, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_compare_exchange_uint16(uint16_t *ptr, uint16_t *expected, uint16_t desired) +_Py_atomic_compare_exchange_uint16(uint16_t *obj, uint16_t *expected, uint16_t desired) { - return __atomic_compare_exchange_n(ptr, expected, desired, 0, + return __atomic_compare_exchange_n(obj, expected, desired, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_compare_exchange_uint32(uint32_t *ptr, uint32_t *expected, uint32_t desired) +_Py_atomic_compare_exchange_uint32(uint32_t *obj, uint32_t *expected, uint32_t desired) { - return __atomic_compare_exchange_n(ptr, expected, desired, 0, + return __atomic_compare_exchange_n(obj, expected, desired, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int 
-_Py_atomic_compare_exchange_uint64(uint64_t *ptr, uint64_t *expected, uint64_t desired) +_Py_atomic_compare_exchange_uint64(uint64_t *obj, uint64_t *expected, uint64_t desired) { - return __atomic_compare_exchange_n(ptr, expected, desired, 0, + return __atomic_compare_exchange_n(obj, expected, desired, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_compare_exchange_uintptr(uintptr_t *ptr, uintptr_t *expected, uintptr_t desired) +_Py_atomic_compare_exchange_uintptr(uintptr_t *obj, uintptr_t *expected, uintptr_t desired) { - return __atomic_compare_exchange_n(ptr, expected, desired, 0, + return __atomic_compare_exchange_n(obj, expected, desired, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_compare_exchange_ssize(Py_ssize_t *ptr, Py_ssize_t *expected, Py_ssize_t desired) +_Py_atomic_compare_exchange_ssize(Py_ssize_t *obj, Py_ssize_t *expected, Py_ssize_t desired) { - return __atomic_compare_exchange_n(ptr, expected, desired, 0, + return __atomic_compare_exchange_n(obj, expected, desired, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_compare_exchange_ptr(void *ptr, void *expected, void *desired) +_Py_atomic_compare_exchange_ptr(void *obj, void *expected, void *desired) { - return __atomic_compare_exchange_n((void **)ptr, (void **)expected, desired, 0, + return __atomic_compare_exchange_n((void **)obj, (void **)expected, desired, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_exchange_int(int *ptr, int value) +_Py_atomic_exchange_int(int *obj, int value) { - return __atomic_exchange_n(ptr, value, __ATOMIC_SEQ_CST); + return __atomic_exchange_n(obj, value, __ATOMIC_SEQ_CST); } static inline int8_t -_Py_atomic_exchange_int8(int8_t *ptr, int8_t value) +_Py_atomic_exchange_int8(int8_t *obj, int8_t value) { - return __atomic_exchange_n(ptr, value, __ATOMIC_SEQ_CST); + return __atomic_exchange_n(obj, value, __ATOMIC_SEQ_CST); } static inline int16_t -_Py_atomic_exchange_int16(int16_t *ptr, int16_t value) +_Py_atomic_exchange_int16(int16_t *obj, int16_t value) { - return __atomic_exchange_n(ptr, value, __ATOMIC_SEQ_CST); + return __atomic_exchange_n(obj, value, __ATOMIC_SEQ_CST); } static inline int32_t -_Py_atomic_exchange_int32(int32_t *ptr, int32_t value) +_Py_atomic_exchange_int32(int32_t *obj, int32_t value) { - return __atomic_exchange_n(ptr, value, __ATOMIC_SEQ_CST); + return __atomic_exchange_n(obj, value, __ATOMIC_SEQ_CST); } static inline int64_t -_Py_atomic_exchange_int64(int64_t *ptr, int64_t value) +_Py_atomic_exchange_int64(int64_t *obj, int64_t value) { - return __atomic_exchange_n(ptr, value, __ATOMIC_SEQ_CST); + return __atomic_exchange_n(obj, value, __ATOMIC_SEQ_CST); } static inline intptr_t -_Py_atomic_exchange_intptr(intptr_t *ptr, intptr_t value) +_Py_atomic_exchange_intptr(intptr_t *obj, intptr_t value) { - return __atomic_exchange_n(ptr, value, __ATOMIC_SEQ_CST); + return __atomic_exchange_n(obj, value, __ATOMIC_SEQ_CST); } static inline unsigned int -_Py_atomic_exchange_uint(unsigned int *ptr, unsigned int value) +_Py_atomic_exchange_uint(unsigned int *obj, unsigned int value) { - return __atomic_exchange_n(ptr, value, __ATOMIC_SEQ_CST); + return __atomic_exchange_n(obj, value, __ATOMIC_SEQ_CST); } static inline uint8_t -_Py_atomic_exchange_uint8(uint8_t *ptr, uint8_t value) +_Py_atomic_exchange_uint8(uint8_t *obj, uint8_t value) { - return __atomic_exchange_n(ptr, value, __ATOMIC_SEQ_CST); + return __atomic_exchange_n(obj, value, __ATOMIC_SEQ_CST); } static inline uint16_t 
-_Py_atomic_exchange_uint16(uint16_t *ptr, uint16_t value) +_Py_atomic_exchange_uint16(uint16_t *obj, uint16_t value) { - return __atomic_exchange_n(ptr, value, __ATOMIC_SEQ_CST); + return __atomic_exchange_n(obj, value, __ATOMIC_SEQ_CST); } static inline uint32_t -_Py_atomic_exchange_uint32(uint32_t *ptr, uint32_t value) +_Py_atomic_exchange_uint32(uint32_t *obj, uint32_t value) { - return __atomic_exchange_n(ptr, value, __ATOMIC_SEQ_CST); + return __atomic_exchange_n(obj, value, __ATOMIC_SEQ_CST); } static inline uint64_t -_Py_atomic_exchange_uint64(uint64_t *ptr, uint64_t value) +_Py_atomic_exchange_uint64(uint64_t *obj, uint64_t value) { - return __atomic_exchange_n(ptr, value, __ATOMIC_SEQ_CST); + return __atomic_exchange_n(obj, value, __ATOMIC_SEQ_CST); } static inline uintptr_t -_Py_atomic_exchange_uintptr(uintptr_t *ptr, uintptr_t value) +_Py_atomic_exchange_uintptr(uintptr_t *obj, uintptr_t value) { - return __atomic_exchange_n(ptr, value, __ATOMIC_SEQ_CST); + return __atomic_exchange_n(obj, value, __ATOMIC_SEQ_CST); } static inline Py_ssize_t -_Py_atomic_exchange_ssize(Py_ssize_t *ptr, Py_ssize_t value) +_Py_atomic_exchange_ssize(Py_ssize_t *obj, Py_ssize_t value) { - return __atomic_exchange_n(ptr, value, __ATOMIC_SEQ_CST); + return __atomic_exchange_n(obj, value, __ATOMIC_SEQ_CST); } static inline void * -_Py_atomic_exchange_ptr(void *ptr, void *value) +_Py_atomic_exchange_ptr(void *obj, void *value) { - return __atomic_exchange_n((void **)ptr, value, __ATOMIC_SEQ_CST); + return __atomic_exchange_n((void **)obj, value, __ATOMIC_SEQ_CST); } static inline uint8_t -_Py_atomic_and_uint8(uint8_t *ptr, uint8_t value) +_Py_atomic_and_uint8(uint8_t *obj, uint8_t value) { - return __atomic_fetch_and(ptr, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_and(obj, value, __ATOMIC_SEQ_CST); } static inline uint16_t -_Py_atomic_and_uint16(uint16_t *ptr, uint16_t value) +_Py_atomic_and_uint16(uint16_t *obj, uint16_t value) { - return __atomic_fetch_and(ptr, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_and(obj, value, __ATOMIC_SEQ_CST); } static inline uint32_t -_Py_atomic_and_uint32(uint32_t *ptr, uint32_t value) +_Py_atomic_and_uint32(uint32_t *obj, uint32_t value) { - return __atomic_fetch_and(ptr, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_and(obj, value, __ATOMIC_SEQ_CST); } static inline uint64_t -_Py_atomic_and_uint64(uint64_t *ptr, uint64_t value) +_Py_atomic_and_uint64(uint64_t *obj, uint64_t value) { - return __atomic_fetch_and(ptr, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_and(obj, value, __ATOMIC_SEQ_CST); } static inline uintptr_t -_Py_atomic_and_uintptr(uintptr_t *ptr, uintptr_t value) +_Py_atomic_and_uintptr(uintptr_t *obj, uintptr_t value) { - return __atomic_fetch_and(ptr, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_and(obj, value, __ATOMIC_SEQ_CST); } static inline uint8_t -_Py_atomic_or_uint8(uint8_t *ptr, uint8_t value) +_Py_atomic_or_uint8(uint8_t *obj, uint8_t value) { - return __atomic_fetch_or(ptr, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_or(obj, value, __ATOMIC_SEQ_CST); } static inline uint16_t -_Py_atomic_or_uint16(uint16_t *ptr, uint16_t value) +_Py_atomic_or_uint16(uint16_t *obj, uint16_t value) { - return __atomic_fetch_or(ptr, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_or(obj, value, __ATOMIC_SEQ_CST); } static inline uint32_t -_Py_atomic_or_uint32(uint32_t *ptr, uint32_t value) +_Py_atomic_or_uint32(uint32_t *obj, uint32_t value) { - return __atomic_fetch_or(ptr, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_or(obj, 
value, __ATOMIC_SEQ_CST); } static inline uint64_t -_Py_atomic_or_uint64(uint64_t *ptr, uint64_t value) +_Py_atomic_or_uint64(uint64_t *obj, uint64_t value) { - return __atomic_fetch_or(ptr, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_or(obj, value, __ATOMIC_SEQ_CST); } static inline uintptr_t -_Py_atomic_or_uintptr(uintptr_t *ptr, uintptr_t value) +_Py_atomic_or_uintptr(uintptr_t *obj, uintptr_t value) { - return __atomic_fetch_or(ptr, value, __ATOMIC_SEQ_CST); + return __atomic_fetch_or(obj, value, __ATOMIC_SEQ_CST); } static inline int -_Py_atomic_load_int(const int *ptr) +_Py_atomic_load_int(const int *obj) { - return __atomic_load_n(ptr, __ATOMIC_SEQ_CST); + return __atomic_load_n(obj, __ATOMIC_SEQ_CST); } static inline int8_t -_Py_atomic_load_int8(const int8_t *ptr) +_Py_atomic_load_int8(const int8_t *obj) { - return __atomic_load_n(ptr, __ATOMIC_SEQ_CST); + return __atomic_load_n(obj, __ATOMIC_SEQ_CST); } static inline int16_t -_Py_atomic_load_int16(const int16_t *ptr) +_Py_atomic_load_int16(const int16_t *obj) { - return __atomic_load_n(ptr, __ATOMIC_SEQ_CST); + return __atomic_load_n(obj, __ATOMIC_SEQ_CST); } static inline int32_t -_Py_atomic_load_int32(const int32_t *ptr) +_Py_atomic_load_int32(const int32_t *obj) { - return __atomic_load_n(ptr, __ATOMIC_SEQ_CST); + return __atomic_load_n(obj, __ATOMIC_SEQ_CST); } static inline int64_t -_Py_atomic_load_int64(const int64_t *ptr) +_Py_atomic_load_int64(const int64_t *obj) { - return __atomic_load_n(ptr, __ATOMIC_SEQ_CST); + return __atomic_load_n(obj, __ATOMIC_SEQ_CST); } static inline intptr_t -_Py_atomic_load_intptr(const intptr_t *ptr) +_Py_atomic_load_intptr(const intptr_t *obj) { - return __atomic_load_n(ptr, __ATOMIC_SEQ_CST); + return __atomic_load_n(obj, __ATOMIC_SEQ_CST); } static inline uint8_t -_Py_atomic_load_uint8(const uint8_t *ptr) +_Py_atomic_load_uint8(const uint8_t *obj) { - return __atomic_load_n(ptr, __ATOMIC_SEQ_CST); + return __atomic_load_n(obj, __ATOMIC_SEQ_CST); } static inline uint16_t -_Py_atomic_load_uint16(const uint16_t *ptr) +_Py_atomic_load_uint16(const uint16_t *obj) { - return __atomic_load_n(ptr, __ATOMIC_SEQ_CST); + return __atomic_load_n(obj, __ATOMIC_SEQ_CST); } static inline uint32_t -_Py_atomic_load_uint32(const uint32_t *ptr) +_Py_atomic_load_uint32(const uint32_t *obj) { - return __atomic_load_n(ptr, __ATOMIC_SEQ_CST); + return __atomic_load_n(obj, __ATOMIC_SEQ_CST); } static inline uint64_t -_Py_atomic_load_uint64(const uint64_t *ptr) +_Py_atomic_load_uint64(const uint64_t *obj) { - return __atomic_load_n(ptr, __ATOMIC_SEQ_CST); + return __atomic_load_n(obj, __ATOMIC_SEQ_CST); } static inline uintptr_t -_Py_atomic_load_uintptr(const uintptr_t *ptr) +_Py_atomic_load_uintptr(const uintptr_t *obj) { - return __atomic_load_n(ptr, __ATOMIC_SEQ_CST); + return __atomic_load_n(obj, __ATOMIC_SEQ_CST); } static inline unsigned int -_Py_atomic_load_uint(const unsigned int *ptr) +_Py_atomic_load_uint(const unsigned int *obj) { - return __atomic_load_n(ptr, __ATOMIC_SEQ_CST); + return __atomic_load_n(obj, __ATOMIC_SEQ_CST); } static inline Py_ssize_t -_Py_atomic_load_ssize(const Py_ssize_t *ptr) +_Py_atomic_load_ssize(const Py_ssize_t *obj) { - return __atomic_load_n(ptr, __ATOMIC_SEQ_CST); + return __atomic_load_n(obj, __ATOMIC_SEQ_CST); } static inline void * -_Py_atomic_load_ptr(const void *ptr) +_Py_atomic_load_ptr(const void *obj) { - return (void *)__atomic_load_n((void **)ptr, __ATOMIC_SEQ_CST); + return (void *)__atomic_load_n((void **)obj, __ATOMIC_SEQ_CST); } static inline int 
-_Py_atomic_load_int_relaxed(const int *ptr) +_Py_atomic_load_int_relaxed(const int *obj) { - return __atomic_load_n(ptr, __ATOMIC_RELAXED); + return __atomic_load_n(obj, __ATOMIC_RELAXED); } static inline int8_t -_Py_atomic_load_int8_relaxed(const int8_t *ptr) +_Py_atomic_load_int8_relaxed(const int8_t *obj) { - return __atomic_load_n(ptr, __ATOMIC_RELAXED); + return __atomic_load_n(obj, __ATOMIC_RELAXED); } static inline int16_t -_Py_atomic_load_int16_relaxed(const int16_t *ptr) +_Py_atomic_load_int16_relaxed(const int16_t *obj) { - return __atomic_load_n(ptr, __ATOMIC_RELAXED); + return __atomic_load_n(obj, __ATOMIC_RELAXED); } static inline int32_t -_Py_atomic_load_int32_relaxed(const int32_t *ptr) +_Py_atomic_load_int32_relaxed(const int32_t *obj) { - return __atomic_load_n(ptr, __ATOMIC_RELAXED); + return __atomic_load_n(obj, __ATOMIC_RELAXED); } static inline int64_t -_Py_atomic_load_int64_relaxed(const int64_t *ptr) +_Py_atomic_load_int64_relaxed(const int64_t *obj) { - return __atomic_load_n(ptr, __ATOMIC_RELAXED); + return __atomic_load_n(obj, __ATOMIC_RELAXED); } static inline intptr_t -_Py_atomic_load_intptr_relaxed(const intptr_t *ptr) +_Py_atomic_load_intptr_relaxed(const intptr_t *obj) { - return __atomic_load_n(ptr, __ATOMIC_RELAXED); + return __atomic_load_n(obj, __ATOMIC_RELAXED); } static inline uint8_t -_Py_atomic_load_uint8_relaxed(const uint8_t *ptr) +_Py_atomic_load_uint8_relaxed(const uint8_t *obj) { - return __atomic_load_n(ptr, __ATOMIC_RELAXED); + return __atomic_load_n(obj, __ATOMIC_RELAXED); } static inline uint16_t -_Py_atomic_load_uint16_relaxed(const uint16_t *ptr) +_Py_atomic_load_uint16_relaxed(const uint16_t *obj) { - return __atomic_load_n(ptr, __ATOMIC_RELAXED); + return __atomic_load_n(obj, __ATOMIC_RELAXED); } static inline uint32_t -_Py_atomic_load_uint32_relaxed(const uint32_t *ptr) +_Py_atomic_load_uint32_relaxed(const uint32_t *obj) { - return __atomic_load_n(ptr, __ATOMIC_RELAXED); + return __atomic_load_n(obj, __ATOMIC_RELAXED); } static inline uint64_t -_Py_atomic_load_uint64_relaxed(const uint64_t *ptr) +_Py_atomic_load_uint64_relaxed(const uint64_t *obj) { - return __atomic_load_n(ptr, __ATOMIC_RELAXED); + return __atomic_load_n(obj, __ATOMIC_RELAXED); } static inline uintptr_t -_Py_atomic_load_uintptr_relaxed(const uintptr_t *ptr) +_Py_atomic_load_uintptr_relaxed(const uintptr_t *obj) { - return __atomic_load_n(ptr, __ATOMIC_RELAXED); + return __atomic_load_n(obj, __ATOMIC_RELAXED); } static inline unsigned int -_Py_atomic_load_uint_relaxed(const unsigned int *ptr) +_Py_atomic_load_uint_relaxed(const unsigned int *obj) { - return __atomic_load_n(ptr, __ATOMIC_RELAXED); + return __atomic_load_n(obj, __ATOMIC_RELAXED); } static inline Py_ssize_t -_Py_atomic_load_ssize_relaxed(const Py_ssize_t *ptr) +_Py_atomic_load_ssize_relaxed(const Py_ssize_t *obj) { - return __atomic_load_n(ptr, __ATOMIC_RELAXED); + return __atomic_load_n(obj, __ATOMIC_RELAXED); } static inline void * -_Py_atomic_load_ptr_relaxed(const void *ptr) +_Py_atomic_load_ptr_relaxed(const void *obj) { - return (void *)__atomic_load_n((const void **)ptr, __ATOMIC_RELAXED); + return (void *)__atomic_load_n((const void **)obj, __ATOMIC_RELAXED); } static inline void -_Py_atomic_store_int(int *ptr, int value) +_Py_atomic_store_int(int *obj, int value) { - __atomic_store_n(ptr, value, __ATOMIC_SEQ_CST); + __atomic_store_n(obj, value, __ATOMIC_SEQ_CST); } static inline void -_Py_atomic_store_int8(int8_t *ptr, int8_t value) +_Py_atomic_store_int8(int8_t *obj, int8_t value) { - 
__atomic_store_n(ptr, value, __ATOMIC_SEQ_CST); + __atomic_store_n(obj, value, __ATOMIC_SEQ_CST); } static inline void -_Py_atomic_store_int16(int16_t *ptr, int16_t value) +_Py_atomic_store_int16(int16_t *obj, int16_t value) { - __atomic_store_n(ptr, value, __ATOMIC_SEQ_CST); + __atomic_store_n(obj, value, __ATOMIC_SEQ_CST); } static inline void -_Py_atomic_store_int32(int32_t *ptr, int32_t value) +_Py_atomic_store_int32(int32_t *obj, int32_t value) { - __atomic_store_n(ptr, value, __ATOMIC_SEQ_CST); + __atomic_store_n(obj, value, __ATOMIC_SEQ_CST); } static inline void -_Py_atomic_store_int64(int64_t *ptr, int64_t value) +_Py_atomic_store_int64(int64_t *obj, int64_t value) { - __atomic_store_n(ptr, value, __ATOMIC_SEQ_CST); + __atomic_store_n(obj, value, __ATOMIC_SEQ_CST); } static inline void -_Py_atomic_store_intptr(intptr_t *ptr, intptr_t value) +_Py_atomic_store_intptr(intptr_t *obj, intptr_t value) { - __atomic_store_n(ptr, value, __ATOMIC_SEQ_CST); + __atomic_store_n(obj, value, __ATOMIC_SEQ_CST); } static inline void -_Py_atomic_store_uint8(uint8_t *ptr, uint8_t value) +_Py_atomic_store_uint8(uint8_t *obj, uint8_t value) { - __atomic_store_n(ptr, value, __ATOMIC_SEQ_CST); + __atomic_store_n(obj, value, __ATOMIC_SEQ_CST); } static inline void -_Py_atomic_store_uint16(uint16_t *ptr, uint16_t value) +_Py_atomic_store_uint16(uint16_t *obj, uint16_t value) { - __atomic_store_n(ptr, value, __ATOMIC_SEQ_CST); + __atomic_store_n(obj, value, __ATOMIC_SEQ_CST); } static inline void -_Py_atomic_store_uint32(uint32_t *ptr, uint32_t value) +_Py_atomic_store_uint32(uint32_t *obj, uint32_t value) { - __atomic_store_n(ptr, value, __ATOMIC_SEQ_CST); + __atomic_store_n(obj, value, __ATOMIC_SEQ_CST); } static inline void -_Py_atomic_store_uint64(uint64_t *ptr, uint64_t value) +_Py_atomic_store_uint64(uint64_t *obj, uint64_t value) { - __atomic_store_n(ptr, value, __ATOMIC_SEQ_CST); + __atomic_store_n(obj, value, __ATOMIC_SEQ_CST); } static inline void -_Py_atomic_store_uintptr(uintptr_t *ptr, uintptr_t value) +_Py_atomic_store_uintptr(uintptr_t *obj, uintptr_t value) { - __atomic_store_n(ptr, value, __ATOMIC_SEQ_CST); + __atomic_store_n(obj, value, __ATOMIC_SEQ_CST); } static inline void -_Py_atomic_store_uint(unsigned int *ptr, unsigned int value) +_Py_atomic_store_uint(unsigned int *obj, unsigned int value) { - __atomic_store_n(ptr, value, __ATOMIC_SEQ_CST); + __atomic_store_n(obj, value, __ATOMIC_SEQ_CST); } static inline void -_Py_atomic_store_ptr(void *ptr, void *value) +_Py_atomic_store_ptr(void *obj, void *value) { - __atomic_store_n((void **)ptr, value, __ATOMIC_SEQ_CST); + __atomic_store_n((void **)obj, value, __ATOMIC_SEQ_CST); } static inline void -_Py_atomic_store_ssize(Py_ssize_t *ptr, Py_ssize_t value) +_Py_atomic_store_ssize(Py_ssize_t *obj, Py_ssize_t value) { - __atomic_store_n(ptr, value, __ATOMIC_SEQ_CST); + __atomic_store_n(obj, value, __ATOMIC_SEQ_CST); } static inline void -_Py_atomic_store_int_relaxed(int *ptr, int value) +_Py_atomic_store_int_relaxed(int *obj, int value) { - __atomic_store_n(ptr, value, __ATOMIC_RELAXED); + __atomic_store_n(obj, value, __ATOMIC_RELAXED); } static inline void -_Py_atomic_store_int8_relaxed(int8_t *ptr, int8_t value) +_Py_atomic_store_int8_relaxed(int8_t *obj, int8_t value) { - __atomic_store_n(ptr, value, __ATOMIC_RELAXED); + __atomic_store_n(obj, value, __ATOMIC_RELAXED); } static inline void -_Py_atomic_store_int16_relaxed(int16_t *ptr, int16_t value) +_Py_atomic_store_int16_relaxed(int16_t *obj, int16_t value) { - __atomic_store_n(ptr, 
value, __ATOMIC_RELAXED); + __atomic_store_n(obj, value, __ATOMIC_RELAXED); } static inline void -_Py_atomic_store_int32_relaxed(int32_t *ptr, int32_t value) +_Py_atomic_store_int32_relaxed(int32_t *obj, int32_t value) { - __atomic_store_n(ptr, value, __ATOMIC_RELAXED); + __atomic_store_n(obj, value, __ATOMIC_RELAXED); } static inline void -_Py_atomic_store_int64_relaxed(int64_t *ptr, int64_t value) +_Py_atomic_store_int64_relaxed(int64_t *obj, int64_t value) { - __atomic_store_n(ptr, value, __ATOMIC_RELAXED); + __atomic_store_n(obj, value, __ATOMIC_RELAXED); } static inline void -_Py_atomic_store_intptr_relaxed(intptr_t *ptr, intptr_t value) +_Py_atomic_store_intptr_relaxed(intptr_t *obj, intptr_t value) { - __atomic_store_n(ptr, value, __ATOMIC_RELAXED); + __atomic_store_n(obj, value, __ATOMIC_RELAXED); } static inline void -_Py_atomic_store_uint8_relaxed(uint8_t *ptr, uint8_t value) +_Py_atomic_store_uint8_relaxed(uint8_t *obj, uint8_t value) { - __atomic_store_n(ptr, value, __ATOMIC_RELAXED); + __atomic_store_n(obj, value, __ATOMIC_RELAXED); } static inline void -_Py_atomic_store_uint16_relaxed(uint16_t *ptr, uint16_t value) +_Py_atomic_store_uint16_relaxed(uint16_t *obj, uint16_t value) { - __atomic_store_n(ptr, value, __ATOMIC_RELAXED); + __atomic_store_n(obj, value, __ATOMIC_RELAXED); } static inline void -_Py_atomic_store_uint32_relaxed(uint32_t *ptr, uint32_t value) +_Py_atomic_store_uint32_relaxed(uint32_t *obj, uint32_t value) { - __atomic_store_n(ptr, value, __ATOMIC_RELAXED); + __atomic_store_n(obj, value, __ATOMIC_RELAXED); } static inline void -_Py_atomic_store_uint64_relaxed(uint64_t *ptr, uint64_t value) +_Py_atomic_store_uint64_relaxed(uint64_t *obj, uint64_t value) { - __atomic_store_n(ptr, value, __ATOMIC_RELAXED); + __atomic_store_n(obj, value, __ATOMIC_RELAXED); } static inline void -_Py_atomic_store_uintptr_relaxed(uintptr_t *ptr, uintptr_t value) +_Py_atomic_store_uintptr_relaxed(uintptr_t *obj, uintptr_t value) { - __atomic_store_n(ptr, value, __ATOMIC_RELAXED); + __atomic_store_n(obj, value, __ATOMIC_RELAXED); } static inline void -_Py_atomic_store_uint_relaxed(unsigned int *ptr, unsigned int value) +_Py_atomic_store_uint_relaxed(unsigned int *obj, unsigned int value) { - __atomic_store_n(ptr, value, __ATOMIC_RELAXED); + __atomic_store_n(obj, value, __ATOMIC_RELAXED); } static inline void -_Py_atomic_store_ptr_relaxed(void *ptr, void *value) +_Py_atomic_store_ptr_relaxed(void *obj, void *value) { - __atomic_store_n((void **)ptr, value, __ATOMIC_RELAXED); + __atomic_store_n((void **)obj, value, __ATOMIC_RELAXED); } static inline void -_Py_atomic_store_ssize_relaxed(Py_ssize_t *ptr, Py_ssize_t value) +_Py_atomic_store_ssize_relaxed(Py_ssize_t *obj, Py_ssize_t value) { - __atomic_store_n(ptr, value, __ATOMIC_RELAXED); + __atomic_store_n(obj, value, __ATOMIC_RELAXED); } static inline void * -_Py_atomic_load_ptr_acquire(const void *ptr) +_Py_atomic_load_ptr_acquire(const void *obj) { - return (void *)__atomic_load_n((void **)ptr, __ATOMIC_ACQUIRE); + return (void *)__atomic_load_n((void **)obj, __ATOMIC_ACQUIRE); } static inline void -_Py_atomic_store_ptr_release(void *ptr, void *value) +_Py_atomic_store_ptr_release(void *obj, void *value) { - __atomic_store_n((void **)ptr, value, __ATOMIC_RELEASE); + __atomic_store_n((void **)obj, value, __ATOMIC_RELEASE); } static inline void diff --git a/Include/cpython/pyatomic_msc.h b/Include/cpython/pyatomic_msc.h index 77da14efd9dd25..81ec05c3db9e0e 100644 --- a/Include/cpython/pyatomic_msc.h +++ b/Include/cpython/pyatomic_msc.h 
@@ -16,43 +16,43 @@ #include <intrin.h> static inline int -_Py_atomic_add_int(int *ptr, int value) +_Py_atomic_add_int(int *obj, int value) { Py_BUILD_ASSERT(sizeof(int) == sizeof(long)); - return (int)_InterlockedExchangeAdd((volatile long*)ptr, (long)value); + return (int)_InterlockedExchangeAdd((volatile long*)obj, (long)value); } static inline int8_t -_Py_atomic_add_int8(int8_t *ptr, int8_t value) +_Py_atomic_add_int8(int8_t *obj, int8_t value) { Py_BUILD_ASSERT(sizeof(int8_t) == sizeof(char)); - return (int8_t)_InterlockedExchangeAdd8((volatile char*)ptr, (char)value); + return (int8_t)_InterlockedExchangeAdd8((volatile char*)obj, (char)value); } static inline int16_t -_Py_atomic_add_int16(int16_t *ptr, int16_t value) +_Py_atomic_add_int16(int16_t *obj, int16_t value) { Py_BUILD_ASSERT(sizeof(int16_t) == sizeof(short)); - return (int16_t)_InterlockedExchangeAdd16((volatile short*)ptr, (short)value); + return (int16_t)_InterlockedExchangeAdd16((volatile short*)obj, (short)value); } static inline int32_t -_Py_atomic_add_int32(int32_t *ptr, int32_t value) +_Py_atomic_add_int32(int32_t *obj, int32_t value) { Py_BUILD_ASSERT(sizeof(int32_t) == sizeof(long)); - return (int32_t)_InterlockedExchangeAdd((volatile long*)ptr, (long)value); + return (int32_t)_InterlockedExchangeAdd((volatile long*)obj, (long)value); } static inline int64_t -_Py_atomic_add_int64(int64_t *ptr, int64_t value) +_Py_atomic_add_int64(int64_t *obj, int64_t value) { #if defined(_M_X64) || defined(_M_ARM64) - return (int64_t)_InterlockedExchangeAdd64((volatile __int64*)ptr, (__int64)value); + return (int64_t)_InterlockedExchangeAdd64((volatile __int64*)obj, (__int64)value); #else - int64_t old_value = *(volatile int64_t*)ptr; + int64_t old_value = *(volatile int64_t*)obj; for (;;) { int64_t new_value = old_value + value; - if (_Py_atomic_compare_exchange_int64(ptr, &old_value, new_value)) { + if (_Py_atomic_compare_exchange_int64(obj, &old_value, new_value)) { return old_value; } } @@ -60,70 +60,70 @@ _Py_atomic_add_int64(int64_t *ptr, int64_t value) } static inline intptr_t -_Py_atomic_add_intptr(intptr_t *ptr, intptr_t value) +_Py_atomic_add_intptr(intptr_t *obj, intptr_t value) { #if SIZEOF_VOID_P == 8 - return (intptr_t)_InterlockedExchangeAdd64((volatile __int64*)ptr, (__int64)value); + return (intptr_t)_InterlockedExchangeAdd64((volatile __int64*)obj, (__int64)value); #else - return (intptr_t)_InterlockedExchangeAdd((volatile long*)ptr, (long)value); + return (intptr_t)_InterlockedExchangeAdd((volatile long*)obj, (long)value); #endif } static inline unsigned int -_Py_atomic_add_uint(unsigned int *ptr, unsigned int value) +_Py_atomic_add_uint(unsigned int *obj, unsigned int value) { - return (unsigned int)_InterlockedExchangeAdd((volatile long*)ptr, (long)value); + return (unsigned int)_InterlockedExchangeAdd((volatile long*)obj, (long)value); } static inline uint8_t -_Py_atomic_add_uint8(uint8_t *ptr, uint8_t value) +_Py_atomic_add_uint8(uint8_t *obj, uint8_t value) { - return (uint8_t)_InterlockedExchangeAdd8((volatile char*)ptr, (char)value); + return (uint8_t)_InterlockedExchangeAdd8((volatile char*)obj, (char)value); } static inline uint16_t -_Py_atomic_add_uint16(uint16_t *ptr, uint16_t value) +_Py_atomic_add_uint16(uint16_t *obj, uint16_t value) { - return (uint16_t)_InterlockedExchangeAdd16((volatile short*)ptr, (short)value); + return (uint16_t)_InterlockedExchangeAdd16((volatile short*)obj, (short)value); } static inline uint32_t -_Py_atomic_add_uint32(uint32_t *ptr, uint32_t value) +_Py_atomic_add_uint32(uint32_t
*obj, uint32_t value) { - return (uint32_t)_InterlockedExchangeAdd((volatile long*)ptr, (long)value); + return (uint32_t)_InterlockedExchangeAdd((volatile long*)obj, (long)value); } static inline uint64_t -_Py_atomic_add_uint64(uint64_t *ptr, uint64_t value) +_Py_atomic_add_uint64(uint64_t *obj, uint64_t value) { - return (uint64_t)_Py_atomic_add_int64((int64_t*)ptr, (int64_t)value); + return (uint64_t)_Py_atomic_add_int64((int64_t*)obj, (int64_t)value); } static inline uintptr_t -_Py_atomic_add_uintptr(uintptr_t *ptr, uintptr_t value) +_Py_atomic_add_uintptr(uintptr_t *obj, uintptr_t value) { #if SIZEOF_VOID_P == 8 - return (uintptr_t)_InterlockedExchangeAdd64((volatile __int64*)ptr, (__int64)value); + return (uintptr_t)_InterlockedExchangeAdd64((volatile __int64*)obj, (__int64)value); #else - return (uintptr_t)_InterlockedExchangeAdd((volatile long*)ptr, (long)value); + return (uintptr_t)_InterlockedExchangeAdd((volatile long*)obj, (long)value); #endif } static inline Py_ssize_t -_Py_atomic_add_ssize(Py_ssize_t *ptr, Py_ssize_t value) +_Py_atomic_add_ssize(Py_ssize_t *obj, Py_ssize_t value) { #if SIZEOF_SIZE_T == 8 - return (Py_ssize_t)_InterlockedExchangeAdd64((volatile __int64*)ptr, (__int64)value); + return (Py_ssize_t)_InterlockedExchangeAdd64((volatile __int64*)obj, (__int64)value); #else - return (Py_ssize_t)_InterlockedExchangeAdd((volatile long*)ptr, (long)value); + return (Py_ssize_t)_InterlockedExchangeAdd((volatile long*)obj, (long)value); #endif } static inline int -_Py_atomic_compare_exchange_int(int *ptr, int *expected, int value) +_Py_atomic_compare_exchange_int(int *obj, int *expected, int value) { - int initial = (int)_InterlockedCompareExchange((volatile long*)ptr, (long)value, (long)*expected); + int initial = (int)_InterlockedCompareExchange((volatile long*)obj, (long)value, (long)*expected); if (initial == *expected) { return 1; } @@ -132,9 +132,9 @@ _Py_atomic_compare_exchange_int(int *ptr, int *expected, int value) } static inline int -_Py_atomic_compare_exchange_int8(int8_t *ptr, int8_t *expected, int8_t value) +_Py_atomic_compare_exchange_int8(int8_t *obj, int8_t *expected, int8_t value) { - int8_t initial = (int8_t)_InterlockedCompareExchange8((volatile char*)ptr, (char)value, (char)*expected); + int8_t initial = (int8_t)_InterlockedCompareExchange8((volatile char*)obj, (char)value, (char)*expected); if (initial == *expected) { return 1; } @@ -143,9 +143,9 @@ _Py_atomic_compare_exchange_int8(int8_t *ptr, int8_t *expected, int8_t value) } static inline int -_Py_atomic_compare_exchange_int16(int16_t *ptr, int16_t *expected, int16_t value) +_Py_atomic_compare_exchange_int16(int16_t *obj, int16_t *expected, int16_t value) { - int16_t initial = (int16_t)_InterlockedCompareExchange16((volatile short*)ptr, (short)value, (short)*expected); + int16_t initial = (int16_t)_InterlockedCompareExchange16((volatile short*)obj, (short)value, (short)*expected); if (initial == *expected) { return 1; } @@ -154,9 +154,9 @@ _Py_atomic_compare_exchange_int16(int16_t *ptr, int16_t *expected, int16_t value } static inline int -_Py_atomic_compare_exchange_int32(int32_t *ptr, int32_t *expected, int32_t value) +_Py_atomic_compare_exchange_int32(int32_t *obj, int32_t *expected, int32_t value) { - int32_t initial = (int32_t)_InterlockedCompareExchange((volatile long*)ptr, (long)value, (long)*expected); + int32_t initial = (int32_t)_InterlockedCompareExchange((volatile long*)obj, (long)value, (long)*expected); if (initial == *expected) { return 1; } @@ -165,9 +165,9 @@ 
_Py_atomic_compare_exchange_int32(int32_t *ptr, int32_t *expected, int32_t value } static inline int -_Py_atomic_compare_exchange_int64(int64_t *ptr, int64_t *expected, int64_t value) +_Py_atomic_compare_exchange_int64(int64_t *obj, int64_t *expected, int64_t value) { - int64_t initial = (int64_t)_InterlockedCompareExchange64((volatile __int64*)ptr, (__int64)value, (__int64)*expected); + int64_t initial = (int64_t)_InterlockedCompareExchange64((volatile __int64*)obj, (__int64)value, (__int64)*expected); if (initial == *expected) { return 1; } @@ -176,9 +176,9 @@ _Py_atomic_compare_exchange_int64(int64_t *ptr, int64_t *expected, int64_t value } static inline int -_Py_atomic_compare_exchange_intptr(intptr_t *ptr, intptr_t *expected, intptr_t value) +_Py_atomic_compare_exchange_intptr(intptr_t *obj, intptr_t *expected, intptr_t value) { - intptr_t initial = (intptr_t)_InterlockedCompareExchangePointer((void * volatile *)ptr, (void *)value, (void *)*expected); + intptr_t initial = (intptr_t)_InterlockedCompareExchangePointer((void * volatile *)obj, (void *)value, (void *)*expected); if (initial == *expected) { return 1; } @@ -187,9 +187,9 @@ _Py_atomic_compare_exchange_intptr(intptr_t *ptr, intptr_t *expected, intptr_t v } static inline int -_Py_atomic_compare_exchange_uint(unsigned int *ptr, unsigned int *expected, unsigned int value) +_Py_atomic_compare_exchange_uint(unsigned int *obj, unsigned int *expected, unsigned int value) { - unsigned int initial = (unsigned int)_InterlockedCompareExchange((volatile long*)ptr, (long)value, (long)*expected); + unsigned int initial = (unsigned int)_InterlockedCompareExchange((volatile long*)obj, (long)value, (long)*expected); if (initial == *expected) { return 1; } @@ -198,9 +198,9 @@ _Py_atomic_compare_exchange_uint(unsigned int *ptr, unsigned int *expected, unsi } static inline int -_Py_atomic_compare_exchange_uint8(uint8_t *ptr, uint8_t *expected, uint8_t value) +_Py_atomic_compare_exchange_uint8(uint8_t *obj, uint8_t *expected, uint8_t value) { - uint8_t initial = (uint8_t)_InterlockedCompareExchange8((volatile char*)ptr, (char)value, (char)*expected); + uint8_t initial = (uint8_t)_InterlockedCompareExchange8((volatile char*)obj, (char)value, (char)*expected); if (initial == *expected) { return 1; } @@ -209,9 +209,9 @@ _Py_atomic_compare_exchange_uint8(uint8_t *ptr, uint8_t *expected, uint8_t value } static inline int -_Py_atomic_compare_exchange_uint16(uint16_t *ptr, uint16_t *expected, uint16_t value) +_Py_atomic_compare_exchange_uint16(uint16_t *obj, uint16_t *expected, uint16_t value) { - uint16_t initial = (uint16_t)_InterlockedCompareExchange16((volatile short*)ptr, (short)value, (short)*expected); + uint16_t initial = (uint16_t)_InterlockedCompareExchange16((volatile short*)obj, (short)value, (short)*expected); if (initial == *expected) { return 1; } @@ -220,9 +220,9 @@ _Py_atomic_compare_exchange_uint16(uint16_t *ptr, uint16_t *expected, uint16_t v } static inline int -_Py_atomic_compare_exchange_uint32(uint32_t *ptr, uint32_t *expected, uint32_t value) +_Py_atomic_compare_exchange_uint32(uint32_t *obj, uint32_t *expected, uint32_t value) { - uint32_t initial = (uint32_t)_InterlockedCompareExchange((volatile long*)ptr, (long)value, (long)*expected); + uint32_t initial = (uint32_t)_InterlockedCompareExchange((volatile long*)obj, (long)value, (long)*expected); if (initial == *expected) { return 1; } @@ -231,9 +231,9 @@ _Py_atomic_compare_exchange_uint32(uint32_t *ptr, uint32_t *expected, uint32_t v } static inline int 
-_Py_atomic_compare_exchange_uint64(uint64_t *ptr, uint64_t *expected, uint64_t value) +_Py_atomic_compare_exchange_uint64(uint64_t *obj, uint64_t *expected, uint64_t value) { - uint64_t initial = (uint64_t)_InterlockedCompareExchange64((volatile __int64*)ptr, (__int64)value, (__int64)*expected); + uint64_t initial = (uint64_t)_InterlockedCompareExchange64((volatile __int64*)obj, (__int64)value, (__int64)*expected); if (initial == *expected) { return 1; } @@ -242,9 +242,9 @@ _Py_atomic_compare_exchange_uint64(uint64_t *ptr, uint64_t *expected, uint64_t v } static inline int -_Py_atomic_compare_exchange_uintptr(uintptr_t *ptr, uintptr_t *expected, uintptr_t value) +_Py_atomic_compare_exchange_uintptr(uintptr_t *obj, uintptr_t *expected, uintptr_t value) { - uintptr_t initial = (uintptr_t)_InterlockedCompareExchangePointer((void * volatile *)ptr, (void *)value, (void *)*expected); + uintptr_t initial = (uintptr_t)_InterlockedCompareExchangePointer((void * volatile *)obj, (void *)value, (void *)*expected); if (initial == *expected) { return 1; } @@ -253,13 +253,13 @@ _Py_atomic_compare_exchange_uintptr(uintptr_t *ptr, uintptr_t *expected, uintptr } static inline int -_Py_atomic_compare_exchange_ssize(Py_ssize_t *ptr, Py_ssize_t *expected, Py_ssize_t value) +_Py_atomic_compare_exchange_ssize(Py_ssize_t *obj, Py_ssize_t *expected, Py_ssize_t value) { Py_ssize_t initial = #if SIZEOF_SIZE_T == 8 - _InterlockedCompareExchange64((volatile __int64*)ptr, (__int64)value, (__int64)*expected); + _InterlockedCompareExchange64((volatile __int64*)obj, (__int64)value, (__int64)*expected); #else - _InterlockedCompareExchange((volatile long*)ptr, (long)value, (long)*expected); + _InterlockedCompareExchange((volatile long*)obj, (long)value, (long)*expected); #endif if (initial == *expected) { return 1; @@ -269,9 +269,9 @@ _Py_atomic_compare_exchange_ssize(Py_ssize_t *ptr, Py_ssize_t *expected, Py_ssiz } static inline int -_Py_atomic_compare_exchange_ptr(void *ptr, void *expected, void *value) +_Py_atomic_compare_exchange_ptr(void *obj, void *expected, void *value) { - void *initial = _InterlockedCompareExchangePointer((void **)ptr, value, *(void **)expected); + void *initial = _InterlockedCompareExchangePointer((void **)obj, value, *(void **)expected); if (initial == *(void **)expected) { return 1; } @@ -280,38 +280,38 @@ _Py_atomic_compare_exchange_ptr(void *ptr, void *expected, void *value) } static inline int -_Py_atomic_exchange_int(int *ptr, int value) +_Py_atomic_exchange_int(int *obj, int value) { - return (int)_InterlockedExchange((volatile long*)ptr, (long)value); + return (int)_InterlockedExchange((volatile long*)obj, (long)value); } static inline int8_t -_Py_atomic_exchange_int8(int8_t *ptr, int8_t value) +_Py_atomic_exchange_int8(int8_t *obj, int8_t value) { - return (int8_t)_InterlockedExchange8((volatile char*)ptr, (char)value); + return (int8_t)_InterlockedExchange8((volatile char*)obj, (char)value); } static inline int16_t -_Py_atomic_exchange_int16(int16_t *ptr, int16_t value) +_Py_atomic_exchange_int16(int16_t *obj, int16_t value) { - return (int16_t)_InterlockedExchange16((volatile short*)ptr, (short)value); + return (int16_t)_InterlockedExchange16((volatile short*)obj, (short)value); } static inline int32_t -_Py_atomic_exchange_int32(int32_t *ptr, int32_t value) +_Py_atomic_exchange_int32(int32_t *obj, int32_t value) { - return (int32_t)_InterlockedExchange((volatile long*)ptr, (long)value); + return (int32_t)_InterlockedExchange((volatile long*)obj, (long)value); } static inline int64_t 
-_Py_atomic_exchange_int64(int64_t *ptr, int64_t value) +_Py_atomic_exchange_int64(int64_t *obj, int64_t value) { #if defined(_M_X64) || defined(_M_ARM64) - return (int64_t)_InterlockedExchange64((volatile __int64*)ptr, (__int64)value); + return (int64_t)_InterlockedExchange64((volatile __int64*)obj, (__int64)value); #else - int64_t old_value = *(volatile int64_t*)ptr; + int64_t old_value = *(volatile int64_t*)obj; for (;;) { - if (_Py_atomic_compare_exchange_int64(ptr, &old_value, value)) { + if (_Py_atomic_compare_exchange_int64(obj, &old_value, value)) { return old_value; } } @@ -319,91 +319,91 @@ _Py_atomic_exchange_int64(int64_t *ptr, int64_t value) } static inline intptr_t -_Py_atomic_exchange_intptr(intptr_t *ptr, intptr_t value) +_Py_atomic_exchange_intptr(intptr_t *obj, intptr_t value) { - return (intptr_t)_InterlockedExchangePointer((void * volatile *)ptr, (void *)value); + return (intptr_t)_InterlockedExchangePointer((void * volatile *)obj, (void *)value); } static inline unsigned int -_Py_atomic_exchange_uint(unsigned int *ptr, unsigned int value) +_Py_atomic_exchange_uint(unsigned int *obj, unsigned int value) { - return (unsigned int)_InterlockedExchange((volatile long*)ptr, (long)value); + return (unsigned int)_InterlockedExchange((volatile long*)obj, (long)value); } static inline uint8_t -_Py_atomic_exchange_uint8(uint8_t *ptr, uint8_t value) +_Py_atomic_exchange_uint8(uint8_t *obj, uint8_t value) { - return (uint8_t)_InterlockedExchange8((volatile char*)ptr, (char)value); + return (uint8_t)_InterlockedExchange8((volatile char*)obj, (char)value); } static inline uint16_t -_Py_atomic_exchange_uint16(uint16_t *ptr, uint16_t value) +_Py_atomic_exchange_uint16(uint16_t *obj, uint16_t value) { - return (uint16_t)_InterlockedExchange16((volatile short*)ptr, (short)value); + return (uint16_t)_InterlockedExchange16((volatile short*)obj, (short)value); } static inline uint32_t -_Py_atomic_exchange_uint32(uint32_t *ptr, uint32_t value) +_Py_atomic_exchange_uint32(uint32_t *obj, uint32_t value) { - return (uint32_t)_InterlockedExchange((volatile long*)ptr, (long)value); + return (uint32_t)_InterlockedExchange((volatile long*)obj, (long)value); } static inline uint64_t -_Py_atomic_exchange_uint64(uint64_t *ptr, uint64_t value) +_Py_atomic_exchange_uint64(uint64_t *obj, uint64_t value) { - return (uint64_t)_Py_atomic_exchange_int64((int64_t *)ptr, (int64_t)value); + return (uint64_t)_Py_atomic_exchange_int64((int64_t *)obj, (int64_t)value); } static inline uintptr_t -_Py_atomic_exchange_uintptr(uintptr_t *ptr, uintptr_t value) +_Py_atomic_exchange_uintptr(uintptr_t *obj, uintptr_t value) { - return (uintptr_t)_InterlockedExchangePointer((void * volatile *)ptr, (void *)value); + return (uintptr_t)_InterlockedExchangePointer((void * volatile *)obj, (void *)value); } static inline Py_ssize_t -_Py_atomic_exchange_ssize(Py_ssize_t *ptr, Py_ssize_t value) +_Py_atomic_exchange_ssize(Py_ssize_t *obj, Py_ssize_t value) { #if SIZEOF_SIZE_T == 8 - return (Py_ssize_t)_InterlockedExchange64((volatile __int64*)ptr, (__int64)value); + return (Py_ssize_t)_InterlockedExchange64((volatile __int64*)obj, (__int64)value); #else - return (Py_ssize_t)_InterlockedExchange((volatile long*)ptr, (long)value); + return (Py_ssize_t)_InterlockedExchange((volatile long*)obj, (long)value); #endif } static inline void * -_Py_atomic_exchange_ptr(void *ptr, void *value) +_Py_atomic_exchange_ptr(void *obj, void *value) { - return (void *)_InterlockedExchangePointer((void * volatile *)ptr, (void *)value); + return (void 
*)_InterlockedExchangePointer((void * volatile *)obj, (void *)value); } static inline uint8_t -_Py_atomic_and_uint8(uint8_t *ptr, uint8_t value) +_Py_atomic_and_uint8(uint8_t *obj, uint8_t value) { - return (uint8_t)_InterlockedAnd8((volatile char*)ptr, (char)value); + return (uint8_t)_InterlockedAnd8((volatile char*)obj, (char)value); } static inline uint16_t -_Py_atomic_and_uint16(uint16_t *ptr, uint16_t value) +_Py_atomic_and_uint16(uint16_t *obj, uint16_t value) { - return (uint16_t)_InterlockedAnd16((volatile short*)ptr, (short)value); + return (uint16_t)_InterlockedAnd16((volatile short*)obj, (short)value); } static inline uint32_t -_Py_atomic_and_uint32(uint32_t *ptr, uint32_t value) +_Py_atomic_and_uint32(uint32_t *obj, uint32_t value) { - return (uint32_t)_InterlockedAnd((volatile long*)ptr, (long)value); + return (uint32_t)_InterlockedAnd((volatile long*)obj, (long)value); } static inline uint64_t -_Py_atomic_and_uint64(uint64_t *ptr, uint64_t value) +_Py_atomic_and_uint64(uint64_t *obj, uint64_t value) { #if defined(_M_X64) || defined(_M_ARM64) - return (uint64_t)_InterlockedAnd64((volatile __int64*)ptr, (__int64)value); + return (uint64_t)_InterlockedAnd64((volatile __int64*)obj, (__int64)value); #else - uint64_t old_value = *(volatile uint64_t*)ptr; + uint64_t old_value = *(volatile uint64_t*)obj; for (;;) { uint64_t new_value = old_value & value; - if (_Py_atomic_compare_exchange_uint64(ptr, &old_value, new_value)) { + if (_Py_atomic_compare_exchange_uint64(obj, &old_value, new_value)) { return old_value; } } @@ -411,43 +411,43 @@ _Py_atomic_and_uint64(uint64_t *ptr, uint64_t value) } static inline uintptr_t -_Py_atomic_and_uintptr(uintptr_t *ptr, uintptr_t value) +_Py_atomic_and_uintptr(uintptr_t *obj, uintptr_t value) { #if SIZEOF_VOID_P == 8 - return (uintptr_t)_InterlockedAnd64((volatile __int64*)ptr, (__int64)value); + return (uintptr_t)_InterlockedAnd64((volatile __int64*)obj, (__int64)value); #else - return (uintptr_t)_InterlockedAnd((volatile long*)ptr, (long)value); + return (uintptr_t)_InterlockedAnd((volatile long*)obj, (long)value); #endif } static inline uint8_t -_Py_atomic_or_uint8(uint8_t *ptr, uint8_t value) +_Py_atomic_or_uint8(uint8_t *obj, uint8_t value) { - return (uint8_t)_InterlockedOr8((volatile char*)ptr, (char)value); + return (uint8_t)_InterlockedOr8((volatile char*)obj, (char)value); } static inline uint16_t -_Py_atomic_or_uint16(uint16_t *ptr, uint16_t value) +_Py_atomic_or_uint16(uint16_t *obj, uint16_t value) { - return (uint16_t)_InterlockedOr16((volatile short*)ptr, (short)value); + return (uint16_t)_InterlockedOr16((volatile short*)obj, (short)value); } static inline uint32_t -_Py_atomic_or_uint32(uint32_t *ptr, uint32_t value) +_Py_atomic_or_uint32(uint32_t *obj, uint32_t value) { - return (uint32_t)_InterlockedOr((volatile long*)ptr, (long)value); + return (uint32_t)_InterlockedOr((volatile long*)obj, (long)value); } static inline uint64_t -_Py_atomic_or_uint64(uint64_t *ptr, uint64_t value) +_Py_atomic_or_uint64(uint64_t *obj, uint64_t value) { #if defined(_M_X64) || defined(_M_ARM64) - return (uint64_t)_InterlockedOr64((volatile __int64*)ptr, (__int64)value); + return (uint64_t)_InterlockedOr64((volatile __int64*)obj, (__int64)value); #else - uint64_t old_value = *(volatile uint64_t *)ptr; + uint64_t old_value = *(volatile uint64_t *)obj; for (;;) { uint64_t new_value = old_value | value; - if (_Py_atomic_compare_exchange_uint64(ptr, &old_value, new_value)) { + if (_Py_atomic_compare_exchange_uint64(obj, &old_value, new_value)) { return 
old_value; } } @@ -455,460 +455,460 @@ _Py_atomic_or_uint64(uint64_t *ptr, uint64_t value) } static inline uintptr_t -_Py_atomic_or_uintptr(uintptr_t *ptr, uintptr_t value) +_Py_atomic_or_uintptr(uintptr_t *obj, uintptr_t value) { #if SIZEOF_VOID_P == 8 - return (uintptr_t)_InterlockedOr64((volatile __int64*)ptr, (__int64)value); + return (uintptr_t)_InterlockedOr64((volatile __int64*)obj, (__int64)value); #else - return (uintptr_t)_InterlockedOr((volatile long*)ptr, (long)value); + return (uintptr_t)_InterlockedOr((volatile long*)obj, (long)value); #endif } static inline int -_Py_atomic_load_int(const int *ptr) +_Py_atomic_load_int(const int *obj) { #if defined(_M_X64) || defined(_M_IX86) - return *(volatile int *)ptr; + return *(volatile int *)obj; #elif defined(_M_ARM64) - return (int)__ldar32((unsigned __int32 volatile*)ptr); + return (int)__ldar32((unsigned __int32 volatile*)obj); #else #error no implementation of _Py_atomic_load_int #endif } static inline int8_t -_Py_atomic_load_int8(const int8_t *ptr) +_Py_atomic_load_int8(const int8_t *obj) { #if defined(_M_X64) || defined(_M_IX86) - return *(volatile int8_t *)ptr; + return *(volatile int8_t *)obj; #elif defined(_M_ARM64) - return (int8_t)__ldar8((unsigned __int8 volatile*)ptr); + return (int8_t)__ldar8((unsigned __int8 volatile*)obj); #else #error no implementation of _Py_atomic_load_int8 #endif } static inline int16_t -_Py_atomic_load_int16(const int16_t *ptr) +_Py_atomic_load_int16(const int16_t *obj) { #if defined(_M_X64) || defined(_M_IX86) - return *(volatile int16_t *)ptr; + return *(volatile int16_t *)obj; #elif defined(_M_ARM64) - return (int16_t)__ldar16((unsigned __int16 volatile*)ptr); + return (int16_t)__ldar16((unsigned __int16 volatile*)obj); #else #error no implementation of _Py_atomic_load_int16 #endif } static inline int32_t -_Py_atomic_load_int32(const int32_t *ptr) +_Py_atomic_load_int32(const int32_t *obj) { #if defined(_M_X64) || defined(_M_IX86) - return *(volatile int32_t *)ptr; + return *(volatile int32_t *)obj; #elif defined(_M_ARM64) - return (int32_t)__ldar32((unsigned __int32 volatile*)ptr); + return (int32_t)__ldar32((unsigned __int32 volatile*)obj); #else #error no implementation of _Py_atomic_load_int32 #endif } static inline int64_t -_Py_atomic_load_int64(const int64_t *ptr) +_Py_atomic_load_int64(const int64_t *obj) { #if defined(_M_X64) || defined(_M_IX86) - return *(volatile int64_t *)ptr; + return *(volatile int64_t *)obj; #elif defined(_M_ARM64) - return __ldar64((unsigned __int64 volatile*)ptr); + return __ldar64((unsigned __int64 volatile*)obj); #else #error no implementation of _Py_atomic_load_int64 #endif } static inline intptr_t -_Py_atomic_load_intptr(const intptr_t *ptr) +_Py_atomic_load_intptr(const intptr_t *obj) { #if defined(_M_X64) || defined(_M_IX86) - return *(volatile intptr_t *)ptr; + return *(volatile intptr_t *)obj; #elif defined(_M_ARM64) - return __ldar64((unsigned __int64 volatile*)ptr); + return __ldar64((unsigned __int64 volatile*)obj); #else #error no implementation of _Py_atomic_load_intptr #endif } static inline uint8_t -_Py_atomic_load_uint8(const uint8_t *ptr) +_Py_atomic_load_uint8(const uint8_t *obj) { #if defined(_M_X64) || defined(_M_IX86) - return *(volatile uint8_t *)ptr; + return *(volatile uint8_t *)obj; #elif defined(_M_ARM64) - return __ldar8((unsigned __int8 volatile*)ptr); + return __ldar8((unsigned __int8 volatile*)obj); #else #error no implementation of _Py_atomic_load_uint8 #endif } static inline uint16_t -_Py_atomic_load_uint16(const uint16_t *ptr) 
+_Py_atomic_load_uint16(const uint16_t *obj) { #if defined(_M_X64) || defined(_M_IX86) - return *(volatile uint16_t *)ptr; + return *(volatile uint16_t *)obj; #elif defined(_M_ARM64) - return __ldar16((unsigned __int16 volatile*)ptr); + return __ldar16((unsigned __int16 volatile*)obj); #else #error no implementation of _Py_atomic_load_uint16 #endif } static inline uint32_t -_Py_atomic_load_uint32(const uint32_t *ptr) +_Py_atomic_load_uint32(const uint32_t *obj) { #if defined(_M_X64) || defined(_M_IX86) - return *(volatile uint32_t *)ptr; + return *(volatile uint32_t *)obj; #elif defined(_M_ARM64) - return __ldar32((unsigned __int32 volatile*)ptr); + return __ldar32((unsigned __int32 volatile*)obj); #else #error no implementation of _Py_atomic_load_uint32 #endif } static inline uint64_t -_Py_atomic_load_uint64(const uint64_t *ptr) +_Py_atomic_load_uint64(const uint64_t *obj) { #if defined(_M_X64) || defined(_M_IX86) - return *(volatile uint64_t *)ptr; + return *(volatile uint64_t *)obj; #elif defined(_M_ARM64) - return __ldar64((unsigned __int64 volatile*)ptr); + return __ldar64((unsigned __int64 volatile*)obj); #else #error no implementation of _Py_atomic_load_uint64 #endif } static inline uintptr_t -_Py_atomic_load_uintptr(const uintptr_t *ptr) +_Py_atomic_load_uintptr(const uintptr_t *obj) { #if defined(_M_X64) || defined(_M_IX86) - return *(volatile uintptr_t *)ptr; + return *(volatile uintptr_t *)obj; #elif defined(_M_ARM64) - return __ldar64((unsigned __int64 volatile*)ptr); + return __ldar64((unsigned __int64 volatile*)obj); #else #error no implementation of _Py_atomic_load_uintptr #endif } static inline unsigned int -_Py_atomic_load_uint(const unsigned int *ptr) +_Py_atomic_load_uint(const unsigned int *obj) { #if defined(_M_X64) || defined(_M_IX86) - return *(volatile unsigned int *)ptr; + return *(volatile unsigned int *)obj; #elif defined(_M_ARM64) - return __ldar32((unsigned __int32 volatile*)ptr); + return __ldar32((unsigned __int32 volatile*)obj); #else #error no implementation of _Py_atomic_load_uint #endif } static inline Py_ssize_t -_Py_atomic_load_ssize(const Py_ssize_t *ptr) +_Py_atomic_load_ssize(const Py_ssize_t *obj) { #if defined(_M_X64) || defined(_M_IX86) - return *(volatile Py_ssize_t *)ptr; + return *(volatile Py_ssize_t *)obj; #elif defined(_M_ARM64) - return __ldar64((unsigned __int64 volatile*)ptr); + return __ldar64((unsigned __int64 volatile*)obj); #else #error no implementation of _Py_atomic_load_ssize #endif } static inline void * -_Py_atomic_load_ptr(const void *ptr) +_Py_atomic_load_ptr(const void *obj) { #if defined(_M_X64) || defined(_M_IX86) - return *(void * volatile *)ptr; + return *(void * volatile *)obj; #elif defined(_M_ARM64) - return (void *)__ldar64((unsigned __int64 volatile*)ptr); + return (void *)__ldar64((unsigned __int64 volatile*)obj); #else #error no implementation of _Py_atomic_load_ptr #endif } static inline int -_Py_atomic_load_int_relaxed(const int *ptr) +_Py_atomic_load_int_relaxed(const int *obj) { - return *(volatile int *)ptr; + return *(volatile int *)obj; } static inline int8_t -_Py_atomic_load_int8_relaxed(const int8_t *ptr) +_Py_atomic_load_int8_relaxed(const int8_t *obj) { - return *(volatile int8_t *)ptr; + return *(volatile int8_t *)obj; } static inline int16_t -_Py_atomic_load_int16_relaxed(const int16_t *ptr) +_Py_atomic_load_int16_relaxed(const int16_t *obj) { - return *(volatile int16_t *)ptr; + return *(volatile int16_t *)obj; } static inline int32_t -_Py_atomic_load_int32_relaxed(const int32_t *ptr) 
+_Py_atomic_load_int32_relaxed(const int32_t *obj) { - return *(volatile int32_t *)ptr; + return *(volatile int32_t *)obj; } static inline int64_t -_Py_atomic_load_int64_relaxed(const int64_t *ptr) +_Py_atomic_load_int64_relaxed(const int64_t *obj) { - return *(volatile int64_t *)ptr; + return *(volatile int64_t *)obj; } static inline intptr_t -_Py_atomic_load_intptr_relaxed(const intptr_t *ptr) +_Py_atomic_load_intptr_relaxed(const intptr_t *obj) { - return *(volatile intptr_t *)ptr; + return *(volatile intptr_t *)obj; } static inline uint8_t -_Py_atomic_load_uint8_relaxed(const uint8_t *ptr) +_Py_atomic_load_uint8_relaxed(const uint8_t *obj) { - return *(volatile uint8_t *)ptr; + return *(volatile uint8_t *)obj; } static inline uint16_t -_Py_atomic_load_uint16_relaxed(const uint16_t *ptr) +_Py_atomic_load_uint16_relaxed(const uint16_t *obj) { - return *(volatile uint16_t *)ptr; + return *(volatile uint16_t *)obj; } static inline uint32_t -_Py_atomic_load_uint32_relaxed(const uint32_t *ptr) +_Py_atomic_load_uint32_relaxed(const uint32_t *obj) { - return *(volatile uint32_t *)ptr; + return *(volatile uint32_t *)obj; } static inline uint64_t -_Py_atomic_load_uint64_relaxed(const uint64_t *ptr) +_Py_atomic_load_uint64_relaxed(const uint64_t *obj) { - return *(volatile uint64_t *)ptr; + return *(volatile uint64_t *)obj; } static inline uintptr_t -_Py_atomic_load_uintptr_relaxed(const uintptr_t *ptr) +_Py_atomic_load_uintptr_relaxed(const uintptr_t *obj) { - return *(volatile uintptr_t *)ptr; + return *(volatile uintptr_t *)obj; } static inline unsigned int -_Py_atomic_load_uint_relaxed(const unsigned int *ptr) +_Py_atomic_load_uint_relaxed(const unsigned int *obj) { - return *(volatile unsigned int *)ptr; + return *(volatile unsigned int *)obj; } static inline Py_ssize_t -_Py_atomic_load_ssize_relaxed(const Py_ssize_t *ptr) +_Py_atomic_load_ssize_relaxed(const Py_ssize_t *obj) { - return *(volatile Py_ssize_t *)ptr; + return *(volatile Py_ssize_t *)obj; } static inline void* -_Py_atomic_load_ptr_relaxed(const void *ptr) +_Py_atomic_load_ptr_relaxed(const void *obj) { - return *(void * volatile *)ptr; + return *(void * volatile *)obj; } static inline void -_Py_atomic_store_int(int *ptr, int value) +_Py_atomic_store_int(int *obj, int value) { - _InterlockedExchange((volatile long*)ptr, (long)value); + _InterlockedExchange((volatile long*)obj, (long)value); } static inline void -_Py_atomic_store_int8(int8_t *ptr, int8_t value) +_Py_atomic_store_int8(int8_t *obj, int8_t value) { - _InterlockedExchange8((volatile char*)ptr, (char)value); + _InterlockedExchange8((volatile char*)obj, (char)value); } static inline void -_Py_atomic_store_int16(int16_t *ptr, int16_t value) +_Py_atomic_store_int16(int16_t *obj, int16_t value) { - _InterlockedExchange16((volatile short*)ptr, (short)value); + _InterlockedExchange16((volatile short*)obj, (short)value); } static inline void -_Py_atomic_store_int32(int32_t *ptr, int32_t value) +_Py_atomic_store_int32(int32_t *obj, int32_t value) { - _InterlockedExchange((volatile long*)ptr, (long)value); + _InterlockedExchange((volatile long*)obj, (long)value); } static inline void -_Py_atomic_store_int64(int64_t *ptr, int64_t value) +_Py_atomic_store_int64(int64_t *obj, int64_t value) { - _Py_atomic_exchange_int64(ptr, value); + _Py_atomic_exchange_int64(obj, value); } static inline void -_Py_atomic_store_intptr(intptr_t *ptr, intptr_t value) +_Py_atomic_store_intptr(intptr_t *obj, intptr_t value) { - _InterlockedExchangePointer((void * volatile *)ptr, (void *)value); + 
_InterlockedExchangePointer((void * volatile *)obj, (void *)value); } static inline void -_Py_atomic_store_uint8(uint8_t *ptr, uint8_t value) +_Py_atomic_store_uint8(uint8_t *obj, uint8_t value) { - _InterlockedExchange8((volatile char*)ptr, (char)value); + _InterlockedExchange8((volatile char*)obj, (char)value); } static inline void -_Py_atomic_store_uint16(uint16_t *ptr, uint16_t value) +_Py_atomic_store_uint16(uint16_t *obj, uint16_t value) { - _InterlockedExchange16((volatile short*)ptr, (short)value); + _InterlockedExchange16((volatile short*)obj, (short)value); } static inline void -_Py_atomic_store_uint32(uint32_t *ptr, uint32_t value) +_Py_atomic_store_uint32(uint32_t *obj, uint32_t value) { - _InterlockedExchange((volatile long*)ptr, (long)value); + _InterlockedExchange((volatile long*)obj, (long)value); } static inline void -_Py_atomic_store_uint64(uint64_t *ptr, uint64_t value) +_Py_atomic_store_uint64(uint64_t *obj, uint64_t value) { - _Py_atomic_exchange_int64((int64_t *)ptr, (int64_t)value); + _Py_atomic_exchange_int64((int64_t *)obj, (int64_t)value); } static inline void -_Py_atomic_store_uintptr(uintptr_t *ptr, uintptr_t value) +_Py_atomic_store_uintptr(uintptr_t *obj, uintptr_t value) { - _InterlockedExchangePointer((void * volatile *)ptr, (void *)value); + _InterlockedExchangePointer((void * volatile *)obj, (void *)value); } static inline void -_Py_atomic_store_uint(unsigned int *ptr, unsigned int value) +_Py_atomic_store_uint(unsigned int *obj, unsigned int value) { - _InterlockedExchange((volatile long*)ptr, (long)value); + _InterlockedExchange((volatile long*)obj, (long)value); } static inline void -_Py_atomic_store_ptr(void *ptr, void *value) +_Py_atomic_store_ptr(void *obj, void *value) { - _InterlockedExchangePointer((void * volatile *)ptr, (void *)value); + _InterlockedExchangePointer((void * volatile *)obj, (void *)value); } static inline void -_Py_atomic_store_ssize(Py_ssize_t *ptr, Py_ssize_t value) +_Py_atomic_store_ssize(Py_ssize_t *obj, Py_ssize_t value) { #if SIZEOF_SIZE_T == 8 - _InterlockedExchange64((volatile __int64 *)ptr, (__int64)value); + _InterlockedExchange64((volatile __int64 *)obj, (__int64)value); #else - _InterlockedExchange((volatile long*)ptr, (long)value); + _InterlockedExchange((volatile long*)obj, (long)value); #endif } static inline void -_Py_atomic_store_int_relaxed(int *ptr, int value) +_Py_atomic_store_int_relaxed(int *obj, int value) { - *(volatile int *)ptr = value; + *(volatile int *)obj = value; } static inline void -_Py_atomic_store_int8_relaxed(int8_t *ptr, int8_t value) +_Py_atomic_store_int8_relaxed(int8_t *obj, int8_t value) { - *(volatile int8_t *)ptr = value; + *(volatile int8_t *)obj = value; } static inline void -_Py_atomic_store_int16_relaxed(int16_t *ptr, int16_t value) +_Py_atomic_store_int16_relaxed(int16_t *obj, int16_t value) { - *(volatile int16_t *)ptr = value; + *(volatile int16_t *)obj = value; } static inline void -_Py_atomic_store_int32_relaxed(int32_t *ptr, int32_t value) +_Py_atomic_store_int32_relaxed(int32_t *obj, int32_t value) { - *(volatile int32_t *)ptr = value; + *(volatile int32_t *)obj = value; } static inline void -_Py_atomic_store_int64_relaxed(int64_t *ptr, int64_t value) +_Py_atomic_store_int64_relaxed(int64_t *obj, int64_t value) { - *(volatile int64_t *)ptr = value; + *(volatile int64_t *)obj = value; } static inline void -_Py_atomic_store_intptr_relaxed(intptr_t *ptr, intptr_t value) +_Py_atomic_store_intptr_relaxed(intptr_t *obj, intptr_t value) { - *(volatile intptr_t *)ptr = value; + 
*(volatile intptr_t *)obj = value; } static inline void -_Py_atomic_store_uint8_relaxed(uint8_t *ptr, uint8_t value) +_Py_atomic_store_uint8_relaxed(uint8_t *obj, uint8_t value) { - *(volatile uint8_t *)ptr = value; + *(volatile uint8_t *)obj = value; } static inline void -_Py_atomic_store_uint16_relaxed(uint16_t *ptr, uint16_t value) +_Py_atomic_store_uint16_relaxed(uint16_t *obj, uint16_t value) { - *(volatile uint16_t *)ptr = value; + *(volatile uint16_t *)obj = value; } static inline void -_Py_atomic_store_uint32_relaxed(uint32_t *ptr, uint32_t value) +_Py_atomic_store_uint32_relaxed(uint32_t *obj, uint32_t value) { - *(volatile uint32_t *)ptr = value; + *(volatile uint32_t *)obj = value; } static inline void -_Py_atomic_store_uint64_relaxed(uint64_t *ptr, uint64_t value) +_Py_atomic_store_uint64_relaxed(uint64_t *obj, uint64_t value) { - *(volatile uint64_t *)ptr = value; + *(volatile uint64_t *)obj = value; } static inline void -_Py_atomic_store_uintptr_relaxed(uintptr_t *ptr, uintptr_t value) +_Py_atomic_store_uintptr_relaxed(uintptr_t *obj, uintptr_t value) { - *(volatile uintptr_t *)ptr = value; + *(volatile uintptr_t *)obj = value; } static inline void -_Py_atomic_store_uint_relaxed(unsigned int *ptr, unsigned int value) +_Py_atomic_store_uint_relaxed(unsigned int *obj, unsigned int value) { - *(volatile unsigned int *)ptr = value; + *(volatile unsigned int *)obj = value; } static inline void -_Py_atomic_store_ptr_relaxed(void *ptr, void* value) +_Py_atomic_store_ptr_relaxed(void *obj, void* value) { - *(void * volatile *)ptr = value; + *(void * volatile *)obj = value; } static inline void -_Py_atomic_store_ssize_relaxed(Py_ssize_t *ptr, Py_ssize_t value) +_Py_atomic_store_ssize_relaxed(Py_ssize_t *obj, Py_ssize_t value) { - *(volatile Py_ssize_t *)ptr = value; + *(volatile Py_ssize_t *)obj = value; } static inline void * -_Py_atomic_load_ptr_acquire(const void *ptr) +_Py_atomic_load_ptr_acquire(const void *obj) { #if defined(_M_X64) || defined(_M_IX86) - return *(void * volatile *)ptr; + return *(void * volatile *)obj; #elif defined(_M_ARM64) - return (void *)__ldar64((unsigned __int64 volatile*)ptr); + return (void *)__ldar64((unsigned __int64 volatile*)obj); #else #error no implementation of _Py_atomic_load_ptr_acquire #endif } static inline void -_Py_atomic_store_ptr_release(void *ptr, void *value) +_Py_atomic_store_ptr_release(void *obj, void *value) { #if defined(_M_X64) || defined(_M_IX86) - *(void * volatile *)ptr = value; + *(void * volatile *)obj = value; #elif defined(_M_ARM64) - __stlr64(ptr, (uintptr_t)value); + __stlr64(obj, (uintptr_t)value); #else #error no implementation of _Py_atomic_store_ptr_release #endif diff --git a/Include/cpython/pyatomic_std.h b/Include/cpython/pyatomic_std.h index b1416a2950f1a4..c5de363c1d6f98 100644 --- a/Include/cpython/pyatomic_std.h +++ b/Include/cpython/pyatomic_std.h @@ -19,812 +19,812 @@ extern "C++" { static inline int -_Py_atomic_add_int(int *ptr, int value) +_Py_atomic_add_int(int *obj, int value) { _Py_USING_STD - return atomic_fetch_add((_Atomic(int)*)ptr, value); + return atomic_fetch_add((_Atomic(int)*)obj, value); } static inline int8_t -_Py_atomic_add_int8(int8_t *ptr, int8_t value) +_Py_atomic_add_int8(int8_t *obj, int8_t value) { _Py_USING_STD - return atomic_fetch_add((_Atomic(int8_t)*)ptr, value); + return atomic_fetch_add((_Atomic(int8_t)*)obj, value); } static inline int16_t -_Py_atomic_add_int16(int16_t *ptr, int16_t value) +_Py_atomic_add_int16(int16_t *obj, int16_t value) { _Py_USING_STD - return 
atomic_fetch_add((_Atomic(int16_t)*)ptr, value); + return atomic_fetch_add((_Atomic(int16_t)*)obj, value); } static inline int32_t -_Py_atomic_add_int32(int32_t *ptr, int32_t value) +_Py_atomic_add_int32(int32_t *obj, int32_t value) { _Py_USING_STD - return atomic_fetch_add((_Atomic(int32_t)*)ptr, value); + return atomic_fetch_add((_Atomic(int32_t)*)obj, value); } static inline int64_t -_Py_atomic_add_int64(int64_t *ptr, int64_t value) +_Py_atomic_add_int64(int64_t *obj, int64_t value) { _Py_USING_STD - return atomic_fetch_add((_Atomic(int64_t)*)ptr, value); + return atomic_fetch_add((_Atomic(int64_t)*)obj, value); } static inline intptr_t -_Py_atomic_add_intptr(intptr_t *ptr, intptr_t value) +_Py_atomic_add_intptr(intptr_t *obj, intptr_t value) { _Py_USING_STD - return atomic_fetch_add((_Atomic(intptr_t)*)ptr, value); + return atomic_fetch_add((_Atomic(intptr_t)*)obj, value); } static inline unsigned int -_Py_atomic_add_uint(unsigned int *ptr, unsigned int value) +_Py_atomic_add_uint(unsigned int *obj, unsigned int value) { _Py_USING_STD - return atomic_fetch_add((_Atomic(unsigned int)*)ptr, value); + return atomic_fetch_add((_Atomic(unsigned int)*)obj, value); } static inline uint8_t -_Py_atomic_add_uint8(uint8_t *ptr, uint8_t value) +_Py_atomic_add_uint8(uint8_t *obj, uint8_t value) { _Py_USING_STD - return atomic_fetch_add((_Atomic(uint8_t)*)ptr, value); + return atomic_fetch_add((_Atomic(uint8_t)*)obj, value); } static inline uint16_t -_Py_atomic_add_uint16(uint16_t *ptr, uint16_t value) +_Py_atomic_add_uint16(uint16_t *obj, uint16_t value) { _Py_USING_STD - return atomic_fetch_add((_Atomic(uint16_t)*)ptr, value); + return atomic_fetch_add((_Atomic(uint16_t)*)obj, value); } static inline uint32_t -_Py_atomic_add_uint32(uint32_t *ptr, uint32_t value) +_Py_atomic_add_uint32(uint32_t *obj, uint32_t value) { _Py_USING_STD - return atomic_fetch_add((_Atomic(uint32_t)*)ptr, value); + return atomic_fetch_add((_Atomic(uint32_t)*)obj, value); } static inline uint64_t -_Py_atomic_add_uint64(uint64_t *ptr, uint64_t value) +_Py_atomic_add_uint64(uint64_t *obj, uint64_t value) { _Py_USING_STD - return atomic_fetch_add((_Atomic(uint64_t)*)ptr, value); + return atomic_fetch_add((_Atomic(uint64_t)*)obj, value); } static inline uintptr_t -_Py_atomic_add_uintptr(uintptr_t *ptr, uintptr_t value) +_Py_atomic_add_uintptr(uintptr_t *obj, uintptr_t value) { _Py_USING_STD - return atomic_fetch_add((_Atomic(uintptr_t)*)ptr, value); + return atomic_fetch_add((_Atomic(uintptr_t)*)obj, value); } static inline Py_ssize_t -_Py_atomic_add_ssize(Py_ssize_t *ptr, Py_ssize_t value) +_Py_atomic_add_ssize(Py_ssize_t *obj, Py_ssize_t value) { _Py_USING_STD - return atomic_fetch_add((_Atomic(Py_ssize_t)*)ptr, value); + return atomic_fetch_add((_Atomic(Py_ssize_t)*)obj, value); } static inline int -_Py_atomic_compare_exchange_int(int *ptr, int *expected, int desired) +_Py_atomic_compare_exchange_int(int *obj, int *expected, int desired) { _Py_USING_STD - return atomic_compare_exchange_strong((_Atomic(int)*)ptr, + return atomic_compare_exchange_strong((_Atomic(int)*)obj, expected, desired); } static inline int -_Py_atomic_compare_exchange_int8(int8_t *ptr, int8_t *expected, int8_t desired) +_Py_atomic_compare_exchange_int8(int8_t *obj, int8_t *expected, int8_t desired) { _Py_USING_STD - return atomic_compare_exchange_strong((_Atomic(int8_t)*)ptr, + return atomic_compare_exchange_strong((_Atomic(int8_t)*)obj, expected, desired); } static inline int -_Py_atomic_compare_exchange_int16(int16_t *ptr, int16_t *expected, int16_t 
desired) +_Py_atomic_compare_exchange_int16(int16_t *obj, int16_t *expected, int16_t desired) { _Py_USING_STD - return atomic_compare_exchange_strong((_Atomic(int16_t)*)ptr, + return atomic_compare_exchange_strong((_Atomic(int16_t)*)obj, expected, desired); } static inline int -_Py_atomic_compare_exchange_int32(int32_t *ptr, int32_t *expected, int32_t desired) +_Py_atomic_compare_exchange_int32(int32_t *obj, int32_t *expected, int32_t desired) { _Py_USING_STD - return atomic_compare_exchange_strong((_Atomic(int32_t)*)ptr, + return atomic_compare_exchange_strong((_Atomic(int32_t)*)obj, expected, desired); } static inline int -_Py_atomic_compare_exchange_int64(int64_t *ptr, int64_t *expected, int64_t desired) +_Py_atomic_compare_exchange_int64(int64_t *obj, int64_t *expected, int64_t desired) { _Py_USING_STD - return atomic_compare_exchange_strong((_Atomic(int64_t)*)ptr, + return atomic_compare_exchange_strong((_Atomic(int64_t)*)obj, expected, desired); } static inline int -_Py_atomic_compare_exchange_intptr(intptr_t *ptr, intptr_t *expected, intptr_t desired) +_Py_atomic_compare_exchange_intptr(intptr_t *obj, intptr_t *expected, intptr_t desired) { _Py_USING_STD - return atomic_compare_exchange_strong((_Atomic(intptr_t)*)ptr, + return atomic_compare_exchange_strong((_Atomic(intptr_t)*)obj, expected, desired); } static inline int -_Py_atomic_compare_exchange_uint(unsigned int *ptr, unsigned int *expected, unsigned int desired) +_Py_atomic_compare_exchange_uint(unsigned int *obj, unsigned int *expected, unsigned int desired) { _Py_USING_STD - return atomic_compare_exchange_strong((_Atomic(unsigned int)*)ptr, + return atomic_compare_exchange_strong((_Atomic(unsigned int)*)obj, expected, desired); } static inline int -_Py_atomic_compare_exchange_uint8(uint8_t *ptr, uint8_t *expected, uint8_t desired) +_Py_atomic_compare_exchange_uint8(uint8_t *obj, uint8_t *expected, uint8_t desired) { _Py_USING_STD - return atomic_compare_exchange_strong((_Atomic(uint8_t)*)ptr, + return atomic_compare_exchange_strong((_Atomic(uint8_t)*)obj, expected, desired); } static inline int -_Py_atomic_compare_exchange_uint16(uint16_t *ptr, uint16_t *expected, uint16_t desired) +_Py_atomic_compare_exchange_uint16(uint16_t *obj, uint16_t *expected, uint16_t desired) { _Py_USING_STD - return atomic_compare_exchange_strong((_Atomic(uint16_t)*)ptr, + return atomic_compare_exchange_strong((_Atomic(uint16_t)*)obj, expected, desired); } static inline int -_Py_atomic_compare_exchange_uint32(uint32_t *ptr, uint32_t *expected, uint32_t desired) +_Py_atomic_compare_exchange_uint32(uint32_t *obj, uint32_t *expected, uint32_t desired) { _Py_USING_STD - return atomic_compare_exchange_strong((_Atomic(uint32_t)*)ptr, + return atomic_compare_exchange_strong((_Atomic(uint32_t)*)obj, expected, desired); } static inline int -_Py_atomic_compare_exchange_uint64(uint64_t *ptr, uint64_t *expected, uint64_t desired) +_Py_atomic_compare_exchange_uint64(uint64_t *obj, uint64_t *expected, uint64_t desired) { _Py_USING_STD - return atomic_compare_exchange_strong((_Atomic(uint64_t)*)ptr, + return atomic_compare_exchange_strong((_Atomic(uint64_t)*)obj, expected, desired); } static inline int -_Py_atomic_compare_exchange_uintptr(uintptr_t *ptr, uintptr_t *expected, uintptr_t desired) +_Py_atomic_compare_exchange_uintptr(uintptr_t *obj, uintptr_t *expected, uintptr_t desired) { _Py_USING_STD - return atomic_compare_exchange_strong((_Atomic(uintptr_t)*)ptr, + return atomic_compare_exchange_strong((_Atomic(uintptr_t)*)obj, expected, desired); } static 
inline int -_Py_atomic_compare_exchange_ssize(Py_ssize_t *ptr, Py_ssize_t *expected, Py_ssize_t desired) +_Py_atomic_compare_exchange_ssize(Py_ssize_t *obj, Py_ssize_t *expected, Py_ssize_t desired) { _Py_USING_STD - return atomic_compare_exchange_strong((_Atomic(Py_ssize_t)*)ptr, + return atomic_compare_exchange_strong((_Atomic(Py_ssize_t)*)obj, expected, desired); } static inline int -_Py_atomic_compare_exchange_ptr(void *ptr, void *expected, void *desired) +_Py_atomic_compare_exchange_ptr(void *obj, void *expected, void *desired) { _Py_USING_STD - return atomic_compare_exchange_strong((_Atomic(void *)*)ptr, + return atomic_compare_exchange_strong((_Atomic(void *)*)obj, (void **)expected, desired); } static inline int -_Py_atomic_exchange_int(int *ptr, int value) +_Py_atomic_exchange_int(int *obj, int value) { _Py_USING_STD - return atomic_exchange((_Atomic(int)*)ptr, value); + return atomic_exchange((_Atomic(int)*)obj, value); } static inline int8_t -_Py_atomic_exchange_int8(int8_t *ptr, int8_t value) +_Py_atomic_exchange_int8(int8_t *obj, int8_t value) { _Py_USING_STD - return atomic_exchange((_Atomic(int8_t)*)ptr, value); + return atomic_exchange((_Atomic(int8_t)*)obj, value); } static inline int16_t -_Py_atomic_exchange_int16(int16_t *ptr, int16_t value) +_Py_atomic_exchange_int16(int16_t *obj, int16_t value) { _Py_USING_STD - return atomic_exchange((_Atomic(int16_t)*)ptr, value); + return atomic_exchange((_Atomic(int16_t)*)obj, value); } static inline int32_t -_Py_atomic_exchange_int32(int32_t *ptr, int32_t value) +_Py_atomic_exchange_int32(int32_t *obj, int32_t value) { _Py_USING_STD - return atomic_exchange((_Atomic(int32_t)*)ptr, value); + return atomic_exchange((_Atomic(int32_t)*)obj, value); } static inline int64_t -_Py_atomic_exchange_int64(int64_t *ptr, int64_t value) +_Py_atomic_exchange_int64(int64_t *obj, int64_t value) { _Py_USING_STD - return atomic_exchange((_Atomic(int64_t)*)ptr, value); + return atomic_exchange((_Atomic(int64_t)*)obj, value); } static inline intptr_t -_Py_atomic_exchange_intptr(intptr_t *ptr, intptr_t value) +_Py_atomic_exchange_intptr(intptr_t *obj, intptr_t value) { _Py_USING_STD - return atomic_exchange((_Atomic(intptr_t)*)ptr, value); + return atomic_exchange((_Atomic(intptr_t)*)obj, value); } static inline unsigned int -_Py_atomic_exchange_uint(unsigned int *ptr, unsigned int value) +_Py_atomic_exchange_uint(unsigned int *obj, unsigned int value) { _Py_USING_STD - return atomic_exchange((_Atomic(unsigned int)*)ptr, value); + return atomic_exchange((_Atomic(unsigned int)*)obj, value); } static inline uint8_t -_Py_atomic_exchange_uint8(uint8_t *ptr, uint8_t value) +_Py_atomic_exchange_uint8(uint8_t *obj, uint8_t value) { _Py_USING_STD - return atomic_exchange((_Atomic(uint8_t)*)ptr, value); + return atomic_exchange((_Atomic(uint8_t)*)obj, value); } static inline uint16_t -_Py_atomic_exchange_uint16(uint16_t *ptr, uint16_t value) +_Py_atomic_exchange_uint16(uint16_t *obj, uint16_t value) { _Py_USING_STD - return atomic_exchange((_Atomic(uint16_t)*)ptr, value); + return atomic_exchange((_Atomic(uint16_t)*)obj, value); } static inline uint32_t -_Py_atomic_exchange_uint32(uint32_t *ptr, uint32_t value) +_Py_atomic_exchange_uint32(uint32_t *obj, uint32_t value) { _Py_USING_STD - return atomic_exchange((_Atomic(uint32_t)*)ptr, value); + return atomic_exchange((_Atomic(uint32_t)*)obj, value); } static inline uint64_t -_Py_atomic_exchange_uint64(uint64_t *ptr, uint64_t value) +_Py_atomic_exchange_uint64(uint64_t *obj, uint64_t value) { _Py_USING_STD - 
return atomic_exchange((_Atomic(uint64_t)*)ptr, value); + return atomic_exchange((_Atomic(uint64_t)*)obj, value); } static inline uintptr_t -_Py_atomic_exchange_uintptr(uintptr_t *ptr, uintptr_t value) +_Py_atomic_exchange_uintptr(uintptr_t *obj, uintptr_t value) { _Py_USING_STD - return atomic_exchange((_Atomic(uintptr_t)*)ptr, value); + return atomic_exchange((_Atomic(uintptr_t)*)obj, value); } static inline Py_ssize_t -_Py_atomic_exchange_ssize(Py_ssize_t *ptr, Py_ssize_t value) +_Py_atomic_exchange_ssize(Py_ssize_t *obj, Py_ssize_t value) { _Py_USING_STD - return atomic_exchange((_Atomic(Py_ssize_t)*)ptr, value); + return atomic_exchange((_Atomic(Py_ssize_t)*)obj, value); } static inline void * -_Py_atomic_exchange_ptr(void *ptr, void *value) +_Py_atomic_exchange_ptr(void *obj, void *value) { _Py_USING_STD - return atomic_exchange((_Atomic(void *)*)ptr, value); + return atomic_exchange((_Atomic(void *)*)obj, value); } static inline uint8_t -_Py_atomic_and_uint8(uint8_t *ptr, uint8_t value) +_Py_atomic_and_uint8(uint8_t *obj, uint8_t value) { _Py_USING_STD - return atomic_fetch_and((_Atomic(uint8_t)*)ptr, value); + return atomic_fetch_and((_Atomic(uint8_t)*)obj, value); } static inline uint16_t -_Py_atomic_and_uint16(uint16_t *ptr, uint16_t value) +_Py_atomic_and_uint16(uint16_t *obj, uint16_t value) { _Py_USING_STD - return atomic_fetch_and((_Atomic(uint16_t)*)ptr, value); + return atomic_fetch_and((_Atomic(uint16_t)*)obj, value); } static inline uint32_t -_Py_atomic_and_uint32(uint32_t *ptr, uint32_t value) +_Py_atomic_and_uint32(uint32_t *obj, uint32_t value) { _Py_USING_STD - return atomic_fetch_and((_Atomic(uint32_t)*)ptr, value); + return atomic_fetch_and((_Atomic(uint32_t)*)obj, value); } static inline uint64_t -_Py_atomic_and_uint64(uint64_t *ptr, uint64_t value) +_Py_atomic_and_uint64(uint64_t *obj, uint64_t value) { _Py_USING_STD - return atomic_fetch_and((_Atomic(uint64_t)*)ptr, value); + return atomic_fetch_and((_Atomic(uint64_t)*)obj, value); } static inline uintptr_t -_Py_atomic_and_uintptr(uintptr_t *ptr, uintptr_t value) +_Py_atomic_and_uintptr(uintptr_t *obj, uintptr_t value) { _Py_USING_STD - return atomic_fetch_and((_Atomic(uintptr_t)*)ptr, value); + return atomic_fetch_and((_Atomic(uintptr_t)*)obj, value); } static inline uint8_t -_Py_atomic_or_uint8(uint8_t *ptr, uint8_t value) +_Py_atomic_or_uint8(uint8_t *obj, uint8_t value) { _Py_USING_STD - return atomic_fetch_or((_Atomic(uint8_t)*)ptr, value); + return atomic_fetch_or((_Atomic(uint8_t)*)obj, value); } static inline uint16_t -_Py_atomic_or_uint16(uint16_t *ptr, uint16_t value) +_Py_atomic_or_uint16(uint16_t *obj, uint16_t value) { _Py_USING_STD - return atomic_fetch_or((_Atomic(uint16_t)*)ptr, value); + return atomic_fetch_or((_Atomic(uint16_t)*)obj, value); } static inline uint32_t -_Py_atomic_or_uint32(uint32_t *ptr, uint32_t value) +_Py_atomic_or_uint32(uint32_t *obj, uint32_t value) { _Py_USING_STD - return atomic_fetch_or((_Atomic(uint32_t)*)ptr, value); + return atomic_fetch_or((_Atomic(uint32_t)*)obj, value); } static inline uint64_t -_Py_atomic_or_uint64(uint64_t *ptr, uint64_t value) +_Py_atomic_or_uint64(uint64_t *obj, uint64_t value) { _Py_USING_STD - return atomic_fetch_or((_Atomic(uint64_t)*)ptr, value); + return atomic_fetch_or((_Atomic(uint64_t)*)obj, value); } static inline uintptr_t -_Py_atomic_or_uintptr(uintptr_t *ptr, uintptr_t value) +_Py_atomic_or_uintptr(uintptr_t *obj, uintptr_t value) { _Py_USING_STD - return atomic_fetch_or((_Atomic(uintptr_t)*)ptr, value); + return 
atomic_fetch_or((_Atomic(uintptr_t)*)obj, value); } static inline int -_Py_atomic_load_int(const int *ptr) +_Py_atomic_load_int(const int *obj) { _Py_USING_STD - return atomic_load((const _Atomic(int)*)ptr); + return atomic_load((const _Atomic(int)*)obj); } static inline int8_t -_Py_atomic_load_int8(const int8_t *ptr) +_Py_atomic_load_int8(const int8_t *obj) { _Py_USING_STD - return atomic_load((const _Atomic(int8_t)*)ptr); + return atomic_load((const _Atomic(int8_t)*)obj); } static inline int16_t -_Py_atomic_load_int16(const int16_t *ptr) +_Py_atomic_load_int16(const int16_t *obj) { _Py_USING_STD - return atomic_load((const _Atomic(int16_t)*)ptr); + return atomic_load((const _Atomic(int16_t)*)obj); } static inline int32_t -_Py_atomic_load_int32(const int32_t *ptr) +_Py_atomic_load_int32(const int32_t *obj) { _Py_USING_STD - return atomic_load((const _Atomic(int32_t)*)ptr); + return atomic_load((const _Atomic(int32_t)*)obj); } static inline int64_t -_Py_atomic_load_int64(const int64_t *ptr) +_Py_atomic_load_int64(const int64_t *obj) { _Py_USING_STD - return atomic_load((const _Atomic(int64_t)*)ptr); + return atomic_load((const _Atomic(int64_t)*)obj); } static inline intptr_t -_Py_atomic_load_intptr(const intptr_t *ptr) +_Py_atomic_load_intptr(const intptr_t *obj) { _Py_USING_STD - return atomic_load((const _Atomic(intptr_t)*)ptr); + return atomic_load((const _Atomic(intptr_t)*)obj); } static inline uint8_t -_Py_atomic_load_uint8(const uint8_t *ptr) +_Py_atomic_load_uint8(const uint8_t *obj) { _Py_USING_STD - return atomic_load((const _Atomic(uint8_t)*)ptr); + return atomic_load((const _Atomic(uint8_t)*)obj); } static inline uint16_t -_Py_atomic_load_uint16(const uint16_t *ptr) +_Py_atomic_load_uint16(const uint16_t *obj) { _Py_USING_STD - return atomic_load((const _Atomic(uint32_t)*)ptr); + return atomic_load((const _Atomic(uint16_t)*)obj); } static inline uint32_t -_Py_atomic_load_uint32(const uint32_t *ptr) +_Py_atomic_load_uint32(const uint32_t *obj) { _Py_USING_STD - return atomic_load((const _Atomic(uint32_t)*)ptr); + return atomic_load((const _Atomic(uint32_t)*)obj); } static inline uint64_t -_Py_atomic_load_uint64(const uint64_t *ptr) +_Py_atomic_load_uint64(const uint64_t *obj) { _Py_USING_STD - return atomic_load((const _Atomic(uint64_t)*)ptr); + return atomic_load((const _Atomic(uint64_t)*)obj); } static inline uintptr_t -_Py_atomic_load_uintptr(const uintptr_t *ptr) +_Py_atomic_load_uintptr(const uintptr_t *obj) { _Py_USING_STD - return atomic_load((const _Atomic(uintptr_t)*)ptr); + return atomic_load((const _Atomic(uintptr_t)*)obj); } static inline unsigned int -_Py_atomic_load_uint(const unsigned int *ptr) +_Py_atomic_load_uint(const unsigned int *obj) { _Py_USING_STD - return atomic_load((const _Atomic(unsigned int)*)ptr); + return atomic_load((const _Atomic(unsigned int)*)obj); } static inline Py_ssize_t -_Py_atomic_load_ssize(const Py_ssize_t *ptr) +_Py_atomic_load_ssize(const Py_ssize_t *obj) { _Py_USING_STD - return atomic_load((const _Atomic(Py_ssize_t)*)ptr); + return atomic_load((const _Atomic(Py_ssize_t)*)obj); } static inline void * -_Py_atomic_load_ptr(const void *ptr) +_Py_atomic_load_ptr(const void *obj) { _Py_USING_STD - return atomic_load((const _Atomic(void*)*)ptr); + return atomic_load((const _Atomic(void*)*)obj); } static inline int -_Py_atomic_load_int_relaxed(const int *ptr) +_Py_atomic_load_int_relaxed(const int *obj) { _Py_USING_STD - return atomic_load_explicit((const _Atomic(int)*)ptr, + return atomic_load_explicit((const _Atomic(int)*)obj,
                                 memory_order_relaxed);
 }
 
 static inline int8_t
-_Py_atomic_load_int8_relaxed(const int8_t *ptr)
+_Py_atomic_load_int8_relaxed(const int8_t *obj)
 {
     _Py_USING_STD
-    return atomic_load_explicit((const _Atomic(int8_t)*)ptr,
+    return atomic_load_explicit((const _Atomic(int8_t)*)obj,
                                 memory_order_relaxed);
 }
 
 static inline int16_t
-_Py_atomic_load_int16_relaxed(const int16_t *ptr)
+_Py_atomic_load_int16_relaxed(const int16_t *obj)
 {
     _Py_USING_STD
-    return atomic_load_explicit((const _Atomic(int16_t)*)ptr,
+    return atomic_load_explicit((const _Atomic(int16_t)*)obj,
                                 memory_order_relaxed);
 }
 
 static inline int32_t
-_Py_atomic_load_int32_relaxed(const int32_t *ptr)
+_Py_atomic_load_int32_relaxed(const int32_t *obj)
 {
     _Py_USING_STD
-    return atomic_load_explicit((const _Atomic(int32_t)*)ptr,
+    return atomic_load_explicit((const _Atomic(int32_t)*)obj,
                                 memory_order_relaxed);
 }
 
 static inline int64_t
-_Py_atomic_load_int64_relaxed(const int64_t *ptr)
+_Py_atomic_load_int64_relaxed(const int64_t *obj)
 {
     _Py_USING_STD
-    return atomic_load_explicit((const _Atomic(int64_t)*)ptr,
+    return atomic_load_explicit((const _Atomic(int64_t)*)obj,
                                 memory_order_relaxed);
 }
 
 static inline intptr_t
-_Py_atomic_load_intptr_relaxed(const intptr_t *ptr)
+_Py_atomic_load_intptr_relaxed(const intptr_t *obj)
 {
     _Py_USING_STD
-    return atomic_load_explicit((const _Atomic(intptr_t)*)ptr,
+    return atomic_load_explicit((const _Atomic(intptr_t)*)obj,
                                 memory_order_relaxed);
 }
 
 static inline uint8_t
-_Py_atomic_load_uint8_relaxed(const uint8_t *ptr)
+_Py_atomic_load_uint8_relaxed(const uint8_t *obj)
 {
     _Py_USING_STD
-    return atomic_load_explicit((const _Atomic(uint8_t)*)ptr,
+    return atomic_load_explicit((const _Atomic(uint8_t)*)obj,
                                 memory_order_relaxed);
 }
 
 static inline uint16_t
-_Py_atomic_load_uint16_relaxed(const uint16_t *ptr)
+_Py_atomic_load_uint16_relaxed(const uint16_t *obj)
 {
     _Py_USING_STD
-    return atomic_load_explicit((const _Atomic(uint16_t)*)ptr,
+    return atomic_load_explicit((const _Atomic(uint16_t)*)obj,
                                 memory_order_relaxed);
 }
 
 static inline uint32_t
-_Py_atomic_load_uint32_relaxed(const uint32_t *ptr)
+_Py_atomic_load_uint32_relaxed(const uint32_t *obj)
 {
     _Py_USING_STD
-    return atomic_load_explicit((const _Atomic(uint32_t)*)ptr,
+    return atomic_load_explicit((const _Atomic(uint32_t)*)obj,
                                 memory_order_relaxed);
 }
 
 static inline uint64_t
-_Py_atomic_load_uint64_relaxed(const uint64_t *ptr)
+_Py_atomic_load_uint64_relaxed(const uint64_t *obj)
 {
     _Py_USING_STD
-    return atomic_load_explicit((const _Atomic(uint64_t)*)ptr,
+    return atomic_load_explicit((const _Atomic(uint64_t)*)obj,
                                 memory_order_relaxed);
 }
 
 static inline uintptr_t
-_Py_atomic_load_uintptr_relaxed(const uintptr_t *ptr)
+_Py_atomic_load_uintptr_relaxed(const uintptr_t *obj)
 {
     _Py_USING_STD
-    return atomic_load_explicit((const _Atomic(uintptr_t)*)ptr,
+    return atomic_load_explicit((const _Atomic(uintptr_t)*)obj,
                                 memory_order_relaxed);
 }
 
 static inline unsigned int
-_Py_atomic_load_uint_relaxed(const unsigned int *ptr)
+_Py_atomic_load_uint_relaxed(const unsigned int *obj)
 {
     _Py_USING_STD
-    return atomic_load_explicit((const _Atomic(unsigned int)*)ptr,
+    return atomic_load_explicit((const _Atomic(unsigned int)*)obj,
                                 memory_order_relaxed);
 }
 
 static inline Py_ssize_t
-_Py_atomic_load_ssize_relaxed(const Py_ssize_t *ptr)
+_Py_atomic_load_ssize_relaxed(const Py_ssize_t *obj)
 {
     _Py_USING_STD
-    return atomic_load_explicit((const _Atomic(Py_ssize_t)*)ptr,
+    return atomic_load_explicit((const _Atomic(Py_ssize_t)*)obj,
                                 memory_order_relaxed);
 }
 
 static inline void *
-_Py_atomic_load_ptr_relaxed(const void *ptr)
+_Py_atomic_load_ptr_relaxed(const void *obj)
 {
     _Py_USING_STD
-    return atomic_load_explicit((const _Atomic(void*)*)ptr,
+    return atomic_load_explicit((const _Atomic(void*)*)obj,
                                 memory_order_relaxed);
 }
 
 static inline void
-_Py_atomic_store_int(int *ptr, int value)
+_Py_atomic_store_int(int *obj, int value)
 {
     _Py_USING_STD
-    atomic_store((_Atomic(int)*)ptr, value);
+    atomic_store((_Atomic(int)*)obj, value);
 }
 
 static inline void
-_Py_atomic_store_int8(int8_t *ptr, int8_t value)
+_Py_atomic_store_int8(int8_t *obj, int8_t value)
 {
     _Py_USING_STD
-    atomic_store((_Atomic(int8_t)*)ptr, value);
+    atomic_store((_Atomic(int8_t)*)obj, value);
 }
 
 static inline void
-_Py_atomic_store_int16(int16_t *ptr, int16_t value)
+_Py_atomic_store_int16(int16_t *obj, int16_t value)
 {
     _Py_USING_STD
-    atomic_store((_Atomic(int16_t)*)ptr, value);
+    atomic_store((_Atomic(int16_t)*)obj, value);
 }
 
 static inline void
-_Py_atomic_store_int32(int32_t *ptr, int32_t value)
+_Py_atomic_store_int32(int32_t *obj, int32_t value)
 {
     _Py_USING_STD
-    atomic_store((_Atomic(int32_t)*)ptr, value);
+    atomic_store((_Atomic(int32_t)*)obj, value);
 }
 
 static inline void
-_Py_atomic_store_int64(int64_t *ptr, int64_t value)
+_Py_atomic_store_int64(int64_t *obj, int64_t value)
 {
     _Py_USING_STD
-    atomic_store((_Atomic(int64_t)*)ptr, value);
+    atomic_store((_Atomic(int64_t)*)obj, value);
 }
 
 static inline void
-_Py_atomic_store_intptr(intptr_t *ptr, intptr_t value)
+_Py_atomic_store_intptr(intptr_t *obj, intptr_t value)
 {
     _Py_USING_STD
-    atomic_store((_Atomic(intptr_t)*)ptr, value);
+    atomic_store((_Atomic(intptr_t)*)obj, value);
 }
 
 static inline void
-_Py_atomic_store_uint8(uint8_t *ptr, uint8_t value)
+_Py_atomic_store_uint8(uint8_t *obj, uint8_t value)
 {
     _Py_USING_STD
-    atomic_store((_Atomic(uint8_t)*)ptr, value);
+    atomic_store((_Atomic(uint8_t)*)obj, value);
 }
 
 static inline void
-_Py_atomic_store_uint16(uint16_t *ptr, uint16_t value)
+_Py_atomic_store_uint16(uint16_t *obj, uint16_t value)
 {
     _Py_USING_STD
-    atomic_store((_Atomic(uint16_t)*)ptr, value);
+    atomic_store((_Atomic(uint16_t)*)obj, value);
 }
 
 static inline void
-_Py_atomic_store_uint32(uint32_t *ptr, uint32_t value)
+_Py_atomic_store_uint32(uint32_t *obj, uint32_t value)
 {
     _Py_USING_STD
-    atomic_store((_Atomic(uint32_t)*)ptr, value);
+    atomic_store((_Atomic(uint32_t)*)obj, value);
 }
 
 static inline void
-_Py_atomic_store_uint64(uint64_t *ptr, uint64_t value)
+_Py_atomic_store_uint64(uint64_t *obj, uint64_t value)
 {
     _Py_USING_STD
-    atomic_store((_Atomic(uint64_t)*)ptr, value);
+    atomic_store((_Atomic(uint64_t)*)obj, value);
 }
 
 static inline void
-_Py_atomic_store_uintptr(uintptr_t *ptr, uintptr_t value)
+_Py_atomic_store_uintptr(uintptr_t *obj, uintptr_t value)
 {
     _Py_USING_STD
-    atomic_store((_Atomic(uintptr_t)*)ptr, value);
+    atomic_store((_Atomic(uintptr_t)*)obj, value);
 }
 
 static inline void
-_Py_atomic_store_uint(unsigned int *ptr, unsigned int value)
+_Py_atomic_store_uint(unsigned int *obj, unsigned int value)
 {
     _Py_USING_STD
-    atomic_store((_Atomic(unsigned int)*)ptr, value);
+    atomic_store((_Atomic(unsigned int)*)obj, value);
 }
 
 static inline void
-_Py_atomic_store_ptr(void *ptr, void *value)
+_Py_atomic_store_ptr(void *obj, void *value)
 {
     _Py_USING_STD
-    atomic_store((_Atomic(void*)*)ptr, value);
+    atomic_store((_Atomic(void*)*)obj, value);
 }
 
 static inline void
-_Py_atomic_store_ssize(Py_ssize_t *ptr, Py_ssize_t value)
+_Py_atomic_store_ssize(Py_ssize_t *obj, Py_ssize_t value)
 {
     _Py_USING_STD
-    atomic_store((_Atomic(Py_ssize_t)*)ptr, value);
+    atomic_store((_Atomic(Py_ssize_t)*)obj, value);
 }
 
 static inline void
-_Py_atomic_store_int_relaxed(int *ptr, int value)
+_Py_atomic_store_int_relaxed(int *obj, int value)
 {
     _Py_USING_STD
-    atomic_store_explicit((_Atomic(int)*)ptr, value,
+    atomic_store_explicit((_Atomic(int)*)obj, value,
                           memory_order_relaxed);
 }
 
 static inline void
-_Py_atomic_store_int8_relaxed(int8_t *ptr, int8_t value)
+_Py_atomic_store_int8_relaxed(int8_t *obj, int8_t value)
 {
     _Py_USING_STD
-    atomic_store_explicit((_Atomic(int8_t)*)ptr, value,
+    atomic_store_explicit((_Atomic(int8_t)*)obj, value,
                           memory_order_relaxed);
 }
 
 static inline void
-_Py_atomic_store_int16_relaxed(int16_t *ptr, int16_t value)
+_Py_atomic_store_int16_relaxed(int16_t *obj, int16_t value)
 {
     _Py_USING_STD
-    atomic_store_explicit((_Atomic(int16_t)*)ptr, value,
+    atomic_store_explicit((_Atomic(int16_t)*)obj, value,
                           memory_order_relaxed);
 }
 
 static inline void
-_Py_atomic_store_int32_relaxed(int32_t *ptr, int32_t value)
+_Py_atomic_store_int32_relaxed(int32_t *obj, int32_t value)
 {
     _Py_USING_STD
-    atomic_store_explicit((_Atomic(int32_t)*)ptr, value,
+    atomic_store_explicit((_Atomic(int32_t)*)obj, value,
                           memory_order_relaxed);
 }
 
 static inline void
-_Py_atomic_store_int64_relaxed(int64_t *ptr, int64_t value)
+_Py_atomic_store_int64_relaxed(int64_t *obj, int64_t value)
 {
     _Py_USING_STD
-    atomic_store_explicit((_Atomic(int64_t)*)ptr, value,
+    atomic_store_explicit((_Atomic(int64_t)*)obj, value,
                           memory_order_relaxed);
 }
 
 static inline void
-_Py_atomic_store_intptr_relaxed(intptr_t *ptr, intptr_t value)
+_Py_atomic_store_intptr_relaxed(intptr_t *obj, intptr_t value)
 {
     _Py_USING_STD
-    atomic_store_explicit((_Atomic(intptr_t)*)ptr, value,
+    atomic_store_explicit((_Atomic(intptr_t)*)obj, value,
                           memory_order_relaxed);
 }
 
 static inline void
-_Py_atomic_store_uint8_relaxed(uint8_t *ptr, uint8_t value)
+_Py_atomic_store_uint8_relaxed(uint8_t *obj, uint8_t value)
 {
     _Py_USING_STD
-    atomic_store_explicit((_Atomic(uint8_t)*)ptr, value,
+    atomic_store_explicit((_Atomic(uint8_t)*)obj, value,
                           memory_order_relaxed);
 }
 
 static inline void
-_Py_atomic_store_uint16_relaxed(uint16_t *ptr, uint16_t value)
+_Py_atomic_store_uint16_relaxed(uint16_t *obj, uint16_t value)
 {
     _Py_USING_STD
-    atomic_store_explicit((_Atomic(uint16_t)*)ptr, value,
+    atomic_store_explicit((_Atomic(uint16_t)*)obj, value,
                           memory_order_relaxed);
 }
 
 static inline void
-_Py_atomic_store_uint32_relaxed(uint32_t *ptr, uint32_t value)
+_Py_atomic_store_uint32_relaxed(uint32_t *obj, uint32_t value)
 {
     _Py_USING_STD
-    atomic_store_explicit((_Atomic(uint32_t)*)ptr, value,
+    atomic_store_explicit((_Atomic(uint32_t)*)obj, value,
                           memory_order_relaxed);
 }
 
 static inline void
-_Py_atomic_store_uint64_relaxed(uint64_t *ptr, uint64_t value)
+_Py_atomic_store_uint64_relaxed(uint64_t *obj, uint64_t value)
 {
     _Py_USING_STD
-    atomic_store_explicit((_Atomic(uint64_t)*)ptr, value,
+    atomic_store_explicit((_Atomic(uint64_t)*)obj, value,
                           memory_order_relaxed);
 }
 
 static inline void
-_Py_atomic_store_uintptr_relaxed(uintptr_t *ptr, uintptr_t value)
+_Py_atomic_store_uintptr_relaxed(uintptr_t *obj, uintptr_t value)
 {
     _Py_USING_STD
-    atomic_store_explicit((_Atomic(uintptr_t)*)ptr, value,
+    atomic_store_explicit((_Atomic(uintptr_t)*)obj, value,
                           memory_order_relaxed);
 }
 
 static inline void
-_Py_atomic_store_uint_relaxed(unsigned int *ptr, unsigned int value)
+_Py_atomic_store_uint_relaxed(unsigned int *obj, unsigned int value)
 {
     _Py_USING_STD
-    atomic_store_explicit((_Atomic(unsigned int)*)ptr, value,
+    atomic_store_explicit((_Atomic(unsigned int)*)obj, value,
                           memory_order_relaxed);
 }
 
 static inline void
-_Py_atomic_store_ptr_relaxed(void *ptr, void *value)
+_Py_atomic_store_ptr_relaxed(void *obj, void *value)
 {
     _Py_USING_STD
-    atomic_store_explicit((_Atomic(void*)*)ptr, value,
+    atomic_store_explicit((_Atomic(void*)*)obj, value,
                           memory_order_relaxed);
 }
 
 static inline void
-_Py_atomic_store_ssize_relaxed(Py_ssize_t *ptr, Py_ssize_t value)
+_Py_atomic_store_ssize_relaxed(Py_ssize_t *obj, Py_ssize_t value)
 {
     _Py_USING_STD
-    atomic_store_explicit((_Atomic(Py_ssize_t)*)ptr, value,
+    atomic_store_explicit((_Atomic(Py_ssize_t)*)obj, value,
                           memory_order_relaxed);
 }
 
 static inline void *
-_Py_atomic_load_ptr_acquire(const void *ptr)
+_Py_atomic_load_ptr_acquire(const void *obj)
 {
     _Py_USING_STD
-    return atomic_load_explicit((const _Atomic(void*)*)ptr,
+    return atomic_load_explicit((const _Atomic(void*)*)obj,
                                 memory_order_acquire);
 }
 
 static inline void
-_Py_atomic_store_ptr_release(void *ptr, void *value)
+_Py_atomic_store_ptr_release(void *obj, void *value)
 {
     _Py_USING_STD
-    atomic_store_explicit((_Atomic(void*)*)ptr, value,
+    atomic_store_explicit((_Atomic(void*)*)obj, value,
                           memory_order_release);
 }