Mirror of https://github.com/vlang/v.git, synced 2025-09-13 14:32:26 +03:00
sync.stdatomic: fix bug with add() and sub() returning the new values, add voidptr support, add swap() and compare_and_swap() (#24685)
parent 174065f16f
commit 0c495d07d7
5 changed files with 321 additions and 91 deletions
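
The title covers three separate changes: add() and sub() were handing back the value after the operation instead of the value before it, atomics over voidptr were added, and swap() plus compare_and_swap() are new. For orientation, the C11 primitives underneath behave as in the sketch below; mapping swap() to atomic_exchange and compare_and_swap() to atomic_compare_exchange_strong is an assumption read off the title, not something the excerpted diffs show.

#include <stdatomic.h>
#include <stdint.h>
#include <stdio.h>

int main(void) {
	_Atomic uint64_t counter = 10;

	// Fetch-and-add returns the value held *before* the addition; a wrapper
	// that reports 15 here instead of 10 is returning the new value, which
	// is the bug the commit title describes for add()/sub().
	uint64_t before_add = atomic_fetch_add(&counter, 5);

	// "swap": store a new value, get the previous one back.
	uint64_t before_swap = atomic_exchange(&counter, 100);

	// "compare and swap": store 7 only if the value is still 100, and report
	// whether the store actually happened.
	uint64_t expected = 100;
	_Bool swapped = atomic_compare_exchange_strong(&counter, &expected, 7);

	printf("before_add=%llu before_swap=%llu swapped=%d now=%llu\n",
		(unsigned long long)before_add, (unsigned long long)before_swap,
		(int)swapped, (unsigned long long)atomic_load(&counter));
	return 0;
}
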
thirdparty/stdatomic/nix/atomic.h (vendored), 40 changed lines
@@ -693,35 +693,35 @@ extern inline unsigned long long __aarch64_ldeor8_relax(unsigned long long*ptr,
 // Since V might be confused with "generic" C functions either we provide special versions
 // for gcc/clang, too
-static inline unsigned long long atomic_load_u64(unsigned long long* x) {
-	return atomic_load_explicit((_Atomic (unsigned long long)*)x, memory_order_seq_cst);
+static inline unsigned long long atomic_load_u64(uint64_t* x) {
+	return atomic_load_explicit((_Atomic (uint64_t)*)x, memory_order_seq_cst);
 }
-static inline void atomic_store_u64(unsigned long long* x, unsigned long long y) {
-	atomic_store_explicit((_Atomic(unsigned long long)*)x, y, memory_order_seq_cst);
+static inline void atomic_store_u64(uint64_t* x, uint64_t y) {
+	atomic_store_explicit((_Atomic(uint64_t)*)x, y, memory_order_seq_cst);
 }
-static inline int atomic_compare_exchange_weak_u64(unsigned long long* x, unsigned long long* expected, unsigned long long y) {
-	return (int)atomic_compare_exchange_weak_explicit((_Atomic(unsigned long long)*)x, expected, y, memory_order_seq_cst, memory_order_seq_cst);
+static inline int atomic_compare_exchange_weak_u64(uint64_t* x, uint64_t* expected, uint64_t y) {
+	return (int)atomic_compare_exchange_weak_explicit((_Atomic(uint64_t)*)x, expected, y, memory_order_seq_cst, memory_order_seq_cst);
 }
-static inline int atomic_compare_exchange_strong_u64(unsigned long long* x, unsigned long long* expected, unsigned long long y) {
-	return (int)atomic_compare_exchange_strong_explicit((_Atomic(unsigned long long)*)x, expected, y, memory_order_seq_cst, memory_order_seq_cst);
+static inline int atomic_compare_exchange_strong_u64(uint64_t* x, uint64_t* expected, uint64_t y) {
+	return (int)atomic_compare_exchange_strong_explicit((_Atomic(uint64_t)*)x, expected, y, memory_order_seq_cst, memory_order_seq_cst);
 }
-static inline unsigned long long atomic_exchange_u64(unsigned long long* x, unsigned long long y) {
-	return atomic_exchange_explicit((_Atomic(unsigned long long)*)x, y, memory_order_seq_cst);
+static inline unsigned long long atomic_exchange_u64(uint64_t* x, uint64_t y) {
+	return atomic_exchange_explicit((_Atomic(uint64_t)*)x, y, memory_order_seq_cst);
 }
-static inline unsigned long long atomic_fetch_add_u64(unsigned long long* x, unsigned long long y) {
-	return atomic_fetch_add_explicit((_Atomic(unsigned long long)*)x, y, memory_order_seq_cst);
+static inline unsigned long long atomic_fetch_add_u64(uint64_t* x, uint64_t y) {
+	return atomic_fetch_add_explicit((_Atomic(uint64_t)*)x, y, memory_order_seq_cst);
 }
-static inline unsigned long long atomic_fetch_sub_u64(unsigned long long* x, unsigned long long y) {
-	return atomic_fetch_sub_explicit((_Atomic(unsigned long long)*)x, y, memory_order_seq_cst);
+static inline unsigned long long atomic_fetch_sub_u64(uint64_t* x, uint64_t y) {
+	return atomic_fetch_sub_explicit((_Atomic(uint64_t)*)x, y, memory_order_seq_cst);
 }
-static inline unsigned long long atomic_fetch_and_u64(unsigned long long* x, unsigned long long y) {
-	return atomic_fetch_and_explicit((_Atomic(unsigned long long)*)x, y, memory_order_seq_cst);
+static inline unsigned long long atomic_fetch_and_u64(uint64_t* x, uint64_t y) {
+	return atomic_fetch_and_explicit((_Atomic(uint64_t)*)x, y, memory_order_seq_cst);
 }
-static inline unsigned long long atomic_fetch_or_u64(unsigned long long* x, unsigned long long y) {
-	return atomic_fetch_or_explicit((_Atomic(unsigned long long)*)x, y, memory_order_seq_cst);
+static inline unsigned long long atomic_fetch_or_u64(uint64_t* x, uint64_t y) {
+	return atomic_fetch_or_explicit((_Atomic(uint64_t)*)x, y, memory_order_seq_cst);
 }
-static inline unsigned long long atomic_fetch_xor_u64(unsigned long long* x, unsigned long long y) {
-	return atomic_fetch_xor_explicit((_Atomic(unsigned long long)*)x, y, memory_order_seq_cst);
+static inline unsigned long long atomic_fetch_xor_u64(uint64_t* x, uint64_t y) {
+	return atomic_fetch_xor_explicit((_Atomic(uint64_t)*)x, y, memory_order_seq_cst);
 }
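
The hunk above only retypes the parameters from unsigned long long* to uint64_t*; the function names, return types and the seq_cst ordering are unchanged. As a usage sketch, assuming this header (or an equivalent declaring these wrappers) has been included, the weak compare-exchange wrapper is normally driven from a retry loop; the saturating-add logic here is purely illustrative:

#include <stdint.h>
// Assumes thirdparty/stdatomic/nix/atomic.h (declaring atomic_load_u64 and
// atomic_compare_exchange_weak_u64) has already been included.

static inline uint64_t saturating_add_u64(uint64_t* target, uint64_t delta, uint64_t limit) {
	uint64_t expected = atomic_load_u64(target);
	for (;;) {
		uint64_t desired = expected + delta;
		if (desired > limit) {
			desired = limit;
		}
		// On success the previous value is left in `expected`; on failure
		// `expected` is refreshed with the current value and we retry
		// (the weak variant may also fail spuriously, which the loop absorbs).
		if (atomic_compare_exchange_weak_u64(target, &expected, desired)) {
			return expected;
		}
	}
}
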
thirdparty/stdatomic/win/atomic.h (vendored), 23 changed lines
@@ -294,8 +294,20 @@ static inline int atomic_compare_exchange_strong_u32(unsigned volatile * object,
 #else
 
+#define InterlockedExchange16 ManualInterlockedExchange16
 #define InterlockedExchangeAdd16 ManualInterlockedExchangeAdd16
 
+static inline uint16_t ManualInterlockedExchange16(volatile uint16_t* object, uint16_t desired) {
+	__asm__ __volatile__ (
+		"xchgw %0, %1"
+		: "+r" (desired),
+		  "+m" (*object)
+		:
+		: "memory"
+	);
+	return desired;
+}
+
 static inline unsigned short ManualInterlockedExchangeAdd16(unsigned short volatile* Addend, unsigned short Value) {
 	__asm__ __volatile__ (
 		"lock xaddw %w[value], %[mem]"
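
The view cuts the ManualInterlockedExchangeAdd16 body off right after its asm template. For reference, a self-contained sketch of how such an XADD-based fallback is commonly completed is shown below; the operand constraints are an assumption, not necessarily what this commit contains. LOCK XADD leaves the previous value of the destination in the source register, which matches the old-value contract of InterlockedExchangeAdd16.

// Hypothetical sketch (x86/x86_64, GCC/Clang inline asm), not the commit's code.
static inline unsigned short SketchInterlockedExchangeAdd16(unsigned short volatile* Addend,
		unsigned short Value) {
	__asm__ __volatile__ (
		"lock xaddw %w[value], %[mem]"
		: [value] "+r" (Value),  // in: amount to add; out: previous *Addend
		  [mem] "+m" (*Addend)   // the 16-bit word being updated
		:
		: "memory"
	);
	return Value;                // previous value of *Addend
}
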
@@ -385,12 +397,23 @@ static inline int atomic_compare_exchange_strong_u16(unsigned short volatile * o
 #else
 
+#define InterlockedExchange8 ManualInterlockedExchange8
 #define InterlockedCompareExchange8 ManualInterlockedCompareExchange8
 #define InterlockedExchangeAdd8 ManualInterlockedExchangeAdd8
 #define InterlockedOr8 ManualInterlockedOr8
 #define InterlockedXor8 ManualInterlockedXor8
 #define InterlockedAnd8 ManualInterlockedAnd8
 
+static inline char ManualInterlockedExchange8(char volatile* object, char desired) {
+	__asm__ __volatile__ (
+		"xchgb %0, %1"
+		: "+q" (desired), "+m" (*object)
+		:
+		: "memory"
+	);
+	return desired;
+}
+
 static inline unsigned char ManualInterlockedCompareExchange8(unsigned char volatile * dest, unsigned char exchange, unsigned char comparand) {
 	unsigned char result;
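
The second hunk is likewise truncated right after unsigned char result; inside ManualInterlockedCompareExchange8. A typical CMPXCHG-based completion of that fallback might look like the sketch below (an assumption, not the commit's exact code): CMPXCHG compares AL with the destination byte, stores the exchange value on a match, and always leaves the destination's original value in AL, which is what the function returns.

// Hypothetical sketch (x86/x86_64, GCC/Clang inline asm), not the commit's code.
static inline unsigned char SketchInterlockedCompareExchange8(unsigned char volatile* dest,
		unsigned char exchange, unsigned char comparand) {
	unsigned char result;
	__asm__ __volatile__ (
		"lock cmpxchgb %2, %1"
		: "=a" (result),    // AL on exit: original value of *dest
		  "+m" (*dest)      // the byte being updated
		: "q" (exchange),   // any byte register: value stored on a match
		  "0" (comparand)   // ties comparand to operand 0, i.e. AL on entry
		: "memory"
	);
	return result;              // compare against comparand to see if it swapped
}
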