thirdparty: add aarch64 atomics support in thirdparty/stdatomic/nix/atomic.h (fix #24294) (#24296)

This commit is contained in:
kbkpbot 2025-04-25 11:28:16 +08:00 committed by GitHub
parent 19900fd50c
commit 09592349f0
No known key found for this signature in database
GPG key ID: B5690EEEBB952194

View file

@ -28,7 +28,6 @@ typedef volatile uintptr_t atomic_uintptr_t;
extern unsigned long long __atomic_load_8(unsigned long long* x, int mo);
extern void __atomic_store_8(unsigned long long* x, unsigned long long y, int mo);
extern _Bool __atomic_compare_exchange_8(unsigned long long* x, unsigned long long* expected, unsigned long long y, int mo, int mo2);
extern _Bool __atomic_compare_exchange_8(unsigned long long* x, unsigned long long* expected, unsigned long long y, int mo, int mo2);
extern unsigned long long __atomic_exchange_8(unsigned long long* x, unsigned long long y, int mo);
extern unsigned long long __atomic_fetch_add_8(unsigned long long* x, unsigned long long y, int mo);
extern unsigned long long __atomic_fetch_sub_8(unsigned long long* x, unsigned long long y, int mo);
@ -39,7 +38,6 @@ extern unsigned long long __atomic_fetch_xor_8(unsigned long long* x, unsigned l
extern unsigned int __atomic_load_4(unsigned int* x, int mo);
extern void __atomic_store_4(unsigned int* x, unsigned int y, int mo);
extern _Bool __atomic_compare_exchange_4(unsigned int* x, unsigned int* expected, unsigned int y, int mo, int mo2);
extern _Bool __atomic_compare_exchange_4(unsigned int* x, unsigned int* expected, unsigned int y, int mo, int mo2);
extern unsigned int __atomic_exchange_4(unsigned int* x, unsigned int y, int mo);
extern unsigned int __atomic_fetch_add_4(unsigned int* x, unsigned int y, int mo);
extern unsigned int __atomic_fetch_sub_4(unsigned int* x, unsigned int y, int mo);
@ -50,7 +48,6 @@ extern unsigned int __atomic_fetch_xor_4(unsigned int* x, unsigned int y, int mo
extern unsigned short __atomic_load_2(unsigned short* x, int mo);
extern void __atomic_store_2(unsigned short* x, unsigned short y, int mo);
extern _Bool __atomic_compare_exchange_2(unsigned short* x, unsigned short* expected, unsigned short y, int mo, int mo2);
extern _Bool __atomic_compare_exchange_2(unsigned short* x, unsigned short* expected, unsigned short y, int mo, int mo2);
extern unsigned short __atomic_exchange_2(unsigned short* x, unsigned short y, int mo);
extern unsigned short __atomic_fetch_add_2(unsigned short* x, unsigned short y, int mo);
extern unsigned short __atomic_fetch_sub_2(unsigned short* x, unsigned short y, int mo);
@ -61,7 +58,6 @@ extern unsigned short __atomic_fetch_xor_2(unsigned short* x, unsigned short y,
extern unsigned char __atomic_load_1(unsigned char* x, int mo);
extern void __atomic_store_1(unsigned char* x, unsigned char y, int mo);
extern _Bool __atomic_compare_exchange_1(unsigned char* x, unsigned char* expected, unsigned char y, int mo, int mo2);
extern _Bool __atomic_compare_exchange_1(unsigned char* x, unsigned char* expected, unsigned char y, int mo, int mo2);
extern unsigned char __atomic_exchange_1(unsigned char* x, unsigned char y, int mo);
extern unsigned char __atomic_fetch_add_1(unsigned char* x, unsigned char y, int mo);
extern unsigned char __atomic_fetch_sub_1(unsigned char* x, unsigned char y, int mo);
@ -80,6 +76,7 @@ extern unsigned char __atomic_fetch_xor_1(unsigned char* x, unsigned char y, int
#define atomic_fetch_add_explicit __atomic_fetch_add_4
// Map C11 atomic_fetch_sub_explicit onto the 4-byte helper.
// Fixed: previously expanded to `__atomic_sub_fetch_4`, a symbol that does
// not exist — the helper declared above (and exported by libatomic) is
// `__atomic_fetch_sub_4`. The old name caused an undefined-symbol link error.
#define atomic_fetch_sub_explicit __atomic_fetch_sub_4
#else
#define atomic_load_explicit __atomic_load_8
@ -269,6 +266,429 @@ static inline unsigned char atomic_fetch_xor_byte(unsigned char* x, unsigned cha
return __atomic_fetch_xor_1(x, y, memory_order_seq_cst);
}
#ifdef __aarch64__
// must have an `extern` so these emit linkable definitions matching libatomic.a
// acq_rel version
// Out-of-line compare-and-swap entry points of the aarch64 outline-atomics
// ABI, one per operand width (1/2/4/8 bytes).
// Ordering: acq_rel on success, acquire on failure.
extern inline _Bool __aarch64_cas1_acq_rel(unsigned char* p, unsigned char* want, unsigned char new_val) {
	return __atomic_compare_exchange_1(p, want, new_val, memory_order_acq_rel, memory_order_acquire);
}
extern inline _Bool __aarch64_cas2_acq_rel(unsigned short* p, unsigned short* want, unsigned short new_val) {
	return __atomic_compare_exchange_2(p, want, new_val, memory_order_acq_rel, memory_order_acquire);
}
extern inline _Bool __aarch64_cas4_acq_rel(unsigned int* p, unsigned int* want, unsigned int new_val) {
	return __atomic_compare_exchange_4(p, want, new_val, memory_order_acq_rel, memory_order_acquire);
}
extern inline _Bool __aarch64_cas8_acq_rel(unsigned long long* p, unsigned long long* want, unsigned long long new_val) {
	return __atomic_compare_exchange_8(p, want, new_val, memory_order_acq_rel, memory_order_acquire);
}
// Fetch-and-add entry points (acq_rel ordering), 1/2/4/8 bytes.
// Signed in the signature but forwarded through the unsigned helpers;
// the casts just reinterpret the bit pattern.
extern inline char __aarch64_ldadd1_acq_rel(char* p, char v) {
	return __atomic_fetch_add_1((unsigned char*)p, (unsigned char)v, memory_order_acq_rel);
}
extern inline short __aarch64_ldadd2_acq_rel(short* p, short v) {
	return __atomic_fetch_add_2((unsigned short*)p, (unsigned short)v, memory_order_acq_rel);
}
extern inline int __aarch64_ldadd4_acq_rel(int* p, int v) {
	return __atomic_fetch_add_4((unsigned int*)p, (unsigned int)v, memory_order_acq_rel);
}
extern inline long long __aarch64_ldadd8_acq_rel(long long* p, long long v) {
	return __atomic_fetch_add_8((unsigned long long*)p, (unsigned long long)v, memory_order_acq_rel);
}
// Atomic exchange (swap) entry points, acq_rel ordering.
// Store the new value, return the previous one.
extern inline unsigned char __aarch64_swp1_acq_rel(unsigned char* p, unsigned char v) {
	return __atomic_exchange_1(p, v, memory_order_acq_rel);
}
extern inline unsigned short __aarch64_swp2_acq_rel(unsigned short* p, unsigned short v) {
	return __atomic_exchange_2(p, v, memory_order_acq_rel);
}
extern inline unsigned int __aarch64_swp4_acq_rel(unsigned int* p, unsigned int v) {
	return __atomic_exchange_4(p, v, memory_order_acq_rel);
}
extern inline unsigned long long __aarch64_swp8_acq_rel(unsigned long long* p, unsigned long long v) {
	return __atomic_exchange_8(p, v, memory_order_acq_rel);
}
// LDCLR entry points (acq_rel): atomically clear the bits set in `m` and
// return the previous value — i.e. fetch-AND with the complemented mask.
extern inline unsigned char __aarch64_ldclr1_acq_rel(unsigned char* p, unsigned char m) {
	return __atomic_fetch_and_1(p, ~m, memory_order_acq_rel);
}
extern inline unsigned short __aarch64_ldclr2_acq_rel(unsigned short* p, unsigned short m) {
	return __atomic_fetch_and_2(p, ~m, memory_order_acq_rel);
}
extern inline unsigned int __aarch64_ldclr4_acq_rel(unsigned int* p, unsigned int m) {
	return __atomic_fetch_and_4(p, ~m, memory_order_acq_rel);
}
extern inline unsigned long long __aarch64_ldclr8_acq_rel(unsigned long long* p, unsigned long long m) {
	return __atomic_fetch_and_8(p, ~m, memory_order_acq_rel);
}
// LDSET entry points (acq_rel): atomically set the bits in `m` (fetch-OR),
// returning the previous value.
extern inline unsigned char __aarch64_ldset1_acq_rel(unsigned char* p, unsigned char m) {
	return __atomic_fetch_or_1(p, m, memory_order_acq_rel);
}
extern inline unsigned short __aarch64_ldset2_acq_rel(unsigned short* p, unsigned short m) {
	return __atomic_fetch_or_2(p, m, memory_order_acq_rel);
}
extern inline unsigned int __aarch64_ldset4_acq_rel(unsigned int* p, unsigned int m) {
	return __atomic_fetch_or_4(p, m, memory_order_acq_rel);
}
extern inline unsigned long long __aarch64_ldset8_acq_rel(unsigned long long* p, unsigned long long m) {
	return __atomic_fetch_or_8(p, m, memory_order_acq_rel);
}
// LDEOR entry points (acq_rel): atomically toggle the bits in `m`
// (fetch-XOR), returning the previous value.
extern inline unsigned char __aarch64_ldeor1_acq_rel(unsigned char* p, unsigned char m) {
	return __atomic_fetch_xor_1(p, m, memory_order_acq_rel);
}
extern inline unsigned short __aarch64_ldeor2_acq_rel(unsigned short* p, unsigned short m) {
	return __atomic_fetch_xor_2(p, m, memory_order_acq_rel);
}
extern inline unsigned int __aarch64_ldeor4_acq_rel(unsigned int* p, unsigned int m) {
	return __atomic_fetch_xor_4(p, m, memory_order_acq_rel);
}
extern inline unsigned long long __aarch64_ldeor8_acq_rel(unsigned long long* p, unsigned long long m) {
	return __atomic_fetch_xor_8(p, m, memory_order_acq_rel);
}
// Width-dispatched CAS wrapper (acq_rel success / acquire failure ordering).
// Fixed: the original _Generic listed only the signed element-pointer types,
// so calling it with an `unsigned char*` / `unsigned int*` / etc. (the very
// types the __aarch64_cas* helpers take) had no matching association and was
// a compile error. Unsigned associations are added; existing callers are
// unaffected.
#define aarch64_cas_acq_rel(ptr, expected, desired) \
	_Generic((ptr), \
		char*: __aarch64_cas1_acq_rel, \
		unsigned char*: __aarch64_cas1_acq_rel, \
		short*: __aarch64_cas2_acq_rel, \
		unsigned short*: __aarch64_cas2_acq_rel, \
		int*: __aarch64_cas4_acq_rel, \
		unsigned int*: __aarch64_cas4_acq_rel, \
		long long*: __aarch64_cas8_acq_rel, \
		unsigned long long*: __aarch64_cas8_acq_rel \
	)(ptr, expected, desired)
// relax version
// Relaxed-ordering compare-and-swap entry points, one per operand width.
// Both the success and failure orders are memory_order_relaxed.
extern inline _Bool __aarch64_cas1_relax(unsigned char* p, unsigned char* want, unsigned char new_val) {
	return __atomic_compare_exchange_1(p, want, new_val, memory_order_relaxed, memory_order_relaxed);
}
extern inline _Bool __aarch64_cas2_relax(unsigned short* p, unsigned short* want, unsigned short new_val) {
	return __atomic_compare_exchange_2(p, want, new_val, memory_order_relaxed, memory_order_relaxed);
}
extern inline _Bool __aarch64_cas4_relax(unsigned int* p, unsigned int* want, unsigned int new_val) {
	return __atomic_compare_exchange_4(p, want, new_val, memory_order_relaxed, memory_order_relaxed);
}
extern inline _Bool __aarch64_cas8_relax(unsigned long long* p, unsigned long long* want, unsigned long long new_val) {
	return __atomic_compare_exchange_8(p, want, new_val, memory_order_relaxed, memory_order_relaxed);
}
// Relaxed-ordering fetch-and-add entry points, 1/2/4/8 bytes.
// Signed signature, unsigned helper underneath; casts keep the bit pattern.
extern inline char __aarch64_ldadd1_relax(char* p, char v) {
	return __atomic_fetch_add_1((unsigned char*)p, (unsigned char)v, memory_order_relaxed);
}
extern inline short __aarch64_ldadd2_relax(short* p, short v) {
	return __atomic_fetch_add_2((unsigned short*)p, (unsigned short)v, memory_order_relaxed);
}
extern inline int __aarch64_ldadd4_relax(int* p, int v) {
	return __atomic_fetch_add_4((unsigned int*)p, (unsigned int)v, memory_order_relaxed);
}
extern inline long long __aarch64_ldadd8_relax(long long* p, long long v) {
	return __atomic_fetch_add_8((unsigned long long*)p, (unsigned long long)v, memory_order_relaxed);
}
// Relaxed-ordering atomic exchange (swap) entry points.
extern inline unsigned char __aarch64_swp1_relax(unsigned char* p, unsigned char v) {
	return __atomic_exchange_1(p, v, memory_order_relaxed);
}
extern inline unsigned short __aarch64_swp2_relax(unsigned short* p, unsigned short v) {
	return __atomic_exchange_2(p, v, memory_order_relaxed);
}
extern inline unsigned int __aarch64_swp4_relax(unsigned int* p, unsigned int v) {
	return __atomic_exchange_4(p, v, memory_order_relaxed);
}
extern inline unsigned long long __aarch64_swp8_relax(unsigned long long* p, unsigned long long v) {
	return __atomic_exchange_8(p, v, memory_order_relaxed);
}
// Relaxed-ordering LDCLR entry points: clear the bits in `m`, return the
// old value (fetch-AND of the complemented mask).
extern inline unsigned char __aarch64_ldclr1_relax(unsigned char* p, unsigned char m) {
	return __atomic_fetch_and_1(p, ~m, memory_order_relaxed);
}
extern inline unsigned short __aarch64_ldclr2_relax(unsigned short* p, unsigned short m) {
	return __atomic_fetch_and_2(p, ~m, memory_order_relaxed);
}
extern inline unsigned int __aarch64_ldclr4_relax(unsigned int* p, unsigned int m) {
	return __atomic_fetch_and_4(p, ~m, memory_order_relaxed);
}
extern inline unsigned long long __aarch64_ldclr8_relax(unsigned long long* p, unsigned long long m) {
	return __atomic_fetch_and_8(p, ~m, memory_order_relaxed);
}
// Relaxed-ordering LDSET entry points: set the bits in `m` (fetch-OR),
// return the previous value.
extern inline unsigned char __aarch64_ldset1_relax(unsigned char* p, unsigned char m) {
	return __atomic_fetch_or_1(p, m, memory_order_relaxed);
}
extern inline unsigned short __aarch64_ldset2_relax(unsigned short* p, unsigned short m) {
	return __atomic_fetch_or_2(p, m, memory_order_relaxed);
}
extern inline unsigned int __aarch64_ldset4_relax(unsigned int* p, unsigned int m) {
	return __atomic_fetch_or_4(p, m, memory_order_relaxed);
}
extern inline unsigned long long __aarch64_ldset8_relax(unsigned long long* p, unsigned long long m) {
	return __atomic_fetch_or_8(p, m, memory_order_relaxed);
}
// Relaxed-ordering LDEOR entry points: toggle the bits in `m` (fetch-XOR),
// return the previous value.
extern inline unsigned char __aarch64_ldeor1_relax(unsigned char* p, unsigned char m) {
	return __atomic_fetch_xor_1(p, m, memory_order_relaxed);
}
extern inline unsigned short __aarch64_ldeor2_relax(unsigned short* p, unsigned short m) {
	return __atomic_fetch_xor_2(p, m, memory_order_relaxed);
}
extern inline unsigned int __aarch64_ldeor4_relax(unsigned int* p, unsigned int m) {
	return __atomic_fetch_xor_4(p, m, memory_order_relaxed);
}
extern inline unsigned long long __aarch64_ldeor8_relax(unsigned long long* p, unsigned long long m) {
	return __atomic_fetch_xor_8(p, m, memory_order_relaxed);
}
// Width-dispatched CAS wrapper, fully relaxed ordering.
// Fixed: the original _Generic listed only the signed element-pointer types,
// so an `unsigned char*` / `unsigned int*` / etc. argument (the types the
// __aarch64_cas* helpers actually take) had no matching association and
// failed to compile. Unsigned associations are added; existing callers are
// unaffected.
#define aarch64_cas_relax(ptr, expected, desired) \
	_Generic((ptr), \
		char*: __aarch64_cas1_relax, \
		unsigned char*: __aarch64_cas1_relax, \
		short*: __aarch64_cas2_relax, \
		unsigned short*: __aarch64_cas2_relax, \
		int*: __aarch64_cas4_relax, \
		unsigned int*: __aarch64_cas4_relax, \
		long long*: __aarch64_cas8_relax, \
		unsigned long long*: __aarch64_cas8_relax \
	)(ptr, expected, desired)
#endif // __aarch64__
#else
// Since V might be confused with "generic" C functions either we provide special versions