# define tsan_load(ptr) atomic_load_explicit((ptr), memory_order_relaxed)
# define tsan_store(ptr, val) atomic_store_explicit((ptr), (val), memory_order_relaxed)
# define tsan_counter(ptr) atomic_fetch_add_explicit((ptr), 1, memory_order_relaxed)
+# define tsan_decr(ptr) atomic_fetch_add_explicit((ptr), -1, memory_order_relaxed)
# define tsan_ld_acq(ptr) atomic_load_explicit((ptr), memory_order_acquire)
# define tsan_st_rel(ptr, val) atomic_store_explicit((ptr), (val), memory_order_release)
# endif
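/*
 * tsan_load/tsan_store and the tsan_counter/tsan_decr pair use
 * memory_order_relaxed: the access itself is atomic, but no ordering is
 * imposed on surrounding memory operations. tsan_ld_acq and tsan_st_rel
 * are the acquire/release pair for the cases where one thread publishes
 * data that another thread must observe after reading the shared variable.
 */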
# define tsan_load(ptr) __atomic_load_n((ptr), __ATOMIC_RELAXED)
# define tsan_store(ptr, val) __atomic_store_n((ptr), (val), __ATOMIC_RELAXED)
# define tsan_counter(ptr) __atomic_fetch_add((ptr), 1, __ATOMIC_RELAXED)
+# define tsan_decr(ptr) __atomic_fetch_add((ptr), -1, __ATOMIC_RELAXED)
# define tsan_ld_acq(ptr) __atomic_load_n((ptr), __ATOMIC_ACQUIRE)
# define tsan_st_rel(ptr, val) __atomic_store_n((ptr), (val), __ATOMIC_RELEASE)
# endif
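/*
 * Minimal usage sketch, not taken from the patch itself: how a caller
 * might pair the new tsan_decr() with tsan_counter() and tsan_load() for
 * a racy-but-benign in-flight counter. The struct and function names
 * (demo_stats, demo_begin, demo_end, demo_inflight) are invented for this
 * example, and the _Atomic qualifier assumes the C11 <stdatomic.h> branch
 * is in effect; the other branches expect a volatile-qualified field.
 */
struct demo_stats {
    _Atomic int inflight;                /* operations currently running */
};

static void demo_begin(struct demo_stats *st)
{
    tsan_counter(&st->inflight);         /* relaxed fetch-add of +1 */
}

static void demo_end(struct demo_stats *st)
{
    tsan_decr(&st->inflight);            /* relaxed fetch-add of -1 */
}

static int demo_inflight(struct demo_stats *st)
{
    return tsan_load(&st->inflight);     /* relaxed snapshot, e.g. for stats */
}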
# pragma intrinsic(_InterlockedExchangeAdd64)
# define tsan_counter(ptr) (sizeof(*(ptr)) == 8 ? _InterlockedExchangeAdd64((ptr), 1) \
: _InterlockedExchangeAdd((ptr), 1))
+# define tsan_decr(ptr) (sizeof(*(ptr)) == 8 ? _InterlockedExchangeAdd64((ptr), -1) \
+ : _InterlockedExchangeAdd((ptr), -1))
# else
# define tsan_counter(ptr) _InterlockedExchangeAdd((ptr), 1)
+# define tsan_decr(ptr) _InterlockedExchangeAdd((ptr), -1)
# endif
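/*
 * In the Interlocked branch above, the sizeof(*(ptr)) test selects the
 * 64-bit intrinsic for 8-byte counters and the 32-bit one otherwise; both
 * _InterlockedExchangeAdd and _InterlockedExchangeAdd64 return the value
 * the counter held before the addition, matching the fetch-add semantics
 * of the stdatomic and __atomic branches.
 */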
# if !defined(_ISO_VOLATILE)
# define tsan_ld_acq(ptr) (*(ptr))
# define tsan_load(ptr) (*(ptr))
# define tsan_store(ptr, val) (*(ptr) = (val))
# define tsan_counter(ptr) ((*(ptr))++)
+# define tsan_decr(ptr) ((*(ptr))--)
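/*
 * This last-resort branch provides no real atomicity: ((*(ptr))++) and
 * ((*(ptr))--) are ordinary read-modify-write sequences on a volatile
 * object, so concurrent tsan_counter()/tsan_decr() calls can lose updates.
 */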
/*
 * Lack of tsan_ld_acq and tsan_st_rel means that compiler support is not
* sophisticated enough to support them. Code that relies on them should be