#if !defined(WIN32_LEAN_AND_MEAN)
#define WIN32_LEAN_AND_MEAN
#endif
#include <windows.h>

#define AkThreadYield() Sleep(1);
#if defined( _M_ARM )
#define AK_ATOMIC_FENCE_FULL_BARRIER() __dmb( _ARM_BARRIER_ISH )
#elif defined( _M_ARM64 )
#define AK_ATOMIC_FENCE_FULL_BARRIER() __dmb( _ARM64_BARRIER_ISH )
#else
#define AK_ATOMIC_FENCE_FULL_BARRIER() MemoryBarrier();
#endif
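/* Illustrative usage sketch, not part of the original header: publishing data to
 * another thread with the full fence defined above. The names g_payload,
 * g_readyFlag and Publish are hypothetical; AkAtomic32 is the volatile 32-bit
 * type this header defines. */
static int        g_payload   = 0;
static AkAtomic32 g_readyFlag = 0;

static void Publish( int value )
{
    g_payload = value;                  /* write the data first                           */
    AK_ATOMIC_FENCE_FULL_BARRIER();     /* full barrier: data becomes visible before flag */
    g_readyFlag = 1;                    /* volatile store publishes the ready flag        */
}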
#if defined( _M_ARM ) || defined( _M_ARM64 )
/* 32-bit atomics: the arithmetic and bitwise helpers return the updated value;
   AkAtomicCas32 returns 1 when *pDest matched 'expected' and was replaced by
   'proposed', 0 otherwise. */
__forceinline long AkAtomicDec32( AkAtomic32* pValue ) { return InterlockedExchangeAdd( ( volatile long* )pValue, -1 ) - 1; }
__forceinline long AkAtomicAdd32( AkAtomic32* pDest, long value ) { return InterlockedExchangeAdd( ( volatile long* )pDest, value ) + value; }
__forceinline long AkAtomicSub32( AkAtomic32* pDest, long value ) { return InterlockedExchangeAdd( ( volatile long* )pDest, -value ) - value; }
__forceinline long AkAtomicAnd32( AkAtomic32* pDest, long value ) { return InterlockedAnd( ( volatile long* )pDest, value ) & value; }
__forceinline long AkAtomicOr32( AkAtomic32* pDest, long value ) { return InterlockedOr( ( volatile long* )pDest, value ) | value; }
__forceinline int AkAtomicCas32( AkAtomic32* pDest, long proposed, long expected ) { return InterlockedCompareExchange( ( volatile long* )pDest, proposed, expected ) == expected ? 1 : 0; }
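/* Illustrative usage sketch, not part of the original header: a saturating
 * increment built from the 32-bit primitives above. AkAtomicCas32 returns 1 only
 * when *pCounter still held 'cur' and was replaced by 'cur + 1', so the loop
 * retries until its update lands or the cap is reached. SaturatingInc32 is a
 * hypothetical name. */
static long SaturatingInc32( AkAtomic32* pCounter, long cap )
{
    for ( ;; )
    {
        long cur = *pCounter;                         /* read the current value          */
        if ( cur >= cap )
            return cur;                               /* already at the cap, do not bump */
        if ( AkAtomicCas32( pCounter, cur + 1, cur ) )
            return cur + 1;                           /* our CAS won the race            */
        AkThreadYield();                              /* lost the race, back off, retry  */
    }
}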
#if defined( _M_ARM ) || defined( _M_ARM64 )
/* 64-bit atomics, built on the Interlocked*64 intrinsics; same conventions as the
   32-bit helpers above. */
__forceinline void AkAtomicStore64( AkAtomic64* pDest, long long value ) { InterlockedExchange64( ( volatile long long* )pDest, value ); }
__forceinline long long AkAtomicInc64( AkAtomic64* pValue ) { return InterlockedExchangeAdd64( ( volatile long long* )pValue, 1 ) + 1; }
__forceinline long long AkAtomicDec64( AkAtomic64* pValue ) { return InterlockedExchangeAdd64( ( volatile long long* )pValue, -1 ) - 1; }
__forceinline long long AkAtomicExchange64( AkAtomic64* pDest, long long value ) { return InterlockedExchange64( ( volatile long long* )pDest, value ); }
__forceinline long long AkAtomicAdd64( AkAtomic64* pDest, long long value ) { return InterlockedExchangeAdd64( ( volatile long long* )pDest, value ) + value; }
__forceinline long long AkAtomicSub64( AkAtomic64* pDest, long long value ) { return InterlockedExchangeAdd64( ( volatile long long* )pDest, -value ) - value; }
__forceinline long long AkAtomicAnd64( AkAtomic64* pDest, long long value ) { return InterlockedAnd64( ( volatile long long* )pDest, value ) & value; }
__forceinline long long AkAtomicOr64( AkAtomic64* pDest, long long value ) { return InterlockedOr64( ( volatile long long* )pDest, value ) | value; }
__forceinline int AkAtomicCas64( AkAtomic64* pDest, long long proposed, long long expected ) { return InterlockedCompareExchange64( ( volatile long long* )pDest, proposed, expected ) == expected ? 1 : 0; }
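/* Illustrative usage sketch, not part of the original header: maintaining a
 * 64-bit high-water mark with AkAtomicCas64. UpdatePeak64 is a hypothetical
 * name; only the AkAtomic* calls come from this header. */
static void UpdatePeak64( AkAtomic64* pPeak, long long observed )
{
    long long cur = *pPeak;
    while ( observed > cur )                          /* only raise the stored peak       */
    {
        if ( AkAtomicCas64( pPeak, observed, cur ) )
            break;                                    /* we published the new peak        */
        cur = *pPeak;                                 /* someone else changed it, re-read */
    }
}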
#if defined( _M_ARM ) || defined( _M_ARM64 )
/* Pointer-sized atomics. AkAtomicCasPtr returns 1 when *pDest matched 'expected'
   and was replaced by 'proposed', 0 otherwise. */
__forceinline void AkAtomicStorePtr( AkAtomicPtr* pDest, void* value ) { InterlockedExchangePointer( ( volatile PVOID* )pDest, value ); }
__forceinline void* AkAtomicExchangePtr( AkAtomicPtr* pDest, void* value ) { return InterlockedExchangePointer( ( volatile PVOID* )pDest, value ); }
__forceinline int AkAtomicCasPtr( AkAtomicPtr* pDest, void* proposed, void* expected ) { return InterlockedCompareExchangePointer( ( volatile PVOID* )pDest, proposed, expected ) == expected ? 1 : 0; }
/* Alternate AkAtomicCasPtr definition (selected by a separate preprocessor branch). */
__forceinline int AkAtomicCasPtr( AkAtomicPtr* pDest, void* proposed, void* expected ) { return InterlockedCompareExchangePointer( ( void** )pDest, proposed, expected ) == expected ? 1 : 0; }
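/* Illustrative usage sketch, not part of the original header: a Treiber-style
 * lock-free push built on AkAtomicCasPtr. The Node type and PushNode name are
 * hypothetical. */
struct Node { struct Node* pNext; };

static void PushNode( AkAtomicPtr* pHead, struct Node* pNode )
{
    for ( ;; )
    {
        void* pOld = ( void* )*pHead;                 /* snapshot the current head       */
        pNode->pNext = ( struct Node* )pOld;          /* link the new node in front      */
        if ( AkAtomicCasPtr( pHead, pNode, pOld ) )
            return;                                   /* head still equaled pOld: linked */
        /* another thread changed the head first; loop and retry with the new head */
    }
}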
#if defined(_MSC_VER)
#if defined(_M_IX86) || defined(_M_X64)
#elif defined( _M_ARM ) || defined( _M_ARM64 )
#error Unsupported platform for AkSpinHint
typedef volatile void *   AkAtomicPtr;
typedef volatile int32_t  AkAtomic32;
typedef volatile int64_t  AkAtomic64;
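/* Illustrative usage sketch, not part of the original header: a minimal
 * test-and-set spin lock over AkAtomic32. It assumes AkSpinHint() is the
 * no-argument CPU pause/yield macro selected by the #if block above; SpinLock
 * and SpinUnlock are hypothetical names. */
static void SpinLock( AkAtomic32* pLock )
{
    while ( !AkAtomicCas32( pLock, 1, 0 ) )           /* spin until we flip 0 -> 1        */
        AkSpinHint();
}

static void SpinUnlock( AkAtomic32* pLock )
{
    AkAtomicCas32( pLock, 0, 1 );                     /* holder sees 1, so this succeeds  */
}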