From d171a0f264da10b42b18aac1ddc0847cb7ff495a Mon Sep 17 00:00:00 2001
From: Милош Тошић
Date: Thu, 23 May 2024 18:15:04 +0200
Subject: [PATCH] Use intrinsic functions for memory barriers instead of inline assembly (#327)

---
 include/bx/inline/cpu.inl | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/include/bx/inline/cpu.inl b/include/bx/inline/cpu.inl
index e95fd5c..bc0e64e 100644
--- a/include/bx/inline/cpu.inl
+++ b/include/bx/inline/cpu.inl
@@ -70,7 +70,7 @@ namespace bx
 #if BX_COMPILER_MSVC
 		_ReadBarrier();
 #else
-		asm volatile("":::"memory");
+		__atomic_thread_fence(__ATOMIC_ACQUIRE);
 #endif // BX_COMPILER_*
 	}

@@ -79,7 +79,7 @@ namespace bx
 #if BX_COMPILER_MSVC
 		_WriteBarrier();
 #else
-		asm volatile("":::"memory");
+		__atomic_thread_fence(__ATOMIC_RELEASE);
 #endif // BX_COMPILER_*
 	}

@@ -88,7 +88,7 @@ namespace bx
 #if BX_COMPILER_MSVC
 		_ReadWriteBarrier();
 #else
-		asm volatile("":::"memory");
+		__atomic_thread_fence(__ATOMIC_ACQ_REL);
 #endif // BX_COMPILER_*
 	}
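
Note (not part of the patch): the old asm volatile("":::"memory") only constrains the compiler, while __atomic_thread_fence also emits a hardware fence where the target architecture needs one (a no-op on x86, a dmb on ARM). Below is a minimal sketch of how the acquire/release fences introduced above pair up in a single-producer/single-consumer handoff on GCC/Clang; the names payload, ready, produce, and consume are illustrative and do not exist in bx.

#include <cstdio>
#include <thread>

static int payload = 0;
static int ready   = 0; // only accessed through __atomic builtins

static void produce()
{
	payload = 42;                                  // plain store of the data
	__atomic_thread_fence(__ATOMIC_RELEASE);       // the fence the non-MSVC writeBarrier() path now uses
	__atomic_store_n(&ready, 1, __ATOMIC_RELAXED); // publish the flag
}

static void consume()
{
	while (0 == __atomic_load_n(&ready, __ATOMIC_RELAXED)) {} // wait for the flag
	__atomic_thread_fence(__ATOMIC_ACQUIRE);       // the fence the non-MSVC readBarrier() path now uses
	std::printf("payload = %d\n", payload);        // guaranteed to observe 42
}

int main()
{
	std::thread consumer(consume);
	produce();
	consumer.join();
	return 0;
}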