#define __STRINGIFY(a) #a

/* Xtensa (e.g. ESP8266): interrupts are masked by raising the interrupt
 * level in the PS register with rsil, and unmasked again by writing the
 * saved PS back with wsr.                                                */
#define xt_rsil(level) (__extension__({uint32_t state; __asm__ __volatile__("rsil %0," __STRINGIFY(level) : "=a" (state)); state;}))
#define xt_wsr_ps(state) __asm__ __volatile__("wsr %0,ps; isync" :: "a" (state) : "memory")
    uint32_t savedPS = xt_rsil(15);   /* save PS and raise the interrupt level to 15 */
#define REL_LOCK() xt_wsr_ps(savedPS);

#elif defined(STM32F0xx) || (!defined(ARDUINO) && !defined(ESP_PLATFORM))
/* Cortex-M style branch (STM32F0, or any non-Arduino, non-ESP build):
 * save PRIMASK, disable interrupts with cpsid i, restore PRIMASK later. */
    __asm volatile(" mrs %0, PRIMASK \n cpsid i\n" : "=r"(_pastlock));   /* save PRIMASK, then disable interrupts */
#define REL_LOCK() __asm volatile(" msr PRIMASK, %0\n " : : "r"(_pastlock));
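Whichever branch is selected, the result is the same pair of primitives: ACQ_LOCK() saves the current interrupt-enable state and masks interrupts, and REL_LOCK() writes the saved state back. On a single-core MCU, bracketing a plain read-modify-write between the two is enough to make it atomic. A minimal sketch of that pattern, assuming the macros above are in scope (the body is illustrative, not necessarily this file's exact code):

#include <stdint.h>

/* Sketch: interrupt-locked fetch-and-add for 1-byte objects.  The memory-order
 * argument can be ignored, because a single core with interrupts masked already
 * executes the read-modify-write indivisibly.                                   */
uint8_t __atomic_fetch_add_1(uint8_t *ptr, uint8_t val, int memorder)
{
    (void)memorder;
    ACQ_LOCK();                      /* interrupts off, previous state saved       */
    uint8_t old = *ptr;              /* read                                       */
    *ptr = (uint8_t)(old + val);     /* modify and write back                      */
    REL_LOCK();                      /* saved interrupt state restored             */
    return old;                      /* the fetch_* builtins return the old value  */
}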
_Bool __atomic_compare_exchange_1(uint8_t *ptr, uint8_t *exp, uint8_t desired,
                                  _Bool weak, int success_memorder, int failure_memorder)
{
    /* ... */
}

_Bool __atomic_compare_exchange_4(uint32_t *ptr, uint32_t *exp, uint32_t desired,
                                  _Bool weak, int success_memorder, int failure_memorder)
{
    uint32_t curr = *ptr;
    /* ... */
}
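The excerpt stops right after the initial read of the current value. Per GCC's builtin contract, a successful compare-exchange stores desired and returns true, while a failed one writes the value it actually observed back through exp and returns false. A sketch of how the 4-byte variant can be completed under the interrupt lock (illustrative only; the file's real body may differ, and ACQ_LOCK()/REL_LOCK() are assumed from above):

#include <stdint.h>

_Bool __atomic_compare_exchange_4(uint32_t *ptr, uint32_t *exp, uint32_t desired,
                                  _Bool weak, int success_memorder, int failure_memorder)
{
    (void)weak; (void)success_memorder; (void)failure_memorder;
    _Bool ok;
    ACQ_LOCK();                  /* interrupts off, previous state saved     */
    uint32_t curr = *ptr;        /* the read shown in the excerpt above      */
    ok = (curr == *exp);
    if (ok)
        *ptr = desired;          /* success: commit the new value            */
    else
        *exp = curr;             /* failure: report the value actually seen  */
    REL_LOCK();                  /* saved interrupt state restored           */
    return ok;
}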
uint8_t __atomic_exchange_1(uint8_t *ptr, uint8_t val, int memorder)
uint32_t __atomic_fetch_and_4(uint32_t *ptr, uint32_t val, int memorder)
#define ACQ_LOCK()
Disables interrupts and saves the interrupt enable flag in a register.
uint8_t __atomic_fetch_or_1(uint8_t *ptr, uint8_t val, int memorder)
uint8_t __atomic_fetch_add_1(uint8_t *ptr, uint8_t val, int memorder)
uint32_t __atomic_fetch_or_4(uint32_t *ptr, uint32_t val, int memorder)
#define REL_LOCK()
Restores the interrupt enable flag from a register.
uint8_t __atomic_fetch_and_1(uint8_t *ptr, uint8_t val, int memorder)
uint16_t __atomic_fetch_sub_2(uint16_t *ptr, uint16_t val, int memorder)
_Bool __atomic_compare_exchange_4(uint32_t *ptr, uint32_t *exp, uint32_t desired, _Bool weak, int success_memorder, int failure_memorder)
_Bool __atomic_compare_exchange_1(uint8_t *ptr, uint8_t *exp, uint8_t desired, _Bool weak, int success_memorder, int failure_memorder)
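For context on where these entry points come from: on cores without the needed atomic instructions (the ESP8266's Xtensa LX106, or Cortex-M0 parts such as the STM32F0), GCC does not inline atomic read-modify-write operations but emits out-of-line calls using these names and signatures, so ordinary atomic code links against the interrupt-lock versions automatically. A small hypothetical usage example (the variable and function names below are illustrative and not part of the file):

#include <stdint.h>

static uint8_t pending;        /* events raised by the ISR, not yet serviced */
static uint8_t claimed;        /* 0 = resource free, 1 = owned               */

void event_isr(void)           /* hypothetical interrupt handler */
{
    /* Lowers to a call to __atomic_fetch_add_1() on these cores. */
    __atomic_fetch_add(&pending, (uint8_t)1, __ATOMIC_SEQ_CST);
}

uint8_t drain_pending(void)    /* hypothetical main-loop consumer */
{
    /* Lowers to __atomic_exchange_1(): read and clear in one atomic step. */
    return __atomic_exchange_n(&pending, (uint8_t)0, __ATOMIC_SEQ_CST);
}

int try_claim(void)            /* hypothetical one-shot ownership test */
{
    uint8_t expected = 0;
    /* Lowers to __atomic_compare_exchange_1(): set to 1 only if still 0. */
    return __atomic_compare_exchange_n(&claimed, &expected, (uint8_t)1, 0,
                                       __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
}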