/*
 * FreeRTOS Kernel V10.2.0
 * Copyright (C) 2019 Amazon.com, Inc. or its affiliates.  All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy of
 * this software and associated documentation files (the "Software"), to deal in
 * the Software without restriction, including without limitation the rights to
 * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
 * the Software, and to permit persons to whom the Software is furnished to do so,
 * subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
 * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
 * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
 * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 * http://www.FreeRTOS.org
 * http://aws.amazon.com/freertos
 *
 * 1 tab == 4 spaces!
 */
/**
 * @file atomic.h
 * @brief FreeRTOS atomic operation support.
 *
 * Two implementations of atomic are given in this header file:
 * 1. Disabling interrupt globally.
 * 2. ISA native atomic support.
 * The former is available to all ports (compiler-architecture combination),
 * while the latter is only available to ports compiling with GCC (version at
 * least 4.7.0), which also have ISA atomic support.
 *
 * User can select which implementation to use by
 * setting/clearing configUSE_GCC_BUILTIN_ATOMICS in FreeRTOSConfig.h:
 * Define AND set configUSE_GCC_BUILTIN_ATOMICS to 1 for ISA native atomic support.
 * Undefine OR clear configUSE_GCC_BUILTIN_ATOMICS for the disabling-global-interrupt
 * implementation.
 *
 * @see GCC Built-in Functions for Memory Model Aware Atomic Operations
 *      https://gcc.gnu.org/onlinedocs/gcc/_005f_005fatomic-Builtins.html
 */
52 #ifndef INC_FREERTOS_H
\r
53 #error "include FreeRTOS.h must appear in source files before include atomic.h"
\r
56 /* Standard includes. */
\r
63 #if defined ( configUSE_GCC_BUILTIN_ATOMICS ) && ( configUSE_GCC_BUILTIN_ATOMICS == 1 )
\r
65 /* Needed for __atomic_compare_exchange() weak=false. */
\r
66 #include <stdbool.h>
\r
68 /* This branch is for GCC compiler and GCC compiler only. */
\r
69 #ifndef portFORCE_INLINE
\r
70 #define portFORCE_INLINE inline __attribute__((always_inline))
\r
75 /* Port specific definitions -- entering/exiting critical section.
\r
76 * Refer template -- ./lib/FreeRTOS/portable/Compiler/Arch/portmacro.h
\r
78 * Every call to ATOMIC_EXIT_CRITICAL() must be closely paired with
\r
79 * ATOMIC_ENTER_CRITICAL().
\r
81 #if defined( portSET_INTERRUPT_MASK_FROM_ISR )
\r
83 /* Nested interrupt scheme is supported in this port. */
\r
84 #define ATOMIC_ENTER_CRITICAL() \
\r
85 UBaseType_t uxCriticalSectionType = portSET_INTERRUPT_MASK_FROM_ISR()
\r
87 #define ATOMIC_EXIT_CRITICAL() \
\r
88 portCLEAR_INTERRUPT_MASK_FROM_ISR( uxCriticalSectionType )
\r
92 /* Nested interrupt scheme is NOT supported in this port. */
\r
93 #define ATOMIC_ENTER_CRITICAL() portENTER_CRITICAL()
\r
94 #define ATOMIC_EXIT_CRITICAL() portEXIT_CRITICAL()
\r
96 #endif /* portSET_INTERRUPT_MASK_FROM_ISR() */
\r
98 /* Port specific definition -- "always inline".
\r
99 * Inline is compiler specific, and may not always get inlined depending on your optimization level.
\r
100 * Also, inline is considerred as performance optimization for atomic.
\r
101 * Thus, if portFORCE_INLINE is not provided by portmacro.h, instead of resulting error,
\r
102 * simply define it.
\r
104 #ifndef portFORCE_INLINE
\r
105 #define portFORCE_INLINE
\r
108 #endif /* configUSE_GCC_BUILTIN_ATOMICS */
\r
110 #define ATOMIC_COMPARE_AND_SWAP_SUCCESS 0x1U /**< Compare and swap succeeded, swapped. */
\r
111 #define ATOMIC_COMPARE_AND_SWAP_FAILURE 0x0U /**< Compare and swap failed, did not swap. */
\r
113 /*----------------------------- Swap && CAS ------------------------------*/
\r
116 * Atomic compare-and-swap
\r
118 * @brief Performs an atomic compare-and-swap operation on the specified values.
\r
120 * @param[in, out] pDestination Pointer to memory location from where value is
\r
121 * to be loaded and checked.
\r
122 * @param[in] ulExchange If condition meets, write this value to memory.
\r
123 * @param[in] ulComparand Swap condition.
\r
125 * @return Unsigned integer of value 1 or 0. 1 for swapped, 0 for not swapped.
\r
127 * @note This function only swaps *pDestination with ulExchange, if previous
\r
128 * *pDestination value equals ulComparand.
\r
130 static portFORCE_INLINE uint32_t Atomic_CompareAndSwap_u32(
\r
131 uint32_t volatile * pDestination,
\r
132 uint32_t ulExchange,
\r
133 uint32_t ulComparand )
\r
136 uint32_t ulReturnValue = ATOMIC_COMPARE_AND_SWAP_FAILURE;
\r
138 #if defined ( configUSE_GCC_BUILTIN_ATOMICS ) && ( configUSE_GCC_BUILTIN_ATOMICS == 1 )
\r
140 if ( __atomic_compare_exchange( pDestination,
\r
145 __ATOMIC_SEQ_CST ) )
\r
147 ulReturnValue = ATOMIC_COMPARE_AND_SWAP_SUCCESS;
\r
152 ATOMIC_ENTER_CRITICAL();
\r
154 if ( *pDestination == ulComparand )
\r
156 *pDestination = ulExchange;
\r
157 ulReturnValue = ATOMIC_COMPARE_AND_SWAP_SUCCESS;
\r
160 ATOMIC_EXIT_CRITICAL();
\r
164 return ulReturnValue;
\r
169 * Atomic swap (pointers)
\r
171 * @brief Atomically sets the address pointed to by *ppDestination to the value
\r
174 * @param[in, out] ppDestination Pointer to memory location from where a pointer
\r
175 * value is to be loaded and written back to.
\r
176 * @param[in] pExchange Pointer value to be written to *ppDestination.
\r
178 * @return The initial value of *ppDestination.
\r
180 static portFORCE_INLINE void * Atomic_SwapPointers_p32(
\r
181 void * volatile * ppDestination,
\r
184 void * pReturnValue;
\r
186 #if defined ( configUSE_GCC_BUILTIN_ATOMICS ) && ( configUSE_GCC_BUILTIN_ATOMICS == 1 )
\r
188 __atomic_exchange( ppDestination, &pExchange, &pReturnValue, __ATOMIC_SEQ_CST );
\r
192 ATOMIC_ENTER_CRITICAL();
\r
194 pReturnValue = *ppDestination;
\r
196 *ppDestination = pExchange;
\r
198 ATOMIC_EXIT_CRITICAL();
\r
202 return pReturnValue;
\r
206 * Atomic compare-and-swap (pointers)
\r
208 * @brief Performs an atomic compare-and-swap operation on the specified pointer
\r
211 * @param[in, out] ppDestination Pointer to memory location from where a pointer
\r
212 * value is to be loaded and checked.
\r
213 * @param[in] pExchange If condition meets, write this value to memory.
\r
214 * @param[in] pComparand Swap condition.
\r
216 * @return Unsigned integer of value 1 or 0. 1 for swapped, 0 for not swapped.
\r
218 * @note This function only swaps *ppDestination with pExchange, if previous
\r
219 * *ppDestination value equals pComparand.
\r
221 static portFORCE_INLINE uint32_t Atomic_CompareAndSwapPointers_p32(
\r
222 void * volatile * ppDestination,
\r
223 void * pExchange, void * pComparand )
\r
225 uint32_t ulReturnValue = ATOMIC_COMPARE_AND_SWAP_FAILURE;
\r
227 #if defined ( configUSE_GCC_BUILTIN_ATOMICS ) && ( configUSE_GCC_BUILTIN_ATOMICS == 1 )
\r
228 if ( __atomic_compare_exchange( ppDestination,
\r
233 __ATOMIC_SEQ_CST ) )
\r
235 ulReturnValue = ATOMIC_COMPARE_AND_SWAP_SUCCESS;
\r
240 ATOMIC_ENTER_CRITICAL();
\r
242 if ( *ppDestination == pComparand )
\r
244 *ppDestination = pExchange;
\r
245 ulReturnValue = ATOMIC_COMPARE_AND_SWAP_SUCCESS;
\r
248 ATOMIC_EXIT_CRITICAL();
\r
252 return ulReturnValue;
\r
256 /*----------------------------- Arithmetic ------------------------------*/
\r
261 * @brief Atomically adds count to the value of the specified pointer points to.
\r
263 * @param[in,out] pAddend Pointer to memory location from where value is to be
\r
264 * loaded and written back to.
\r
265 * @param[in] ulCount Value to be added to *pAddend.
\r
267 * @return previous *pAddend value.
\r
269 static portFORCE_INLINE uint32_t Atomic_Add_u32(
\r
270 uint32_t volatile * pAddend,
\r
273 #if defined ( configUSE_GCC_BUILTIN_ATOMICS ) && ( configUSE_GCC_BUILTIN_ATOMICS == 1 )
\r
275 return __atomic_fetch_add(pAddend, ulCount, __ATOMIC_SEQ_CST);
\r
279 uint32_t ulCurrent;
\r
281 ATOMIC_ENTER_CRITICAL();
\r
283 ulCurrent = *pAddend;
\r
285 *pAddend += ulCount;
\r
287 ATOMIC_EXIT_CRITICAL();
\r
297 * @brief Atomically subtracts count from the value of the specified pointer
\r
300 * @param[in,out] pAddend Pointer to memory location from where value is to be
\r
301 * loaded and written back to.
\r
302 * @param[in] ulCount Value to be subtract from *pAddend.
\r
304 * @return previous *pAddend value.
\r
306 static portFORCE_INLINE uint32_t Atomic_Subtract_u32(
\r
307 uint32_t volatile * pAddend,
\r
310 #if defined ( configUSE_GCC_BUILTIN_ATOMICS ) && ( configUSE_GCC_BUILTIN_ATOMICS == 1 )
\r
312 return __atomic_fetch_sub(pAddend, ulCount, __ATOMIC_SEQ_CST);
\r
316 uint32_t ulCurrent;
\r
318 ATOMIC_ENTER_CRITICAL();
\r
320 ulCurrent = *pAddend;
\r
322 *pAddend -= ulCount;
\r
324 ATOMIC_EXIT_CRITICAL();
\r
334 * @brief Atomically increments the value of the specified pointer points to.
\r
336 * @param[in,out] pAddend Pointer to memory location from where value is to be
\r
337 * loaded and written back to.
\r
339 * @return *pAddend value before increment.
\r
341 static portFORCE_INLINE uint32_t Atomic_Increment_u32( uint32_t volatile * pAddend )
\r
343 #if defined ( configUSE_GCC_BUILTIN_ATOMICS ) && ( configUSE_GCC_BUILTIN_ATOMICS == 1 )
\r
345 return __atomic_fetch_add(pAddend, 1, __ATOMIC_SEQ_CST);
\r
349 uint32_t ulCurrent;
\r
351 ATOMIC_ENTER_CRITICAL();
\r
353 ulCurrent = *pAddend;
\r
357 ATOMIC_EXIT_CRITICAL();
\r
367 * @brief Atomically decrements the value of the specified pointer points to
\r
369 * @param[in,out] pAddend Pointer to memory location from where value is to be
\r
370 * loaded and written back to.
\r
372 * @return *pAddend value before decrement.
\r
374 static portFORCE_INLINE uint32_t Atomic_Decrement_u32( uint32_t volatile * pAddend )
\r
376 #if defined ( configUSE_GCC_BUILTIN_ATOMICS ) && ( configUSE_GCC_BUILTIN_ATOMICS == 1 )
\r
378 return __atomic_fetch_sub(pAddend, 1, __ATOMIC_SEQ_CST);
\r
382 uint32_t ulCurrent;
\r
384 ATOMIC_ENTER_CRITICAL();
\r
386 ulCurrent = *pAddend;
\r
390 ATOMIC_EXIT_CRITICAL();
\r
397 /*----------------------------- Bitwise Logical ------------------------------*/
\r
402 * @brief Performs an atomic OR operation on the specified values.
\r
404 * @param [in, out] pDestination Pointer to memory location from where value is
\r
405 * to be loaded and written back to.
\r
406 * @param [in] ulValue Value to be ORed with *pDestination.
\r
408 * @return The original value of *pDestination.
\r
410 static portFORCE_INLINE uint32_t Atomic_OR_u32(
\r
411 uint32_t volatile * pDestination,
\r
414 #if defined ( configUSE_GCC_BUILTIN_ATOMICS ) && ( configUSE_GCC_BUILTIN_ATOMICS == 1 )
\r
416 return __atomic_fetch_or(pDestination, ulValue, __ATOMIC_SEQ_CST);
\r
420 uint32_t ulCurrent;
\r
422 ATOMIC_ENTER_CRITICAL();
\r
424 ulCurrent = *pDestination;
\r
426 *pDestination |= ulValue;
\r
428 ATOMIC_EXIT_CRITICAL();
\r
438 * @brief Performs an atomic AND operation on the specified values.
\r
440 * @param [in, out] pDestination Pointer to memory location from where value is
\r
441 * to be loaded and written back to.
\r
442 * @param [in] ulValue Value to be ANDed with *pDestination.
\r
444 * @return The original value of *pDestination.
\r
446 static portFORCE_INLINE uint32_t Atomic_AND_u32(
\r
447 uint32_t volatile * pDestination,
\r
450 #if defined ( configUSE_GCC_BUILTIN_ATOMICS ) && ( configUSE_GCC_BUILTIN_ATOMICS == 1 )
\r
452 return __atomic_fetch_and(pDestination, ulValue, __ATOMIC_SEQ_CST);
\r
456 uint32_t ulCurrent;
\r
458 ATOMIC_ENTER_CRITICAL();
\r
460 ulCurrent = *pDestination;
\r
462 *pDestination &= ulValue;
\r
464 ATOMIC_EXIT_CRITICAL();
\r
474 * @brief Performs an atomic NAND operation on the specified values.
\r
476 * @param [in, out] pDestination Pointer to memory location from where value is
\r
477 * to be loaded and written back to.
\r
478 * @param [in] ulValue Value to be NANDed with *pDestination.
\r
480 * @return The original value of *pDestination.
\r
482 static portFORCE_INLINE uint32_t Atomic_NAND_u32(
\r
483 uint32_t volatile * pDestination,
\r
486 #if defined ( configUSE_GCC_BUILTIN_ATOMICS ) && ( configUSE_GCC_BUILTIN_ATOMICS == 1 )
\r
488 return __atomic_fetch_nand(pDestination, ulValue, __ATOMIC_SEQ_CST);
\r
492 uint32_t ulCurrent;
\r
494 ATOMIC_ENTER_CRITICAL();
\r
496 ulCurrent = *pDestination;
\r
498 *pDestination = ~(ulCurrent & ulValue);
\r
500 ATOMIC_EXIT_CRITICAL();
\r
510 * @brief Performs an atomic XOR operation on the specified values.
\r
512 * @param [in, out] pDestination Pointer to memory location from where value is
\r
513 * to be loaded and written back to.
\r
514 * @param [in] ulValue Value to be XORed with *pDestination.
\r
516 * @return The original value of *pDestination.
\r
518 static portFORCE_INLINE uint32_t Atomic_XOR_u32(
\r
519 uint32_t volatile * pDestination,
\r
522 #if defined ( configUSE_GCC_BUILTIN_ATOMICS ) && ( configUSE_GCC_BUILTIN_ATOMICS == 1 )
\r
524 return __atomic_fetch_xor(pDestination, ulValue, __ATOMIC_SEQ_CST);
\r
528 uint32_t ulCurrent;
\r
530 ATOMIC_ENTER_CRITICAL();
\r
532 ulCurrent = *pDestination;
\r
534 *pDestination ^= ulValue;
\r
536 ATOMIC_EXIT_CRITICAL();
\r
547 #endif /* ATOMIC_H */
\r