* @file atomic.h\r
* @brief FreeRTOS atomic operation support.\r
*\r
- * This file implements atomic by disabling interrupts globally.\r
- * Implementation with architecture specific atomic instructions\r
- * are to be provided under each compiler directory.\r
+ * This file implements atomic functions by disabling interrupts globally.\r
+ * Implementations with architecture specific atomic instructions can be\r
+ * provided under each compiler directory.\r
*/\r
\r
#ifndef ATOMIC_H\r
extern "C" {\r
#endif\r
\r
-/* Port specific definitions -- entering/exiting critical section.\r
+/*\r
+ * Port specific definitions -- entering/exiting critical section.\r
* Refer template -- ./lib/FreeRTOS/portable/Compiler/Arch/portmacro.h\r
*\r
* Every call to ATOMIC_EXIT_CRITICAL() must be closely paired with\r
* ATOMIC_ENTER_CRITICAL().\r
- * */\r
+ *\r
+ */\r
#if defined( portSET_INTERRUPT_MASK_FROM_ISR )\r
\r
/* Nested interrupt scheme is supported in this port. */\r
\r
#endif /* portSET_INTERRUPT_MASK_FROM_ISR() */\r
\r
-/* Port specific definition -- "always inline".\r
- * Inline is compiler specific, and may not always get inlined depending on your optimization level.\r
- * Also, inline is considerred as performance optimization for atomic.\r
- * Thus, if portFORCE_INLINE is not provided by portmacro.h, instead of resulting error,\r
- * simply define it.\r
+/*\r
+ * Port specific definition -- "always inline".\r
+ * Inline is compiler specific, and may not always get inlined depending on your\r
+ * optimization level. Also, inline is considered as performance optimization\r
+ * for atomic. Thus, if portFORCE_INLINE is not provided by portmacro.h,\r
+ * instead of resulting in an error, simply define it away.
*/\r
#ifndef portFORCE_INLINE\r
#define portFORCE_INLINE\r
*\r
* @brief Performs an atomic compare-and-swap operation on the specified values.\r
*\r
- * @param[in, out] pDestination Pointer to memory location from where value is\r
- * to be loaded and checked.\r
- * @param[in] ulExchange If condition meets, write this value to memory.\r
- * @param[in] ulComparand Swap condition.\r
+ * @param[in, out] pulDestination Pointer to memory location from where value is\r
+ * to be loaded and checked.\r
+ * @param[in] ulExchange If the condition is met, write this value to memory.
+ * @param[in] ulComparand Swap condition.\r
*\r
* @return Unsigned integer of value 1 or 0. 1 for swapped, 0 for not swapped.\r
*\r
- * @note This function only swaps *pDestination with ulExchange, if previous\r
- * *pDestination value equals ulComparand.\r
+ * @note This function only swaps *pulDestination with ulExchange, if previous\r
+ * *pulDestination value equals ulComparand.\r
*/\r
-static portFORCE_INLINE uint32_t Atomic_CompareAndSwap_u32(\r
- uint32_t volatile * pDestination,\r
- uint32_t ulExchange,\r
- uint32_t ulComparand )\r
+static portFORCE_INLINE uint32_t Atomic_CompareAndSwap_u32( uint32_t volatile * pulDestination,\r
+ uint32_t ulExchange,\r
+ uint32_t ulComparand )\r
{\r
-\r
- uint32_t ulReturnValue = ATOMIC_COMPARE_AND_SWAP_FAILURE;\r
+uint32_t ulReturnValue;\r
\r
ATOMIC_ENTER_CRITICAL();\r
-\r
- if ( *pDestination == ulComparand )\r
{\r
- *pDestination = ulExchange;\r
- ulReturnValue = ATOMIC_COMPARE_AND_SWAP_SUCCESS;\r
+ if( *pulDestination == ulComparand )\r
+ {\r
+ *pulDestination = ulExchange;\r
+ ulReturnValue = ATOMIC_COMPARE_AND_SWAP_SUCCESS;\r
+ }\r
+ else\r
+ {\r
+ ulReturnValue = ATOMIC_COMPARE_AND_SWAP_FAILURE;\r
+ }\r
}\r
-\r
ATOMIC_EXIT_CRITICAL();\r
\r
return ulReturnValue;\r
-\r
}\r
+/*-----------------------------------------------------------*/\r
\r
/**\r
* Atomic swap (pointers)\r
*\r
- * @brief Atomically sets the address pointed to by *ppDestination to the value\r
- * of *pExchange.\r
+ * @brief Atomically sets the address pointed to by *ppvDestination to the value\r
+ * of *pvExchange.\r
*\r
- * @param[in, out] ppDestination Pointer to memory location from where a pointer\r
- * value is to be loaded and written back to.\r
- * @param[in] pExchange Pointer value to be written to *ppDestination.\r
+ * @param[in, out] ppvDestination Pointer to memory location from where a pointer\r
+ * value is to be loaded and written back to.\r
+ * @param[in] pvExchange Pointer value to be written to *ppvDestination.\r
*\r
- * @return The initial value of *ppDestination.\r
+ * @return The initial value of *ppvDestination.\r
*/\r
-static portFORCE_INLINE void * Atomic_SwapPointers_p32(\r
- void * volatile * ppDestination,\r
- void * pExchange )\r
+static portFORCE_INLINE void * Atomic_SwapPointers_p32( void * volatile * ppvDestination,\r
+ void * pvExchange )\r
{\r
- void * pReturnValue;\r
+void * pReturnValue;\r
\r
ATOMIC_ENTER_CRITICAL();\r
-\r
- pReturnValue = *ppDestination;\r
-\r
- *ppDestination = pExchange;\r
-\r
+ {\r
+ pReturnValue = *ppvDestination;\r
+ *ppvDestination = pvExchange;\r
+ }\r
ATOMIC_EXIT_CRITICAL();\r
\r
return pReturnValue;\r
}\r
+/*-----------------------------------------------------------*/\r
\r
/**\r
* Atomic compare-and-swap (pointers)\r
*\r
* @brief Performs an atomic compare-and-swap operation on the specified pointer\r
- * values.\r
+ * values.\r
*\r
- * @param[in, out] ppDestination Pointer to memory location from where a pointer\r
- * value is to be loaded and checked.\r
- * @param[in] pExchange If condition meets, write this value to memory.\r
- * @param[in] pComparand Swap condition.\r
+ * @param[in, out] ppvDestination Pointer to memory location from where a pointer\r
+ * value is to be loaded and checked.\r
+ * @param[in] pvExchange If the condition is met, write this value to memory.
+ * @param[in] pvComparand Swap condition.\r
*\r
* @return Unsigned integer of value 1 or 0. 1 for swapped, 0 for not swapped.\r
*\r
- * @note This function only swaps *ppDestination with pExchange, if previous\r
- * *ppDestination value equals pComparand.\r
+ * @note This function only swaps *ppvDestination with pvExchange, if previous\r
+ * *ppvDestination value equals pvComparand.\r
*/\r
-static portFORCE_INLINE uint32_t Atomic_CompareAndSwapPointers_p32(\r
- void * volatile * ppDestination,\r
- void * pExchange, void * pComparand )\r
+static portFORCE_INLINE uint32_t Atomic_CompareAndSwapPointers_p32( void * volatile * ppvDestination,\r
+ void * pvExchange,\r
+ void * pvComparand )\r
{\r
- uint32_t ulReturnValue = ATOMIC_COMPARE_AND_SWAP_FAILURE;\r
+uint32_t ulReturnValue = ATOMIC_COMPARE_AND_SWAP_FAILURE;\r
\r
ATOMIC_ENTER_CRITICAL();\r
-\r
- if ( *ppDestination == pComparand )\r
{\r
- *ppDestination = pExchange;\r
- ulReturnValue = ATOMIC_COMPARE_AND_SWAP_SUCCESS;\r
+ if( *ppvDestination == pvComparand )\r
+ {\r
+ *ppvDestination = pvExchange;\r
+ ulReturnValue = ATOMIC_COMPARE_AND_SWAP_SUCCESS;\r
+ }\r
}\r
-\r
ATOMIC_EXIT_CRITICAL();\r
\r
return ulReturnValue;\r
*\r
* @brief Atomically adds count to the value of the specified pointer points to.\r
*\r
- * @param[in,out] pAddend Pointer to memory location from where value is to be\r
- * loaded and written back to.\r
- * @param[in] ulCount Value to be added to *pAddend.\r
+ * @param[in,out] pulAddend Pointer to memory location from where value is to be\r
+ * loaded and written back to.\r
+ * @param[in] ulCount Value to be added to *pulAddend.\r
*\r
- * @return previous *pAddend value.\r
+ * @return previous *pulAddend value.\r
*/\r
-static portFORCE_INLINE uint32_t Atomic_Add_u32(\r
- uint32_t volatile * pAddend,\r
- uint32_t ulCount )\r
+static portFORCE_INLINE uint32_t Atomic_Add_u32( uint32_t volatile * pulAddend,\r
+ uint32_t ulCount )\r
{\r
uint32_t ulCurrent;\r
\r
ATOMIC_ENTER_CRITICAL();\r
-\r
- ulCurrent = *pAddend;\r
-\r
- *pAddend += ulCount;\r
-\r
+ {\r
+ ulCurrent = *pulAddend;\r
+ *pulAddend += ulCount;\r
+ }\r
ATOMIC_EXIT_CRITICAL();\r
\r
return ulCurrent;\r
}\r
+/*-----------------------------------------------------------*/\r
\r
/**\r
* Atomic subtract\r
*\r
* @brief Atomically subtracts count from the value of the specified pointer\r
- * pointers to.\r
+ *        points to.
*\r
- * @param[in,out] pAddend Pointer to memory location from where value is to be\r
- * loaded and written back to.\r
- * @param[in] ulCount Value to be subtract from *pAddend.\r
+ * @param[in,out] pulAddend Pointer to memory location from where value is to be\r
+ * loaded and written back to.\r
+ * @param[in] ulCount Value to be subtracted from *pulAddend.
*\r
- * @return previous *pAddend value.\r
+ * @return previous *pulAddend value.\r
*/\r
-static portFORCE_INLINE uint32_t Atomic_Subtract_u32(\r
- uint32_t volatile * pAddend,\r
- uint32_t ulCount )\r
+static portFORCE_INLINE uint32_t Atomic_Subtract_u32( uint32_t volatile * pulAddend,\r
+ uint32_t ulCount )\r
{\r
uint32_t ulCurrent;\r
\r
ATOMIC_ENTER_CRITICAL();\r
-\r
- ulCurrent = *pAddend;\r
-\r
- *pAddend -= ulCount;\r
-\r
+ {\r
+ ulCurrent = *pulAddend;\r
+ *pulAddend -= ulCount;\r
+ }\r
ATOMIC_EXIT_CRITICAL();\r
\r
return ulCurrent;\r
}\r
+/*-----------------------------------------------------------*/\r
\r
/**\r
* Atomic increment\r
*\r
* @brief Atomically increments the value of the specified pointer points to.\r
*\r
- * @param[in,out] pAddend Pointer to memory location from where value is to be\r
- * loaded and written back to.\r
+ * @param[in,out] pulAddend Pointer to memory location from where value is to be\r
+ * loaded and written back to.\r
*\r
- * @return *pAddend value before increment.\r
+ * @return *pulAddend value before increment.\r
*/\r
-static portFORCE_INLINE uint32_t Atomic_Increment_u32( uint32_t volatile * pAddend )\r
+static portFORCE_INLINE uint32_t Atomic_Increment_u32( uint32_t volatile * pulAddend )\r
{\r
- uint32_t ulCurrent;\r
+uint32_t ulCurrent;\r
\r
ATOMIC_ENTER_CRITICAL();\r
-\r
- ulCurrent = *pAddend;\r
-\r
- *pAddend += 1;\r
-\r
+ {\r
+ ulCurrent = *pulAddend;\r
+ *pulAddend += 1;\r
+ }\r
ATOMIC_EXIT_CRITICAL();\r
\r
return ulCurrent;\r
}\r
+/*-----------------------------------------------------------*/\r
\r
/**\r
* Atomic decrement\r
*\r
* @brief Atomically decrements the value of the specified pointer points to\r
*\r
- * @param[in,out] pAddend Pointer to memory location from where value is to be\r
- * loaded and written back to.\r
+ * @param[in,out] pulAddend Pointer to memory location from where value is to be\r
+ * loaded and written back to.\r
*\r
- * @return *pAddend value before decrement.\r
+ * @return *pulAddend value before decrement.\r
*/\r
-static portFORCE_INLINE uint32_t Atomic_Decrement_u32( uint32_t volatile * pAddend )\r
+static portFORCE_INLINE uint32_t Atomic_Decrement_u32( uint32_t volatile * pulAddend )\r
{\r
- uint32_t ulCurrent;\r
+uint32_t ulCurrent;\r
\r
ATOMIC_ENTER_CRITICAL();\r
-\r
- ulCurrent = *pAddend;\r
-\r
- *pAddend -= 1;\r
-\r
+ {\r
+ ulCurrent = *pulAddend;\r
+ *pulAddend -= 1;\r
+ }\r
ATOMIC_EXIT_CRITICAL();\r
\r
return ulCurrent;\r
*\r
* @brief Performs an atomic OR operation on the specified values.\r
*\r
- * @param [in, out] pDestination Pointer to memory location from where value is\r
- * to be loaded and written back to.\r
- * @param [in] ulValue Value to be ORed with *pDestination.\r
+ * @param [in, out] pulDestination Pointer to memory location from where value is\r
+ * to be loaded and written back to.\r
+ * @param [in] ulValue Value to be ORed with *pulDestination.\r
*\r
- * @return The original value of *pDestination.\r
+ * @return The original value of *pulDestination.\r
*/\r
-static portFORCE_INLINE uint32_t Atomic_OR_u32(\r
- uint32_t volatile * pDestination,\r
- uint32_t ulValue )\r
+static portFORCE_INLINE uint32_t Atomic_OR_u32( uint32_t volatile * pulDestination,\r
+ uint32_t ulValue )\r
{\r
- uint32_t ulCurrent;\r
+uint32_t ulCurrent;\r
\r
ATOMIC_ENTER_CRITICAL();\r
-\r
- ulCurrent = *pDestination;\r
-\r
- *pDestination |= ulValue;\r
-\r
+ {\r
+ ulCurrent = *pulDestination;\r
+ *pulDestination |= ulValue;\r
+ }\r
ATOMIC_EXIT_CRITICAL();\r
\r
return ulCurrent;\r
}\r
+/*-----------------------------------------------------------*/\r
\r
/**\r
* Atomic AND\r
*\r
* @brief Performs an atomic AND operation on the specified values.\r
*\r
- * @param [in, out] pDestination Pointer to memory location from where value is\r
- * to be loaded and written back to.\r
- * @param [in] ulValue Value to be ANDed with *pDestination.\r
+ * @param [in, out] pulDestination Pointer to memory location from where value is\r
+ * to be loaded and written back to.\r
+ * @param [in] ulValue Value to be ANDed with *pulDestination.\r
*\r
- * @return The original value of *pDestination.\r
+ * @return The original value of *pulDestination.\r
*/\r
-static portFORCE_INLINE uint32_t Atomic_AND_u32(\r
- uint32_t volatile * pDestination,\r
- uint32_t ulValue )\r
+static portFORCE_INLINE uint32_t Atomic_AND_u32( uint32_t volatile * pulDestination,\r
+ uint32_t ulValue )\r
{\r
- uint32_t ulCurrent;\r
+uint32_t ulCurrent;\r
\r
ATOMIC_ENTER_CRITICAL();\r
-\r
- ulCurrent = *pDestination;\r
-\r
- *pDestination &= ulValue;\r
-\r
+ {\r
+ ulCurrent = *pulDestination;\r
+ *pulDestination &= ulValue;\r
+ }\r
ATOMIC_EXIT_CRITICAL();\r
\r
return ulCurrent;\r
}\r
+/*-----------------------------------------------------------*/\r
\r
/**\r
* Atomic NAND\r
*\r
* @brief Performs an atomic NAND operation on the specified values.\r
*\r
- * @param [in, out] pDestination Pointer to memory location from where value is\r
- * to be loaded and written back to.\r
- * @param [in] ulValue Value to be NANDed with *pDestination.\r
+ * @param [in, out] pulDestination Pointer to memory location from where value is\r
+ * to be loaded and written back to.\r
+ * @param [in] ulValue Value to be NANDed with *pulDestination.\r
*\r
- * @return The original value of *pDestination.\r
+ * @return The original value of *pulDestination.\r
*/\r
-static portFORCE_INLINE uint32_t Atomic_NAND_u32(\r
- uint32_t volatile * pDestination,\r
- uint32_t ulValue )\r
+static portFORCE_INLINE uint32_t Atomic_NAND_u32( uint32_t volatile * pulDestination,\r
+ uint32_t ulValue )\r
{\r
- uint32_t ulCurrent;\r
+uint32_t ulCurrent;\r
\r
ATOMIC_ENTER_CRITICAL();\r
-\r
- ulCurrent = *pDestination;\r
-\r
- *pDestination = ~(ulCurrent & ulValue);\r
-\r
+ {\r
+ ulCurrent = *pulDestination;\r
+ *pulDestination = ~( ulCurrent & ulValue );\r
+ }\r
ATOMIC_EXIT_CRITICAL();\r
\r
return ulCurrent;\r
}\r
+/*-----------------------------------------------------------*/\r
\r
/**\r
* Atomic XOR\r
*\r
* @brief Performs an atomic XOR operation on the specified values.\r
*\r
- * @param [in, out] pDestination Pointer to memory location from where value is\r
- * to be loaded and written back to.\r
- * @param [in] ulValue Value to be XORed with *pDestination.\r
+ * @param [in, out] pulDestination Pointer to memory location from where value is\r
+ * to be loaded and written back to.\r
+ * @param [in] ulValue Value to be XORed with *pulDestination.\r
*\r
- * @return The original value of *pDestination.\r
+ * @return The original value of *pulDestination.\r
*/\r
-static portFORCE_INLINE uint32_t Atomic_XOR_u32(\r
- uint32_t volatile * pDestination,\r
- uint32_t ulValue )\r
+static portFORCE_INLINE uint32_t Atomic_XOR_u32( uint32_t volatile * pulDestination,\r
+ uint32_t ulValue )\r
{\r
- uint32_t ulCurrent;\r
+uint32_t ulCurrent;\r
\r
ATOMIC_ENTER_CRITICAL();\r
-\r
- ulCurrent = *pDestination;\r
-\r
- *pDestination ^= ulValue;\r
-\r
+ {\r
+ ulCurrent = *pulDestination;\r
+ *pulDestination ^= ulValue;\r
+ }\r
ATOMIC_EXIT_CRITICAL();\r
\r
return ulCurrent;\r
responsible for resulting newlib operation. User must be familiar with\r
newlib and must provide system-wide implementations of the necessary\r
stubs. Be warned that (at the time of writing) the current newlib design\r
- implements a system-wide malloc() that must be provided with locks. */\r
+ implements a system-wide malloc() that must be provided with locks.\r
+\r
+ See the third party link http://www.nadler.com/embedded/newlibAndFreeRTOS.html\r
+ for additional information. */\r
struct _reent xNewLib_reent;\r
#endif\r
\r
\r
#if ( configUSE_NEWLIB_REENTRANT == 1 )\r
{\r
- /* Initialise this task's Newlib reent structure. */\r
+ /* Initialise this task's Newlib reent structure.\r
+ See the third party link http://www.nadler.com/embedded/newlibAndFreeRTOS.html\r
+ for additional information. */\r
_REENT_INIT_PTR( ( &( pxNewTCB->xNewLib_reent ) ) );\r
}\r
#endif\r
else\r
{\r
--uxCurrentNumberOfTasks;\r
+ traceTASK_DELETE( pxTCB );\r
prvDeleteTCB( pxTCB );\r
\r
/* Reset the next expected unblock time in case it referred to\r
the task that has just been deleted. */\r
prvResetNextTaskUnblockTime();\r
- traceTASK_DELETE( pxTCB );\r
}\r
}\r
taskEXIT_CRITICAL();\r
#if ( configUSE_NEWLIB_REENTRANT == 1 )\r
{\r
/* Switch Newlib's _impure_ptr variable to point to the _reent\r
- structure specific to the task that will run first. */\r
+ structure specific to the task that will run first.\r
+ See the third party link http://www.nadler.com/embedded/newlibAndFreeRTOS.html\r
+ for additional information. */\r
_impure_ptr = &( pxCurrentTCB->xNewLib_reent );\r
}\r
#endif /* configUSE_NEWLIB_REENTRANT */\r
}\r
}\r
#endif /* configUSE_TICK_HOOK */\r
+\r
+ #if ( configUSE_PREEMPTION == 1 )\r
+ {\r
+ if( xYieldPending != pdFALSE )\r
+ {\r
+ xSwitchRequired = pdTRUE;\r
+ }\r
+ else\r
+ {\r
+ mtCOVERAGE_TEST_MARKER();\r
+ }\r
+ }\r
+ #endif /* configUSE_PREEMPTION */\r
}\r
else\r
{\r
#endif\r
}\r
\r
- #if ( configUSE_PREEMPTION == 1 )\r
- {\r
- if( xYieldPending != pdFALSE )\r
- {\r
- xSwitchRequired = pdTRUE;\r
- }\r
- else\r
- {\r
- mtCOVERAGE_TEST_MARKER();\r
- }\r
- }\r
- #endif /* configUSE_PREEMPTION */\r
-\r
return xSwitchRequired;\r
}\r
/*-----------------------------------------------------------*/\r
#if ( configUSE_NEWLIB_REENTRANT == 1 )\r
{\r
/* Switch Newlib's _impure_ptr variable to point to the _reent\r
- structure specific to this task. */\r
+ structure specific to this task.\r
+ See the third party link http://www.nadler.com/embedded/newlibAndFreeRTOS.html\r
+ for additional information. */\r
_impure_ptr = &( pxCurrentTCB->xNewLib_reent );\r
}\r
#endif /* configUSE_NEWLIB_REENTRANT */\r
portCLEAN_UP_TCB( pxTCB );\r
\r
/* Free up the memory allocated by the scheduler for the task. It is up\r
- to the task to free any memory allocated at the application level. */\r
+ to the task to free any memory allocated at the application level.\r
+ See the third party link http://www.nadler.com/embedded/newlibAndFreeRTOS.html\r
+ for additional information. */\r
#if ( configUSE_NEWLIB_REENTRANT == 1 )\r
{\r
_reclaim_reent( &( pxTCB->xNewLib_reent ) );\r