1 /**************************************************************************//**
3 * @brief CMSIS Cortex-M Core Instruction Access Header File
8 * Copyright (C) 2009-2012 ARM Limited. All rights reserved.
11 * ARM Limited (ARM) is supplying this software for use with Cortex-M
12 * processor based microcontrollers. This file can be freely distributed
13 * within development tools that are supporting such ARM based processors.
16 * THIS SOFTWARE IS PROVIDED "AS IS". NO WARRANTIES, WHETHER EXPRESS, IMPLIED
17 * OR STATUTORY, INCLUDING, BUT NOT LIMITED TO, IMPLIED WARRANTIES OF
18 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE APPLY TO THIS SOFTWARE.
19 * ARM SHALL NOT, IN ANY CIRCUMSTANCES, BE LIABLE FOR SPECIAL, INCIDENTAL, OR
20 * CONSEQUENTIAL DAMAGES, FOR ANY REASON WHATSOEVER.
22 ******************************************************************************/
24 #ifndef __CORE_CMINSTR_H
25 #define __CORE_CMINSTR_H
/* ##########################  Core Instruction Access  ######################### */
/** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
  Access to dedicated instructions
  @{
*/
34 #if defined ( __CC_ARM ) /*------------------RealView Compiler -----------------*/
35 /* ARM armcc specific functions */
37 #if (__ARMCC_VERSION < 400677)
38 #error "Please use ARM Compiler Toolchain V4.0.677 or later!"
/** \brief  No Operation

    No Operation does nothing. This instruction can be used for code alignment purposes.
 */
#define __NOP                             __nop


/** \brief  Wait For Interrupt

    Wait For Interrupt is a hint instruction that suspends execution
    until one of a number of events occurs.
 */
#define __WFI                             __wfi


/** \brief  Wait For Event

    Wait For Event is a hint instruction that permits the processor to enter
    a low-power state until one of a number of events occurs.
 */
#define __WFE                             __wfe


/** \brief  Send Event

    Send Event is a hint instruction. It causes an event to be signaled to the CPU.
 */
#define __SEV                             __sev
/** \brief  Instruction Synchronization Barrier

    Instruction Synchronization Barrier flushes the pipeline in the processor,
    so that all instructions following the ISB are fetched from cache or
    memory, after the instruction has been completed.
 */
#define __ISB() __isb(0xF)


/** \brief  Data Synchronization Barrier

    This function acts as a special kind of Data Memory Barrier.
    It completes when all explicit memory accesses before this instruction complete.
 */
#define __DSB() __dsb(0xF)


/** \brief  Data Memory Barrier

    This function ensures the apparent order of the explicit memory operations before
    and after the instruction, without ensuring their completion.
 */
#define __DMB() __dmb(0xF)
97 /** \brief Reverse byte order (32 bit)
99 This function reverses the byte order in integer value.
101 \param [in] value Value to reverse
102 \return Reversed value
107 /** \brief Reverse byte order (16 bit)
109 This function reverses the byte order in two unsigned short values.
111 \param [in] value Value to reverse
112 \return Reversed value
114 __attribute__((section(".rev16_text"))) __STATIC_INLINE __ASM uint32_t __REV16(uint32_t value)
121 /** \brief Reverse byte order in signed short value
123 This function reverses the byte order in a signed short value with sign extension to integer.
125 \param [in] value Value to reverse
126 \return Reversed value
128 __attribute__((section(".revsh_text"))) __STATIC_INLINE __ASM int32_t __REVSH(int32_t value)
135 /** \brief Rotate Right in unsigned value (32 bit)
137 This function Rotate Right (immediate) provides the value of the contents of a register rotated by a variable number of bits.
139 \param [in] value Value to rotate
140 \param [in] value Number of Bits to rotate
141 \return Rotated value
146 #if (__CORTEX_M >= 0x03)
148 /** \brief Reverse bit order of value
150 This function reverses the bit order of the given value.
152 \param [in] value Value to reverse
153 \return Reversed value
155 #define __RBIT __rbit
158 /** \brief LDR Exclusive (8 bit)
160 This function performs a exclusive LDR command for 8 bit value.
162 \param [in] ptr Pointer to data
163 \return value of type uint8_t at (*ptr)
165 #define __LDREXB(ptr) ((uint8_t ) __ldrex(ptr))
168 /** \brief LDR Exclusive (16 bit)
170 This function performs a exclusive LDR command for 16 bit values.
172 \param [in] ptr Pointer to data
173 \return value of type uint16_t at (*ptr)
175 #define __LDREXH(ptr) ((uint16_t) __ldrex(ptr))
178 /** \brief LDR Exclusive (32 bit)
180 This function performs a exclusive LDR command for 32 bit values.
182 \param [in] ptr Pointer to data
183 \return value of type uint32_t at (*ptr)
185 #define __LDREXW(ptr) ((uint32_t ) __ldrex(ptr))
/** \brief  STR Exclusive (8 bit)

    This function performs an exclusive STR command for 8 bit values.

    \param [in]  value  Value to store
    \param [in]  ptr    Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
#define __STREXB(value, ptr)              __strex(value, ptr)


/** \brief  STR Exclusive (16 bit)

    This function performs an exclusive STR command for 16 bit values.

    \param [in]  value  Value to store
    \param [in]  ptr    Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
#define __STREXH(value, ptr)              __strex(value, ptr)


/** \brief  STR Exclusive (32 bit)

    This function performs an exclusive STR command for 32 bit values.

    \param [in]  value  Value to store
    \param [in]  ptr    Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
#define __STREXW(value, ptr)              __strex(value, ptr)


/** \brief  Remove the exclusive lock

    This function removes the exclusive lock which is created by LDREX.
 */
#define __CLREX                           __clrex
232 /** \brief Signed Saturate
234 This function saturates a signed value.
236 \param [in] value Value to be saturated
237 \param [in] sat Bit position to saturate to (1..32)
238 \return Saturated value
240 #define __SSAT __ssat
243 /** \brief Unsigned Saturate
245 This function saturates an unsigned value.
247 \param [in] value Value to be saturated
248 \param [in] sat Bit position to saturate to (0..31)
249 \return Saturated value
251 #define __USAT __usat
254 /** \brief Count leading zeros
256 This function counts the number of leading zeros of a data value.
258 \param [in] value Value to count the leading zeros
259 \return number of leading zeros in value
263 #endif /* (__CORTEX_M >= 0x03) */
267 #elif defined ( __ICCARM__ ) /*------------------ ICC Compiler -------------------*/
268 /* IAR iccarm specific functions */
270 #include <cmsis_iar.h>
273 #elif defined ( __TMS470__ ) /*---------------- TI CCS Compiler ------------------*/
274 /* TI CCS specific functions */
276 #include <cmsis_ccs.h>
279 #elif defined ( __GNUC__ ) /*------------------ GNU Compiler ---------------------*/
280 /* GNU gcc specific functions */
282 /** \brief No Operation
284 No Operation does nothing. This instruction can be used for code alignment purposes.
286 __attribute__( ( always_inline ) ) __STATIC_INLINE void __NOP(void)
288 __ASM volatile ("nop");
292 /** \brief Wait For Interrupt
294 Wait For Interrupt is a hint instruction that suspends execution
295 until one of a number of events occurs.
297 __attribute__( ( always_inline ) ) __STATIC_INLINE void __WFI(void)
299 __ASM volatile ("wfi");
303 /** \brief Wait For Event
305 Wait For Event is a hint instruction that permits the processor to enter
306 a low-power state until one of a number of events occurs.
308 __attribute__( ( always_inline ) ) __STATIC_INLINE void __WFE(void)
310 __ASM volatile ("wfe");
314 /** \brief Send Event
316 Send Event is a hint instruction. It causes an event to be signaled to the CPU.
318 __attribute__( ( always_inline ) ) __STATIC_INLINE void __SEV(void)
320 __ASM volatile ("sev");
324 /** \brief Instruction Synchronization Barrier
326 Instruction Synchronization Barrier flushes the pipeline in the processor,
327 so that all instructions following the ISB are fetched from cache or
328 memory, after the instruction has been completed.
330 __attribute__( ( always_inline ) ) __STATIC_INLINE void __ISB(void)
332 __ASM volatile ("isb");
336 /** \brief Data Synchronization Barrier
338 This function acts as a special kind of Data Memory Barrier.
339 It completes when all explicit memory accesses before this instruction complete.
341 __attribute__( ( always_inline ) ) __STATIC_INLINE void __DSB(void)
343 __ASM volatile ("dsb");
347 /** \brief Data Memory Barrier
349 This function ensures the apparent order of the explicit memory operations before
350 and after the instruction, without ensuring their completion.
352 __attribute__( ( always_inline ) ) __STATIC_INLINE void __DMB(void)
354 __ASM volatile ("dmb");
358 /** \brief Reverse byte order (32 bit)
360 This function reverses the byte order in integer value.
362 \param [in] value Value to reverse
363 \return Reversed value
365 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __REV(uint32_t value)
369 __ASM volatile ("rev %0, %1" : "=r" (result) : "r" (value) );
374 /** \brief Reverse byte order (16 bit)
376 This function reverses the byte order in two unsigned short values.
378 \param [in] value Value to reverse
379 \return Reversed value
381 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __REV16(uint32_t value)
385 __ASM volatile ("rev16 %0, %1" : "=r" (result) : "r" (value) );
390 /** \brief Reverse byte order in signed short value
392 This function reverses the byte order in a signed short value with sign extension to integer.
394 \param [in] value Value to reverse
395 \return Reversed value
397 __attribute__( ( always_inline ) ) __STATIC_INLINE int32_t __REVSH(int32_t value)
401 __ASM volatile ("revsh %0, %1" : "=r" (result) : "r" (value) );
406 /** \brief Rotate Right in unsigned value (32 bit)
408 This function Rotate Right (immediate) provides the value of the contents of a register rotated by a variable number of bits.
410 \param [in] value Value to rotate
411 \param [in] value Number of Bits to rotate
412 \return Rotated value
414 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
417 __ASM volatile ("ror %0, %0, %1" : "+r" (op1) : "r" (op2) );
422 #if (__CORTEX_M >= 0x03)
424 /** \brief Reverse bit order of value
426 This function reverses the bit order of the given value.
428 \param [in] value Value to reverse
429 \return Reversed value
431 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __RBIT(uint32_t value)
435 __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
440 /** \brief LDR Exclusive (8 bit)
442 This function performs a exclusive LDR command for 8 bit value.
444 \param [in] ptr Pointer to data
445 \return value of type uint8_t at (*ptr)
447 __attribute__( ( always_inline ) ) __STATIC_INLINE uint8_t __LDREXB(volatile uint8_t *addr)
451 __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) );
456 /** \brief LDR Exclusive (16 bit)
458 This function performs a exclusive LDR command for 16 bit values.
460 \param [in] ptr Pointer to data
461 \return value of type uint16_t at (*ptr)
463 __attribute__( ( always_inline ) ) __STATIC_INLINE uint16_t __LDREXH(volatile uint16_t *addr)
467 __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) );
472 /** \brief LDR Exclusive (32 bit)
474 This function performs a exclusive LDR command for 32 bit values.
476 \param [in] ptr Pointer to data
477 \return value of type uint32_t at (*ptr)
479 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __LDREXW(volatile uint32_t *addr)
483 __ASM volatile ("ldrex %0, [%1]" : "=r" (result) : "r" (addr) );
488 /** \brief STR Exclusive (8 bit)
490 This function performs a exclusive STR command for 8 bit values.
492 \param [in] value Value to store
493 \param [in] ptr Pointer to location
494 \return 0 Function succeeded
495 \return 1 Function failed
497 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
501 __ASM volatile ("strexb %0, %2, [%1]" : "=&r" (result) : "r" (addr), "r" (value) );
506 /** \brief STR Exclusive (16 bit)
508 This function performs a exclusive STR command for 16 bit values.
510 \param [in] value Value to store
511 \param [in] ptr Pointer to location
512 \return 0 Function succeeded
513 \return 1 Function failed
515 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
519 __ASM volatile ("strexh %0, %2, [%1]" : "=&r" (result) : "r" (addr), "r" (value) );
524 /** \brief STR Exclusive (32 bit)
526 This function performs a exclusive STR command for 32 bit values.
528 \param [in] value Value to store
529 \param [in] ptr Pointer to location
530 \return 0 Function succeeded
531 \return 1 Function failed
533 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
537 __ASM volatile ("strex %0, %2, [%1]" : "=&r" (result) : "r" (addr), "r" (value) );
542 /** \brief Remove the exclusive lock
544 This function removes the exclusive lock which is created by LDREX.
547 __attribute__( ( always_inline ) ) __STATIC_INLINE void __CLREX(void)
549 __ASM volatile ("clrex");
553 /** \brief Signed Saturate
555 This function saturates a signed value.
557 \param [in] value Value to be saturated
558 \param [in] sat Bit position to saturate to (1..32)
559 \return Saturated value
561 #define __SSAT(ARG1,ARG2) \
563 uint32_t __RES, __ARG1 = (ARG1); \
564 __ASM ("ssat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
569 /** \brief Unsigned Saturate
571 This function saturates an unsigned value.
573 \param [in] value Value to be saturated
574 \param [in] sat Bit position to saturate to (0..31)
575 \return Saturated value
577 #define __USAT(ARG1,ARG2) \
579 uint32_t __RES, __ARG1 = (ARG1); \
580 __ASM ("usat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
585 /** \brief Count leading zeros
587 This function counts the number of leading zeros of a data value.
589 \param [in] value Value to count the leading zeros
590 \return number of leading zeros in value
592 __attribute__( ( always_inline ) ) __STATIC_INLINE uint8_t __CLZ(uint32_t value)
596 __ASM volatile ("clz %0, %1" : "=r" (result) : "r" (value) );
600 #endif /* (__CORTEX_M >= 0x03) */
605 #elif defined ( __TASKING__ ) /*------------------ TASKING Compiler --------------*/
606 /* TASKING carm specific functions */
609 * The CMSIS functions have been implemented as intrinsics in the compiler.
610 * Please use "carm -?i" to get an up to date list of all intrinsics,
611 * Including the CMSIS ones.
616 /*@}*/ /* end of group CMSIS_Core_InstructionInterface */
618 #endif /* __CORE_CMINSTR_H */