/**************************************************************************//**
 * @file     core_cmInstr.h
 * @brief    CMSIS Cortex-M Core Instruction Access Header File
 * @date     06. March 2013
 *
 * Copyright (C) 2009-2014 ARM Limited. All rights reserved.
 *
 * ARM Limited (ARM) is supplying this software for use with Cortex-M
 * processor based microcontrollers.  This file can be freely distributed
 * within development tools that are supporting such ARM based processors.
 *
 * THIS SOFTWARE IS PROVIDED "AS IS".  NO WARRANTIES, WHETHER EXPRESS, IMPLIED
 * OR STATUTORY, INCLUDING, BUT NOT LIMITED TO, IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE APPLY TO THIS SOFTWARE.
 * ARM SHALL NOT, IN ANY CIRCUMSTANCES, BE LIABLE FOR SPECIAL, INCIDENTAL, OR
 * CONSEQUENTIAL DAMAGES, FOR ANY REASON WHATSOEVER.
 *
 ******************************************************************************/
24 #ifndef __CORE_CMINSTR_H
\r
25 #define __CORE_CMINSTR_H
\r
28 /* ########################## Core Instruction Access ######################### */
\r
29 /** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
\r
30 Access to dedicated instructions
\r
34 #if defined ( __CC_ARM ) /*------------------RealView Compiler -----------------*/
\r
35 /* ARM armcc specific functions */
\r
37 #if (__ARMCC_VERSION < 400677)
\r
38 #error "Please use ARM Compiler Toolchain V4.0.677 or later!"
\r
/** \brief  No Operation

    No Operation does nothing. This instruction can be used for code alignment purposes.
 */
#define __NOP                             __nop


/** \brief  Wait For Interrupt

    Wait For Interrupt is a hint instruction that suspends execution
    until one of a number of events occurs.
 */
#define __WFI                             __wfi


/** \brief  Wait For Event

    Wait For Event is a hint instruction that permits the processor to enter
    a low-power state until one of a number of events occurs.
 */
#define __WFE                             __wfe


/** \brief  Send Event

    Send Event is a hint instruction. It causes an event to be signaled to the CPU.
 */
#define __SEV                             __sev
/** \brief  Instruction Synchronization Barrier

    Instruction Synchronization Barrier flushes the pipeline in the processor,
    so that all instructions following the ISB are fetched from cache or
    memory, after the instruction has been completed.
 */
#define __ISB() __isb(0xF)


/** \brief  Data Synchronization Barrier

    This function acts as a special kind of Data Memory Barrier.
    It completes when all explicit memory accesses before this instruction complete.
 */
#define __DSB() __dsb(0xF)


/** \brief  Data Memory Barrier

    This function ensures the apparent order of the explicit memory operations before
    and after the instruction, without ensuring their completion.
 */
#define __DMB() __dmb(0xF)
97 /** \brief Reverse byte order (32 bit)
\r
99 This function reverses the byte order in integer value.
\r
101 \param [in] value Value to reverse
\r
102 \return Reversed value
\r
104 #define __REV __rev
\r
107 /** \brief Reverse byte order (16 bit)
\r
109 This function reverses the byte order in two unsigned short values.
\r
111 \param [in] value Value to reverse
\r
112 \return Reversed value
\r
114 __attribute__((section(".rev16_text"))) __STATIC_INLINE __ASM uint32_t __REV16(uint32_t value)
\r
121 /** \brief Reverse byte order in signed short value
\r
123 This function reverses the byte order in a signed short value with sign extension to integer.
\r
125 \param [in] value Value to reverse
\r
126 \return Reversed value
\r
128 __attribute__((section(".revsh_text"))) __STATIC_INLINE __ASM int32_t __REVSH(int32_t value)
\r
135 /** \brief Rotate Right in unsigned value (32 bit)
\r
137 This function Rotate Right (immediate) provides the value of the contents of a register rotated by a variable number of bits.
\r
139 \param [in] value Value to rotate
\r
140 \param [in] value Number of Bits to rotate
\r
141 \return Rotated value
\r
143 #define __ROR __ror
\r
#if       (__CORTEX_M >= 0x03)

/** \brief  Reverse bit order of value

    This function reverses the bit order of the given value.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
#define __RBIT                            __rbit


/** \brief  LDR Exclusive (8 bit)

    This function performs a exclusive LDR command for 8 bit value.

    \param [in]    ptr  Pointer to data
    \return             value of type uint8_t at (*ptr)
 */
#define __LDREXB(ptr)                     ((uint8_t ) __ldrex(ptr))


/** \brief  LDR Exclusive (16 bit)

    This function performs a exclusive LDR command for 16 bit values.

    \param [in]    ptr  Pointer to data
    \return        value of type uint16_t at (*ptr)
 */
#define __LDREXH(ptr)                     ((uint16_t) __ldrex(ptr))


/** \brief  LDR Exclusive (32 bit)

    This function performs a exclusive LDR command for 32 bit values.

    \param [in]    ptr  Pointer to data
    \return        value of type uint32_t at (*ptr)
 */
#define __LDREXW(ptr)                     ((uint32_t ) __ldrex(ptr))


/** \brief  STR Exclusive (8 bit)

    This function performs a exclusive STR command for 8 bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
#define __STREXB(value, ptr)              __strex(value, ptr)


/** \brief  STR Exclusive (16 bit)

    This function performs a exclusive STR command for 16 bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
#define __STREXH(value, ptr)              __strex(value, ptr)


/** \brief  STR Exclusive (32 bit)

    This function performs a exclusive STR command for 32 bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
#define __STREXW(value, ptr)              __strex(value, ptr)


/** \brief  Remove the exclusive lock

    This function removes the exclusive lock which is created by LDREX.
 */
#define __CLREX                           __clrex


/** \brief  Signed Saturate

    This function saturates a signed value.

    \param [in]  value  Value to be saturated
    \param [in]    sat  Bit position to saturate to (1..32)
    \return             Saturated value
 */
#define __SSAT                            __ssat


/** \brief  Unsigned Saturate

    This function saturates an unsigned value.

    \param [in]  value  Value to be saturated
    \param [in]    sat  Bit position to saturate to (0..31)
    \return             Saturated value
 */
#define __USAT                            __usat


/** \brief  Count leading zeros

    This function counts the number of leading zeros of a data value.

    \param [in]  value  Value to count the leading zeros
    \return             number of leading zeros in value
 */
#define __CLZ                             __clz

#endif /* (__CORTEX_M >= 0x03) */
267 #elif defined ( __ICCARM__ ) /*------------------ ICC Compiler -------------------*/
\r
268 /* IAR iccarm specific functions */
\r
270 #include <cmsis_iar.h>
\r
273 #elif defined ( __TMS470__ ) /*---------------- TI CCS Compiler ------------------*/
\r
274 /* TI CCS specific functions */
\r
276 #include <cmsis_ccs.h>
\r
279 #elif defined ( __GNUC__ ) /*------------------ GNU Compiler ---------------------*/
\r
280 /* GNU gcc specific functions */
\r
282 /** \brief No Operation
\r
284 No Operation does nothing. This instruction can be used for code alignment purposes.
\r
286 __attribute__( ( always_inline ) ) __STATIC_INLINE void __NOP(void)
\r
288 __ASM volatile ("nop");
\r
292 /** \brief Wait For Interrupt
\r
294 Wait For Interrupt is a hint instruction that suspends execution
\r
295 until one of a number of events occurs.
\r
297 __attribute__( ( always_inline ) ) __STATIC_INLINE void __WFI(void)
\r
299 __ASM volatile ("wfi");
\r
303 /** \brief Wait For Event
\r
305 Wait For Event is a hint instruction that permits the processor to enter
\r
306 a low-power state until one of a number of events occurs.
\r
308 __attribute__( ( always_inline ) ) __STATIC_INLINE void __WFE(void)
\r
310 __ASM volatile ("wfe");
\r
314 /** \brief Send Event
\r
316 Send Event is a hint instruction. It causes an event to be signaled to the CPU.
\r
318 __attribute__( ( always_inline ) ) __STATIC_INLINE void __SEV(void)
\r
320 __ASM volatile ("sev");
\r
324 /** \brief Instruction Synchronization Barrier
\r
326 Instruction Synchronization Barrier flushes the pipeline in the processor,
\r
327 so that all instructions following the ISB are fetched from cache or
\r
328 memory, after the instruction has been completed.
\r
330 __attribute__( ( always_inline ) ) __STATIC_INLINE void __ISB(void)
\r
332 __ASM volatile ("isb");
\r
336 /** \brief Data Synchronization Barrier
\r
338 This function acts as a special kind of Data Memory Barrier.
\r
339 It completes when all explicit memory accesses before this instruction complete.
\r
341 __attribute__( ( always_inline ) ) __STATIC_INLINE void __DSB(void)
\r
343 __ASM volatile ("dsb");
\r
347 /** \brief Data Memory Barrier
\r
349 This function ensures the apparent order of the explicit memory operations before
\r
350 and after the instruction, without ensuring their completion.
\r
352 __attribute__( ( always_inline ) ) __STATIC_INLINE void __DMB(void)
\r
354 __ASM volatile ("dmb");
\r
358 /** \brief Reverse byte order (32 bit)
\r
360 This function reverses the byte order in integer value.
\r
362 \param [in] value Value to reverse
\r
363 \return Reversed value
\r
365 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __REV(uint32_t value)
\r
369 __ASM volatile ("rev %0, %1" : "=r" (result) : "r" (value) );
\r
374 /** \brief Reverse byte order (16 bit)
\r
376 This function reverses the byte order in two unsigned short values.
\r
378 \param [in] value Value to reverse
\r
379 \return Reversed value
\r
381 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __REV16(uint32_t value)
\r
385 __ASM volatile ("rev16 %0, %1" : "=r" (result) : "r" (value) );
\r
390 /** \brief Reverse byte order in signed short value
\r
392 This function reverses the byte order in a signed short value with sign extension to integer.
\r
394 \param [in] value Value to reverse
\r
395 \return Reversed value
\r
397 __attribute__( ( always_inline ) ) __STATIC_INLINE int32_t __REVSH(int32_t value)
\r
401 __ASM volatile ("revsh %0, %1" : "=r" (result) : "r" (value) );
\r
406 /** \brief Rotate Right in unsigned value (32 bit)
\r
408 This function Rotate Right (immediate) provides the value of the contents of a register rotated by a variable number of bits.
\r
410 \param [in] value Value to rotate
\r
411 \param [in] value Number of Bits to rotate
\r
412 \return Rotated value
\r
414 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
\r
417 __ASM volatile ("ror %0, %0, %1" : "+r" (op1) : "r" (op2) );
\r
#if       (__CORTEX_M >= 0x03)

/** \brief  Reverse bit order of value

    This function reverses the bit order of the given value.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __RBIT(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
  return(result);
}


/** \brief  LDR Exclusive (8 bit)

    This function performs a exclusive LDR command for 8 bit value.

    \param [in]    ptr  Pointer to data
    \return             value of type uint8_t at (*ptr)
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint8_t __LDREXB(volatile uint8_t *addr)
{
  uint8_t result;

  __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) );
  return(result);
}


/** \brief  LDR Exclusive (16 bit)

    This function performs a exclusive LDR command for 16 bit values.

    \param [in]    ptr  Pointer to data
    \return        value of type uint16_t at (*ptr)
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint16_t __LDREXH(volatile uint16_t *addr)
{
  uint16_t result;

  __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) );
  return(result);
}


/** \brief  LDR Exclusive (32 bit)

    This function performs a exclusive LDR command for 32 bit values.

    \param [in]    ptr  Pointer to data
    \return        value of type uint32_t at (*ptr)
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __LDREXW(volatile uint32_t *addr)
{
  uint32_t result;

  __ASM volatile ("ldrex %0, [%1]" : "=r" (result) : "r" (addr) );
  return(result);
}


/** \brief  STR Exclusive (8 bit)

    This function performs a exclusive STR command for 8 bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
{
  uint32_t result;

  /* "=&r": early-clobber — result must not share a register with the inputs. */
  __ASM volatile ("strexb %0, %2, [%1]" : "=&r" (result) : "r" (addr), "r" (value) );
  return(result);
}


/** \brief  STR Exclusive (16 bit)

    This function performs a exclusive STR command for 16 bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
{
  uint32_t result;

  __ASM volatile ("strexh %0, %2, [%1]" : "=&r" (result) : "r" (addr), "r" (value) );
  return(result);
}


/** \brief  STR Exclusive (32 bit)

    This function performs a exclusive STR command for 32 bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
{
  uint32_t result;

  __ASM volatile ("strex %0, %2, [%1]" : "=&r" (result) : "r" (addr), "r" (value) );
  return(result);
}


/** \brief  Remove the exclusive lock

    This function removes the exclusive lock which is created by LDREX.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __CLREX(void)
{
  __ASM volatile ("clrex");
}


/** \brief  Signed Saturate

    This function saturates a signed value.

    \param [in]  value  Value to be saturated
    \param [in]    sat  Bit position to saturate to (1..32)
    \return             Saturated value
 */
/* Macro (not a function) because the saturation position must be an
   assemble-time immediate ("I" constraint). */
#define __SSAT(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })


/** \brief  Unsigned Saturate

    This function saturates an unsigned value.

    \param [in]  value  Value to be saturated
    \param [in]    sat  Bit position to saturate to (0..31)
    \return             Saturated value
 */
#define __USAT(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })


/** \brief  Count leading zeros

    This function counts the number of leading zeros of a data value.

    \param [in]  value  Value to count the leading zeros
    \return             number of leading zeros in value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint8_t __CLZ(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("clz %0, %1" : "=r" (result) : "r" (value) );
  return(result);
}

#endif /* (__CORTEX_M >= 0x03) */
605 #elif defined ( __TASKING__ ) /*------------------ TASKING Compiler --------------*/
\r
606 /* TASKING carm specific functions */
\r
609 * The CMSIS functions have been implemented as intrinsics in the compiler.
\r
610 * Please use "carm -?i" to get an up to date list of all intrinsics,
\r
611 * Including the CMSIS ones.
\r
616 /*@}*/ /* end of group CMSIS_Core_InstructionInterface */
\r
618 #endif /* __CORE_CMINSTR_H */
\r