/**************************************************************************//**
 * @file     core_cmInstr.h
 * @brief    CMSIS Cortex-M Core Instruction Access Header File
 * @date     28. August 2014
 *
 ******************************************************************************/
/* Copyright (c) 2009 - 2014 ARM LIMITED

   All rights reserved.
   Redistribution and use in source and binary forms, with or without
   modification, are permitted provided that the following conditions are met:
   - Redistributions of source code must retain the above copyright
     notice, this list of conditions and the following disclaimer.
   - Redistributions in binary form must reproduce the above copyright
     notice, this list of conditions and the following disclaimer in the
     documentation and/or other materials provided with the distribution.
   - Neither the name of ARM nor the names of its contributors may be used
     to endorse or promote products derived from this software without
     specific prior written permission.

   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
   AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
   IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
   ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE
   LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
   CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
   SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
   INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
   CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
   ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
   POSSIBILITY OF SUCH DAMAGE.
   ---------------------------------------------------------------------------*/


#ifndef __CORE_CMINSTR_H
#define __CORE_CMINSTR_H

/* ##########################  Core Instruction Access  ######################### */
/** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
  Access to dedicated instructions
  @{
*/

#if   defined ( __CC_ARM ) /*------------------RealView Compiler -----------------*/
/* ARM armcc specific functions */

#if (__ARMCC_VERSION < 400677)
  #error "Please use ARM Compiler Toolchain V4.0.677 or later!"
#endif

/** \brief  No Operation

    No Operation does nothing. This instruction can be used for code alignment purposes.
 */
#define __NOP                             __nop


/** \brief  Wait For Interrupt

    Wait For Interrupt is a hint instruction that suspends execution
    until one of a number of events occurs.
 */
#define __WFI                             __wfi


/** \brief  Wait For Event

    Wait For Event is a hint instruction that permits the processor to enter
    a low-power state until one of a number of events occurs.
 */
#define __WFE                             __wfe


/** \brief  Send Event

    Send Event is a hint instruction. It causes an event to be signaled to the CPU.
 */
#define __SEV                             __sev

/** \brief  Instruction Synchronization Barrier

    Instruction Synchronization Barrier flushes the pipeline in the processor,
    so that all instructions following the ISB are fetched from cache or
    memory, after the instruction has been completed.
 */
#define __ISB()                           __isb(0xF)


/** \brief  Data Synchronization Barrier

    This function acts as a special kind of Data Memory Barrier.
    It completes when all explicit memory accesses before this instruction complete.
 */
#define __DSB()                           __dsb(0xF)


/** \brief  Data Memory Barrier

    This function ensures the apparent order of the explicit memory operations before
    and after the instruction, without ensuring their completion.
 */
#define __DMB()                           __dmb(0xF)

/** \brief  Reverse byte order (32 bit)

    This function reverses the byte order in an integer value.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
#define __REV                             __rev


/** \brief  Reverse byte order (16 bit)

    This function reverses the byte order in two unsigned short values.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
#ifndef __NO_EMBEDDED_ASM
__attribute__((section(".rev16_text"))) __STATIC_INLINE __ASM uint32_t __REV16(uint32_t value)
{
  rev16 r0, r0
  bx lr
}
#endif


/** \brief  Reverse byte order in signed short value

    This function reverses the byte order in a signed short value with sign extension to integer.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
#ifndef __NO_EMBEDDED_ASM
__attribute__((section(".revsh_text"))) __STATIC_INLINE __ASM int32_t __REVSH(int32_t value)
{
  revsh r0, r0
  bx lr
}
#endif

/** \brief  Rotate Right in unsigned value (32 bit)

    This function rotates a value right by a specified number of bits.

    \param [in]    value  Value to rotate
    \param [in]    shift  Number of bits to rotate
    \return               Rotated value
 */
#define __ROR                             __ror

/** \brief  Breakpoint

    This function causes the processor to enter Debug state.
    Debug tools can use this to investigate system state when the instruction at a particular address is reached.

    \param [in]    value  is ignored by the processor.
                   If required, a debugger can use it to store additional information about the breakpoint.
 */
#define __BKPT(value)                     __breakpoint(value)

#if       (__CORTEX_M >= 0x03) || (__CORTEX_SC >= 300)

/** \brief  Reverse bit order of value

    This function reverses the bit order of the given value.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
#define __RBIT                            __rbit

/** \brief  LDR Exclusive (8 bit)

    This function executes an exclusive LDR instruction for 8 bit values.

    \param [in]    ptr  Pointer to data
    \return             value of type uint8_t at (*ptr)
 */
#define __LDREXB(ptr)                     ((uint8_t ) __ldrex(ptr))


/** \brief  LDR Exclusive (16 bit)

    This function executes an exclusive LDR instruction for 16 bit values.

    \param [in]    ptr  Pointer to data
    \return             value of type uint16_t at (*ptr)
 */
#define __LDREXH(ptr)                     ((uint16_t) __ldrex(ptr))


/** \brief  LDR Exclusive (32 bit)

    This function executes an exclusive LDR instruction for 32 bit values.

    \param [in]    ptr  Pointer to data
    \return             value of type uint32_t at (*ptr)
 */
#define __LDREXW(ptr)                     ((uint32_t ) __ldrex(ptr))

/** \brief  STR Exclusive (8 bit)

    This function executes an exclusive STR instruction for 8 bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
#define __STREXB(value, ptr)              __strex(value, ptr)


/** \brief  STR Exclusive (16 bit)

    This function executes an exclusive STR instruction for 16 bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
#define __STREXH(value, ptr)              __strex(value, ptr)


/** \brief  STR Exclusive (32 bit)

    This function executes an exclusive STR instruction for 32 bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
#define __STREXW(value, ptr)              __strex(value, ptr)


/** \brief  Remove the exclusive lock

    This function removes the exclusive lock which is created by LDREX.
 */
#define __CLREX                           __clrex
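
/* Usage sketch (illustrative, not part of CMSIS): the exclusive-access
   intrinsics above combine into the standard load/modify/store-exclusive
   retry loop for lock-free updates. The helper name atomic_increment is
   hypothetical.

     static uint32_t atomic_increment(volatile uint32_t *counter)
     {
       uint32_t val;
       do {
         val = __LDREXW(counter) + 1U;     // exclusive load, then modify
       } while (__STREXW(val, counter));   // 1 = store failed, retry
       return val;
     }
*/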
\r

/** \brief  Signed Saturate

    This function saturates a signed value.

    \param [in]  value  Value to be saturated
    \param [in]    sat  Bit position to saturate to (1..32)
    \return             Saturated value
 */
#define __SSAT                            __ssat


/** \brief  Unsigned Saturate

    This function saturates an unsigned value.

    \param [in]  value  Value to be saturated
    \param [in]    sat  Bit position to saturate to (0..31)
    \return             Saturated value
 */
#define __USAT                            __usat

/** \brief  Count leading zeros

    This function counts the number of leading zeros of a data value.

    \param [in]  value  Value to count the leading zeros
    \return             number of leading zeros in value
 */
#define __CLZ                             __clz

/** \brief  Rotate Right with Extend (32 bit)

    This function moves each bit of a bitstring right by one bit. The carry input is shifted in at the left end of the bitstring.

    \param [in]    value  Value to rotate
    \return               Rotated value
 */
#ifndef __NO_EMBEDDED_ASM
__attribute__((section(".rrx_text"))) __STATIC_INLINE __ASM uint32_t __RRX(uint32_t value)
{
  rrx r0, r0
  bx lr
}
#endif

/** \brief  LDRT Unprivileged (8 bit)

    This function executes an unprivileged LDRT instruction for 8 bit values.

    \param [in]    ptr  Pointer to data
    \return             value of type uint8_t at (*ptr)
 */
#define __LDRBT(ptr)                      ((uint8_t )  __ldrt(ptr))


/** \brief  LDRT Unprivileged (16 bit)

    This function executes an unprivileged LDRT instruction for 16 bit values.

    \param [in]    ptr  Pointer to data
    \return             value of type uint16_t at (*ptr)
 */
#define __LDRHT(ptr)                      ((uint16_t)  __ldrt(ptr))


/** \brief  LDRT Unprivileged (32 bit)

    This function executes an unprivileged LDRT instruction for 32 bit values.

    \param [in]    ptr  Pointer to data
    \return             value of type uint32_t at (*ptr)
 */
#define __LDRT(ptr)                       ((uint32_t ) __ldrt(ptr))

/** \brief  STRT Unprivileged (8 bit)

    This function executes an unprivileged STRT instruction for 8 bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
 */
#define __STRBT(value, ptr)               __strt(value, ptr)


/** \brief  STRT Unprivileged (16 bit)

    This function executes an unprivileged STRT instruction for 16 bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
 */
#define __STRHT(value, ptr)               __strt(value, ptr)


/** \brief  STRT Unprivileged (32 bit)

    This function executes an unprivileged STRT instruction for 32 bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
 */
#define __STRT(value, ptr)                __strt(value, ptr)

#endif /* (__CORTEX_M >= 0x03) || (__CORTEX_SC >= 300) */

#elif defined ( __GNUC__ ) /*------------------ GNU Compiler ---------------------*/
/* GNU gcc specific functions */

/* Define macros for porting to both thumb1 and thumb2.
 * For thumb1, use low registers (r0-r7), specified by constraint "l".
 * Otherwise, use general registers, specified by constraint "r". */
#if defined (__thumb__) && !defined (__thumb2__)
#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
#define __CMSIS_GCC_USE_REG(r) "l" (r)
#else
#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
#define __CMSIS_GCC_USE_REG(r) "r" (r)
#endif

/** \brief  No Operation

    No Operation does nothing. This instruction can be used for code alignment purposes.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __NOP(void)
{
  __ASM volatile ("nop");
}


/** \brief  Wait For Interrupt

    Wait For Interrupt is a hint instruction that suspends execution
    until one of a number of events occurs.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __WFI(void)
{
  __ASM volatile ("wfi");
}


/** \brief  Wait For Event

    Wait For Event is a hint instruction that permits the processor to enter
    a low-power state until one of a number of events occurs.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __WFE(void)
{
  __ASM volatile ("wfe");
}


/** \brief  Send Event

    Send Event is a hint instruction. It causes an event to be signaled to the CPU.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __SEV(void)
{
  __ASM volatile ("sev");
}

/** \brief  Instruction Synchronization Barrier

    Instruction Synchronization Barrier flushes the pipeline in the processor,
    so that all instructions following the ISB are fetched from cache or
    memory, after the instruction has been completed.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __ISB(void)
{
  __ASM volatile ("isb");
}


/** \brief  Data Synchronization Barrier

    This function acts as a special kind of Data Memory Barrier.
    It completes when all explicit memory accesses before this instruction complete.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __DSB(void)
{
  __ASM volatile ("dsb");
}


/** \brief  Data Memory Barrier

    This function ensures the apparent order of the explicit memory operations before
    and after the instruction, without ensuring their completion.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __DMB(void)
{
  __ASM volatile ("dmb");
}
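
/* Usage sketch (illustrative, not part of this header): a common pattern is
   to issue __DSB() followed by __ISB() after a write that changes how the
   processor fetches or executes code, e.g. after relocating the vector table.
   SCB and VTOR come from the core peripheral headers, not this file:

     SCB->VTOR = 0x20000000UL;   // hypothetical new vector table base
     __DSB();                    // ensure the write has completed
     __ISB();                    // flush the pipeline so the change takes effect
*/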
\r

/** \brief  Reverse byte order (32 bit)

    This function reverses the byte order in an integer value.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __REV(uint32_t value)
{
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
  return __builtin_bswap32(value);
#else
  uint32_t result;

  __ASM volatile ("rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
#endif
}


/** \brief  Reverse byte order (16 bit)

    This function reverses the byte order in two unsigned short values.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __REV16(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}


/** \brief  Reverse byte order in signed short value

    This function reverses the byte order in a signed short value with sign extension to integer.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE int32_t __REVSH(int32_t value)
{
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  return (short)__builtin_bswap16(value);
#else
  int32_t result;

  __ASM volatile ("revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
#endif
}
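
/* Usage sketch (illustrative): on a little-endian Cortex-M core, __REV
   converts a 32-bit big-endian (network order) word to host order, e.g.
   when parsing protocol headers. The helper name ntohl_u32 is hypothetical.

     static inline uint32_t ntohl_u32(uint32_t net)
     {
       return __REV(net);   // 0x11223344 -> 0x44332211
     }
*/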
\r

/** \brief  Rotate Right in unsigned value (32 bit)

    This function rotates a value right by a specified number of bits.

    \param [in]    op1  Value to rotate
    \param [in]    op2  Number of bits to rotate
    \return             Rotated value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
{
  return (op1 >> op2) | (op1 << (32 - op2));
}
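
/* Usage sketch (illustrative): __ROR(0x12345678UL, 8U) yields 0x78123456;
   the low byte wraps around to the top. Note the expression above assumes
   0 < op2 < 32, since a C shift by 32 is undefined. */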
\r

/** \brief  Breakpoint

    This function causes the processor to enter Debug state.
    Debug tools can use this to investigate system state when the instruction at a particular address is reached.

    \param [in]    value  is ignored by the processor.
                   If required, a debugger can use it to store additional information about the breakpoint.
 */
#define __BKPT(value)                     __ASM volatile ("bkpt "#value)

#if       (__CORTEX_M >= 0x03) || (__CORTEX_SC >= 300)

/** \brief  Reverse bit order of value

    This function reverses the bit order of the given value.

    \param [in]    value  Value to reverse
    \return               Reversed value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __RBIT(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
  return(result);
}

/** \brief  LDR Exclusive (8 bit)

    This function executes an exclusive LDR instruction for 8 bit values.

    \param [in]    ptr  Pointer to data
    \return             value of type uint8_t at (*ptr)
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint8_t __LDREXB(volatile uint8_t *addr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) );
#else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
     accepted by the assembler, so the following less efficient pattern
     has to be used.
  */
  __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
  return ((uint8_t) result);    /* Add explicit type cast here */
}


/** \brief  LDR Exclusive (16 bit)

    This function executes an exclusive LDR instruction for 16 bit values.

    \param [in]    ptr  Pointer to data
    \return             value of type uint16_t at (*ptr)
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint16_t __LDREXH(volatile uint16_t *addr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) );
#else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
     accepted by the assembler, so the following less efficient pattern
     has to be used.
  */
  __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
  return ((uint16_t) result);    /* Add explicit type cast here */
}


/** \brief  LDR Exclusive (32 bit)

    This function executes an exclusive LDR instruction for 32 bit values.

    \param [in]    ptr  Pointer to data
    \return             value of type uint32_t at (*ptr)
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __LDREXW(volatile uint32_t *addr)
{
  uint32_t result;

  __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );
  return(result);
}

/** \brief  STR Exclusive (8 bit)

    This function executes an exclusive STR instruction for 8 bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
{
  uint32_t result;

  __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
  return(result);
}


/** \brief  STR Exclusive (16 bit)

    This function executes an exclusive STR instruction for 16 bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
{
  uint32_t result;

  __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
  return(result);
}


/** \brief  STR Exclusive (32 bit)

    This function executes an exclusive STR instruction for 32 bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
    \return          0  Function succeeded
    \return          1  Function failed
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
{
  uint32_t result;

  __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
  return(result);
}


/** \brief  Remove the exclusive lock

    This function removes the exclusive lock which is created by LDREX.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __CLREX(void)
{
  __ASM volatile ("clrex" ::: "memory");
}
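
/* Usage sketch (illustrative, not part of CMSIS): a minimal spinlock built
   from the exclusive-access and barrier intrinsics above, following the
   general pattern ARM documents for exclusive monitors. The helper names
   lock_acquire/lock_release are hypothetical.

     static inline void lock_acquire(volatile uint32_t *lock)
     {
       while ((__LDREXW(lock) != 0U) || (__STREXW(1U, lock) != 0U)) {
         // lock was held or the exclusive store failed: retry
       }
       __DMB();              // order critical-section accesses after the lock
     }

     static inline void lock_release(volatile uint32_t *lock)
     {
       __DMB();              // complete critical-section accesses first
       *lock = 0U;
     }
*/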
\r

/** \brief  Signed Saturate

    This function saturates a signed value.

    \param [in]  value  Value to be saturated
    \param [in]    sat  Bit position to saturate to (1..32)
    \return             Saturated value
 */
#define __SSAT(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })


/** \brief  Unsigned Saturate

    This function saturates an unsigned value.

    \param [in]  value  Value to be saturated
    \param [in]    sat  Bit position to saturate to (0..31)
    \return             Saturated value
 */
#define __USAT(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })
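
/* Usage sketch (illustrative): clamp a 32-bit intermediate result to the
   signed 16-bit range before storing it into an int16_t buffer. Saturating
   to 16 bits keeps the value within -32768..32767.

     int32_t acc = 40000;                          // out of int16_t range
     int16_t sample = (int16_t)__SSAT(acc, 16);    // saturates to 32767
*/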
\r

/** \brief  Count leading zeros

    This function counts the number of leading zeros of a data value.

    \param [in]  value  Value to count the leading zeros
    \return             number of leading zeros in value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint8_t __CLZ(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("clz %0, %1" : "=r" (result) : "r" (value) );
  return ((uint8_t) result);    /* Add explicit type cast here */
}
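
/* Usage sketch (illustrative): __CLZ can derive the index of the highest set
   bit, e.g. for an integer log2. __CLZ(0) returns 32, so the zero case needs
   a guard. The helper name highest_bit is hypothetical.

     static inline uint32_t highest_bit(uint32_t x)
     {
       return (x != 0U) ? (31U - __CLZ(x)) : 0U;
     }
*/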
\r

/** \brief  Rotate Right with Extend (32 bit)

    This function moves each bit of a bitstring right by one bit. The carry input is shifted in at the left end of the bitstring.

    \param [in]    value  Value to rotate
    \return               Rotated value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __RRX(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}

/** \brief  LDRT Unprivileged (8 bit)

    This function executes an unprivileged LDRT instruction for 8 bit values.

    \param [in]    ptr  Pointer to data
    \return             value of type uint8_t at (*ptr)
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint8_t __LDRBT(volatile uint8_t *addr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*addr) );
#else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
     accepted by the assembler, so the following less efficient pattern
     has to be used.
  */
  __ASM volatile ("ldrbt %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
  return ((uint8_t) result);    /* Add explicit type cast here */
}


/** \brief  LDRT Unprivileged (16 bit)

    This function executes an unprivileged LDRT instruction for 16 bit values.

    \param [in]    ptr  Pointer to data
    \return             value of type uint16_t at (*ptr)
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint16_t __LDRHT(volatile uint16_t *addr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*addr) );
#else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
     accepted by the assembler, so the following less efficient pattern
     has to be used.
  */
  __ASM volatile ("ldrht %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
  return ((uint16_t) result);    /* Add explicit type cast here */
}


/** \brief  LDRT Unprivileged (32 bit)

    This function executes an unprivileged LDRT instruction for 32 bit values.

    \param [in]    ptr  Pointer to data
    \return             value of type uint32_t at (*ptr)
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __LDRT(volatile uint32_t *addr)
{
  uint32_t result;

  __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*addr) );
  return(result);
}

/** \brief  STRT Unprivileged (8 bit)

    This function executes an unprivileged STRT instruction for 8 bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __STRBT(uint8_t value, volatile uint8_t *addr)
{
  __ASM volatile ("strbt %1, %0" : "=Q" (*addr) : "r" ((uint32_t)value) );
}


/** \brief  STRT Unprivileged (16 bit)

    This function executes an unprivileged STRT instruction for 16 bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __STRHT(uint16_t value, volatile uint16_t *addr)
{
  __ASM volatile ("strht %1, %0" : "=Q" (*addr) : "r" ((uint32_t)value) );
}


/** \brief  STRT Unprivileged (32 bit)

    This function executes an unprivileged STRT instruction for 32 bit values.

    \param [in]  value  Value to store
    \param [in]    ptr  Pointer to location
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __STRT(uint32_t value, volatile uint32_t *addr)
{
  __ASM volatile ("strt %1, %0" : "=Q" (*addr) : "r" (value) );
}

#endif /* (__CORTEX_M >= 0x03) || (__CORTEX_SC >= 300) */

#elif defined ( __ICCARM__ ) /*------------------ ICC Compiler -------------------*/
/* IAR iccarm specific functions */
#include <cmsis_iar.h>


#elif defined ( __TMS470__ ) /*---------------- TI CCS Compiler ------------------*/
/* TI CCS specific functions */
#include <cmsis_ccs.h>


#elif defined ( __TASKING__ ) /*------------------ TASKING Compiler --------------*/
/* TASKING carm specific functions */

/*
 * The CMSIS functions have been implemented as intrinsics in the compiler.
 * Please use "carm -?i" to get an up to date list of all intrinsics,
 * including the CMSIS ones.
 */


#elif defined ( __CSMC__ ) /*------------------ COSMIC Compiler -------------------*/
/* Cosmic specific functions */
#include <cmsis_csm.h>

#endif

/*@}*/ /* end of group CMSIS_Core_InstructionInterface */

#endif /* __CORE_CMINSTR_H */