/**************************************************************************//**
 * @brief    CMSIS compiler GCC header file
 * @date     09. April 2018
 ******************************************************************************/
/*
 * Copyright (c) 2009-2018 Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the License); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef __CMSIS_GCC_H
#define __CMSIS_GCC_H

/* ignore some GCC warnings */
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wsign-conversion"
#pragma GCC diagnostic ignored "-Wconversion"
#pragma GCC diagnostic ignored "-Wunused-parameter"

/* Fallback for __has_builtin */
#ifndef __has_builtin
  #define __has_builtin(x) (0)
#endif

/* CMSIS compiler specific defines */
#ifndef   __ASM
  #define __ASM                                  __asm
#endif
#ifndef   __INLINE
  #define __INLINE                               inline
#endif
#ifndef   __STATIC_INLINE
  #define __STATIC_INLINE                        static inline
#endif
#ifndef   __STATIC_FORCEINLINE
  #define __STATIC_FORCEINLINE                   __attribute__((always_inline)) static inline
#endif
#ifndef   __NO_RETURN
  #define __NO_RETURN                            __attribute__((__noreturn__))
#endif
#ifndef   __USED
  #define __USED                                 __attribute__((used))
#endif
#ifndef   __WEAK
  #define __WEAK                                 __attribute__((weak))
#endif
#ifndef   __PACKED
  #define __PACKED                               __attribute__((packed, aligned(1)))
#endif
#ifndef   __PACKED_STRUCT
  #define __PACKED_STRUCT                        struct __attribute__((packed, aligned(1)))
#endif
#ifndef   __PACKED_UNION
  #define __PACKED_UNION                         union __attribute__((packed, aligned(1)))
#endif
#ifndef   __UNALIGNED_UINT32        /* deprecated */
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  struct __attribute__((packed)) T_UINT32 { uint32_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT32(x)                  (((struct T_UINT32 *)(x))->v)
#endif
#ifndef   __UNALIGNED_UINT16_WRITE
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT16_WRITE(addr, val)    (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef   __UNALIGNED_UINT16_READ
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT16_READ(addr)          (((const struct T_UINT16_READ *)(const void *)(addr))->v)
#endif
#ifndef   __UNALIGNED_UINT32_WRITE
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT32_WRITE(addr, val)    (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef   __UNALIGNED_UINT32_READ
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT32_READ(addr)          (((const struct T_UINT32_READ *)(const void *)(addr))->v)
#endif
#ifndef   __ALIGNED
  #define __ALIGNED(x)                           __attribute__((aligned(x)))
#endif
#ifndef   __RESTRICT
  #define __RESTRICT                             __restrict
#endif
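
/*
  Usage sketch (illustrative, not part of the original header): the
  __UNALIGNED_*_READ/WRITE macros above give access to multi-byte values at
  addresses that may not be naturally aligned, e.g. fields packed into a byte
  buffer received from a peripheral:

      uint8_t frame[8];                                     // hypothetical packed buffer
      uint16_t len = __UNALIGNED_UINT16_READ(&frame[1]);    // 16-bit read at an odd address
      __UNALIGNED_UINT32_WRITE(&frame[3], 0x12345678U);     // 32-bit write at an unaligned address

  The packed helper structs force the compiler to emit accesses that are safe
  for unaligned addresses instead of assuming natural alignment.
*/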
\r


/* ###########################  Core Function Access  ########################### */
/** \ingroup  CMSIS_Core_FunctionInterface
    \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
  @{
 */

/**
  \brief   Enable IRQ Interrupts
  \details Enables IRQ interrupts by clearing the I-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
__STATIC_FORCEINLINE void __enable_irq(void)
{
  __ASM volatile ("cpsie i" : : : "memory");
}

/**
  \brief   Disable IRQ Interrupts
  \details Disables IRQ interrupts by setting the I-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
__STATIC_FORCEINLINE void __disable_irq(void)
{
  __ASM volatile ("cpsid i" : : : "memory");
}
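
/*
  Usage sketch (illustrative only): a minimal critical section built from the
  two intrinsics above. This variant unconditionally re-enables interrupts, so
  it is only safe when interrupts are known to be enabled on entry; the PRIMASK
  save/restore pattern shown further below is more robust.

      __disable_irq();
      shared_counter++;      // hypothetical shared variable
      __enable_irq();
*/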
\r

/**
  \brief   Get Control Register
  \details Returns the content of the Control Register.
  \return  Control Register value
 */
__STATIC_FORCEINLINE uint32_t __get_CONTROL(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, control" : "=r" (result) );
  return(result);
}

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Control Register (non-secure)
  \details Returns the content of the non-secure Control Register when in secure mode.
  \return  non-secure Control Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, control_ns" : "=r" (result) );
  return(result);
}
#endif

/**
  \brief   Set Control Register
  \details Writes the given value to the Control Register.
  \param [in]  control  Control Register value to set
 */
__STATIC_FORCEINLINE void __set_CONTROL(uint32_t control)
{
  __ASM volatile ("MSR control, %0" : : "r" (control) : "memory");
}

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Control Register (non-secure)
  \details Writes the given value to the non-secure Control Register when in secure state.
  \param [in]  control  Control Register value to set
 */
__STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control)
{
  __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory");
}
#endif

/**
  \brief   Get IPSR Register
  \details Returns the content of the IPSR Register.
  \return  IPSR Register value
 */
__STATIC_FORCEINLINE uint32_t __get_IPSR(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, ipsr" : "=r" (result) );
  return(result);
}

/**
  \brief   Get APSR Register
  \details Returns the content of the APSR Register.
  \return  APSR Register value
 */
__STATIC_FORCEINLINE uint32_t __get_APSR(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, apsr" : "=r" (result) );
  return(result);
}

/**
  \brief   Get xPSR Register
  \details Returns the content of the xPSR Register.
  \return  xPSR Register value
 */
__STATIC_FORCEINLINE uint32_t __get_xPSR(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, xpsr" : "=r" (result) );
  return(result);
}

/**
  \brief   Get Process Stack Pointer
  \details Returns the current value of the Process Stack Pointer (PSP).
  \return  PSP Register value
 */
__STATIC_FORCEINLINE uint32_t __get_PSP(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, psp" : "=r" (result) );
  return(result);
}

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Process Stack Pointer (non-secure)
  \details Returns the current value of the non-secure Process Stack Pointer (PSP) when in secure state.
  \return  PSP Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, psp_ns" : "=r" (result) );
  return(result);
}
#endif

/**
  \brief   Set Process Stack Pointer
  \details Assigns the given value to the Process Stack Pointer (PSP).
  \param [in]  topOfProcStack  Process Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : );
}

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Process Stack Pointer (non-secure)
  \details Assigns the given value to the non-secure Process Stack Pointer (PSP) when in secure state.
  \param [in]  topOfProcStack  Process Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack)
{
  __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : );
}
#endif

/**
  \brief   Get Main Stack Pointer
  \details Returns the current value of the Main Stack Pointer (MSP).
  \return  MSP Register value
 */
__STATIC_FORCEINLINE uint32_t __get_MSP(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, msp" : "=r" (result) );
  return(result);
}

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Main Stack Pointer (non-secure)
  \details Returns the current value of the non-secure Main Stack Pointer (MSP) when in secure state.
  \return  MSP Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, msp_ns" : "=r" (result) );
  return(result);
}
#endif

/**
  \brief   Set Main Stack Pointer
  \details Assigns the given value to the Main Stack Pointer (MSP).
  \param [in]  topOfMainStack  Main Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : );
}

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Main Stack Pointer (non-secure)
  \details Assigns the given value to the non-secure Main Stack Pointer (MSP) when in secure state.
  \param [in]  topOfMainStack  Main Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack)
{
  __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : );
}
#endif

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Stack Pointer (non-secure)
  \details Returns the current value of the non-secure Stack Pointer (SP) when in secure state.
  \return  SP Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, sp_ns" : "=r" (result) );
  return(result);
}

/**
  \brief   Set Stack Pointer (non-secure)
  \details Assigns the given value to the non-secure Stack Pointer (SP) when in secure state.
  \param [in]  topOfStack  Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack)
{
  __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : );
}
#endif

/**
  \brief   Get Priority Mask
  \details Returns the current state of the priority mask bit from the Priority Mask Register.
  \return  Priority Mask value
 */
__STATIC_FORCEINLINE uint32_t __get_PRIMASK(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, primask" : "=r" (result) :: "memory");
  return(result);
}

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Priority Mask (non-secure)
  \details Returns the current state of the non-secure priority mask bit from the Priority Mask Register when in secure state.
  \return  Priority Mask value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, primask_ns" : "=r" (result) :: "memory");
  return(result);
}
#endif

/**
  \brief   Set Priority Mask
  \details Assigns the given value to the Priority Mask Register.
  \param [in]  priMask  Priority Mask
 */
__STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask)
{
  __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory");
}
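
/*
  Usage sketch (illustrative only): the usual interrupt-safe critical section
  saves PRIMASK, masks interrupts, and restores the saved state on exit, so it
  also works when called with interrupts already disabled:

      uint32_t primask = __get_PRIMASK();   // remember current mask state
      __disable_irq();
      shared_counter++;                     // hypothetical shared variable
      __set_PRIMASK(primask);               // restore previous state
*/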
\r

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Priority Mask (non-secure)
  \details Assigns the given value to the non-secure Priority Mask Register when in secure state.
  \param [in]  priMask  Priority Mask
 */
__STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask)
{
  __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory");
}
#endif


#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )
/**
  \brief   Enable FIQ
  \details Enables FIQ interrupts by clearing the F-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
__STATIC_FORCEINLINE void __enable_fault_irq(void)
{
  __ASM volatile ("cpsie f" : : : "memory");
}

/**
  \brief   Disable FIQ
  \details Disables FIQ interrupts by setting the F-bit in the CPSR.
           Can only be executed in Privileged modes.
 */
__STATIC_FORCEINLINE void __disable_fault_irq(void)
{
  __ASM volatile ("cpsid f" : : : "memory");
}

/**
  \brief   Get Base Priority
  \details Returns the current value of the Base Priority register.
  \return  Base Priority register value
 */
__STATIC_FORCEINLINE uint32_t __get_BASEPRI(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, basepri" : "=r" (result) );
  return(result);
}

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Base Priority (non-secure)
  \details Returns the current value of the non-secure Base Priority register when in secure state.
  \return  Base Priority register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) );
  return(result);
}
#endif

/**
  \brief   Set Base Priority
  \details Assigns the given value to the Base Priority register.
  \param [in]  basePri  Base Priority value to set
 */
__STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri)
{
  __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory");
}

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Base Priority (non-secure)
  \details Assigns the given value to the non-secure Base Priority register when in secure state.
  \param [in]  basePri  Base Priority value to set
 */
__STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri)
{
  __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory");
}
#endif

/**
  \brief   Set Base Priority with condition
  \details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled,
           or the new value increases the BASEPRI priority level.
  \param [in]  basePri  Base Priority value to set
 */
__STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri)
{
  __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory");
}

/**
  \brief   Get Fault Mask
  \details Returns the current value of the Fault Mask register.
  \return  Fault Mask register value
 */
__STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, faultmask" : "=r" (result) );
  return(result);
}

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Fault Mask (non-secure)
  \details Returns the current value of the non-secure Fault Mask register when in secure state.
  \return  Fault Mask register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) );
  return(result);
}
#endif

/**
  \brief   Set Fault Mask
  \details Assigns the given value to the Fault Mask register.
  \param [in]  faultMask  Fault Mask value to set
 */
__STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask)
{
  __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory");
}

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Fault Mask (non-secure)
  \details Assigns the given value to the non-secure Fault Mask register when in secure state.
  \param [in]  faultMask  Fault Mask value to set
 */
__STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask)
{
  __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory");
}
#endif

#endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
           (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */


#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )
/**
  \brief   Get Process Stack Pointer Limit
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence zero is returned always in non-secure
  mode.

  \details Returns the current value of the Process Stack Pointer Limit (PSPLIM).
  \return  PSPLIM Register value
 */
__STATIC_FORCEINLINE uint32_t __get_PSPLIM(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, psplim" : "=r" (result) );
  return result;
#endif
}

#if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Process Stack Pointer Limit (non-secure)
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence zero is returned always.

  \details Returns the current value of the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
  \return  PSPLIM Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, psplim_ns" : "=r" (result) );
  return result;
#endif
}
#endif

/**
  \brief   Set Process Stack Pointer Limit
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence the write is silently ignored in non-secure
  mode.

  \details Assigns the given value to the Process Stack Pointer Limit (PSPLIM).
  \param [in]  ProcStackPtrLimit  Process Stack Pointer Limit value to set
 */
__STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  (void)ProcStackPtrLimit;
#else
  __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit));
#endif
}

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Process Stack Pointer Limit (non-secure)
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence the write is silently ignored.

  \details Assigns the given value to the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state.
  \param [in]  ProcStackPtrLimit  Process Stack Pointer Limit value to set
 */
__STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure PSPLIM is RAZ/WI
  (void)ProcStackPtrLimit;
#else
  __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit));
#endif
}
#endif

/**
  \brief   Get Main Stack Pointer Limit
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence zero is returned always in non-secure
  mode.

  \details Returns the current value of the Main Stack Pointer Limit (MSPLIM).
  \return  MSPLIM Register value
 */
__STATIC_FORCEINLINE uint32_t __get_MSPLIM(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, msplim" : "=r" (result) );
  return result;
#endif
}

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Get Main Stack Pointer Limit (non-secure)
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence zero is returned always.

  \details Returns the current value of the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state.
  \return  MSPLIM Register value
 */
__STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  return 0U;
#else
  uint32_t result;
  __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) );
  return result;
#endif
}
#endif

/**
  \brief   Set Main Stack Pointer Limit
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence the write is silently ignored in non-secure
  mode.

  \details Assigns the given value to the Main Stack Pointer Limit (MSPLIM).
  \param [in]  MainStackPtrLimit  Main Stack Pointer Limit value to set
 */
__STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
    (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  (void)MainStackPtrLimit;
#else
  __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit));
#endif
}

#if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3))
/**
  \brief   Set Main Stack Pointer Limit (non-secure)
  Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure
  Stack Pointer Limit register hence the write is silently ignored.

  \details Assigns the given value to the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state.
  \param [in]  MainStackPtrLimit  Main Stack Pointer value to set
 */
__STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit)
{
#if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)))
  // without main extensions, the non-secure MSPLIM is RAZ/WI
  (void)MainStackPtrLimit;
#else
  __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit));
#endif
}
#endif

#endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */

/**
  \brief   Get FPSCR
  \details Returns the current value of the Floating Point Status/Control register.
  \return  Floating Point Status/Control register value
 */
__STATIC_FORCEINLINE uint32_t __get_FPSCR(void)
{
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
#if __has_builtin(__builtin_arm_get_fpscr)
// Re-enable using built-in when GCC has been fixed
// || (__GNUC__ > 7) || (__GNUC__ == 7 && __GNUC_MINOR__ >= 2)
  /* see https://gcc.gnu.org/ml/gcc-patches/2017-04/msg00443.html */
  return __builtin_arm_get_fpscr();
#else
  uint32_t result;
  __ASM volatile ("VMRS %0, fpscr" : "=r" (result) );
  return(result);
#endif
#else
  return(0U);
#endif
}

/**
  \brief   Set FPSCR
  \details Assigns the given value to the Floating Point Status/Control register.
  \param [in]  fpscr  Floating Point Status/Control value to set
 */
__STATIC_FORCEINLINE void __set_FPSCR(uint32_t fpscr)
{
#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
#if __has_builtin(__builtin_arm_set_fpscr)
// Re-enable using built-in when GCC has been fixed
// || (__GNUC__ > 7) || (__GNUC__ == 7 && __GNUC_MINOR__ >= 2)
  /* see https://gcc.gnu.org/ml/gcc-patches/2017-04/msg00443.html */
  __builtin_arm_set_fpscr(fpscr);
#else
  __ASM volatile ("VMSR fpscr, %0" : : "r" (fpscr) : "vfpcc", "memory");
#endif
#else
  (void)fpscr;
#endif
}
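
/*
  Usage sketch (illustrative only, assumes an ARMv7-M style FPSCR layout): the
  cumulative floating-point exception flags occupy the low bits of FPSCR. A
  typical pattern reads them and then clears them by writing the register back:

      uint32_t fpscr = __get_FPSCR();
      if (fpscr & 0x2U) {
        // division-by-zero (DZC) was raised; handle or log it here
      }
      __set_FPSCR(fpscr & ~0x9FU);   // clear the cumulative exception flags
*/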
\r

/*@} end of CMSIS_Core_RegAccFunctions */


/* ##########################  Core Instruction Access  ######################### */
/** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
  Access to dedicated instructions
  @{
*/

/* Define macros for porting to both thumb1 and thumb2.
 * For thumb1, use low register (r0-r7), specified by constraint "l"
 * Otherwise, use general registers, specified by constraint "r" */
#if defined (__thumb__) && !defined (__thumb2__)
#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
#define __CMSIS_GCC_RW_REG(r) "+l" (r)
#define __CMSIS_GCC_USE_REG(r) "l" (r)
#else
#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
#define __CMSIS_GCC_RW_REG(r) "+r" (r)
#define __CMSIS_GCC_USE_REG(r) "r" (r)
#endif

/**
  \brief   No Operation
  \details No Operation does nothing. This instruction can be used for code alignment purposes.
 */
#define __NOP()                             __ASM volatile ("nop")

/**
  \brief   Wait For Interrupt
  \details Wait For Interrupt is a hint instruction that suspends execution until one of a number of events occurs.
 */
#define __WFI()                             __ASM volatile ("wfi")

/**
  \brief   Wait For Event
  \details Wait For Event is a hint instruction that permits the processor to enter
           a low-power state until one of a number of events occurs.
 */
#define __WFE()                             __ASM volatile ("wfe")

/**
  \brief   Send Event
  \details Send Event is a hint instruction. It causes an event to be signaled to the CPU.
 */
#define __SEV()                             __ASM volatile ("sev")

/**
  \brief   Instruction Synchronization Barrier
  \details Instruction Synchronization Barrier flushes the pipeline in the processor,
           so that all instructions following the ISB are fetched from cache or memory,
           after the instruction has been completed.
 */
__STATIC_FORCEINLINE void __ISB(void)
{
  __ASM volatile ("isb 0xF":::"memory");
}

/**
  \brief   Data Synchronization Barrier
  \details Acts as a special kind of Data Memory Barrier.
           It completes when all explicit memory accesses before this instruction complete.
 */
__STATIC_FORCEINLINE void __DSB(void)
{
  __ASM volatile ("dsb 0xF":::"memory");
}

/**
  \brief   Data Memory Barrier
  \details Ensures the apparent order of the explicit memory operations before
           and after the instruction, without ensuring their completion.
 */
__STATIC_FORCEINLINE void __DMB(void)
{
  __ASM volatile ("dmb 0xF":::"memory");
}
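
/*
  Usage sketch (illustrative only): a common place for these barriers is after
  reconfiguring the system, e.g. relocating the vector table, where the write
  must complete (DSB) and any already-fetched instructions must be discarded
  (ISB) before execution continues:

      SCB->VTOR = (uint32_t)&vector_table;   // hypothetical relocation, SCB comes from the core header
      __DSB();
      __ISB();
*/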
\r

/**
  \brief   Reverse byte order (32 bit)
  \details Reverses the byte order in unsigned integer value. For example, 0x12345678 becomes 0x78563412.
  \param [in]  value  Value to reverse
  \return  Reversed value
 */
__STATIC_FORCEINLINE uint32_t __REV(uint32_t value)
{
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
  return __builtin_bswap32(value);
#else
  uint32_t result;
  __ASM volatile ("rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return result;
#endif
}

/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856.
  \param [in]  value  Value to reverse
  \return  Reversed value
 */
__STATIC_FORCEINLINE uint32_t __REV16(uint32_t value)
{
  uint32_t result;
  __ASM volatile ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return result;
}

/**
  \brief   Reverse byte order (16 bit)
  \details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000.
  \param [in]  value  Value to reverse
  \return  Reversed value
 */
__STATIC_FORCEINLINE int16_t __REVSH(int16_t value)
{
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  return (int16_t)__builtin_bswap16(value);
#else
  int16_t result;
  __ASM volatile ("revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return result;
#endif
}

/**
  \brief   Rotate Right in unsigned value (32 bit)
  \details Rotate Right (immediate) provides the value of the contents of a register rotated by a variable number of bits.
  \param [in]  op1  Value to rotate
  \param [in]  op2  Number of Bits to rotate
  \return  Rotated value
 */
__STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
{
  op2 %= 32U;
  if (op2 == 0U)
  {
    return op1;
  }
  return (op1 >> op2) | (op1 << (32U - op2));
}

/**
  \brief   Breakpoint
  \details Causes the processor to enter Debug state.
           Debug tools can use this to investigate system state when the instruction at a particular address is reached.
  \param [in]  value  is ignored by the processor.
               If required, a debugger can use it to store additional information about the breakpoint.
 */
#define __BKPT(value)                       __ASM volatile ("bkpt "#value)

/**
  \brief   Reverse bit order of value
  \details Reverses the bit order of the given value.
  \param [in]  value  Value to reverse
  \return  Reversed value
 */
__STATIC_FORCEINLINE uint32_t __RBIT(uint32_t value)
{
  uint32_t result;

#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )
  __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
#else
  uint32_t s = (4U /*sizeof(v)*/ * 8U) - 1U; /* extra shift needed at end */

  result = value;                      /* r will be reversed bits of v; first get LSB of v */
  for (value >>= 1U; value != 0U; value >>= 1U)
  {
    result <<= 1U;
    result |= value & 1U;
    s--;
  }
  result <<= s;                        /* shift when v's highest bits are zero */
#endif
  return result;
}

/**
  \brief   Count leading zeros
  \details Counts the number of leading zeros of a data value.
  \param [in]  value  Value to count the leading zeros
  \return  number of leading zeros in value
 */
#define __CLZ             (uint8_t)__builtin_clz
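
/*
  Usage sketch (illustrative only): __CLZ is handy for finding the highest set
  bit or for rounding up to the next power of two:

      uint32_t x = 0x00012345U;
      uint32_t highest_bit = 31U - __CLZ(x);                  // 16 for this value
      uint32_t next_pow2   = 1UL << (32U - __CLZ(x - 1U));    // 0x20000, assumes x > 1

  Note that __builtin_clz(0) is undefined, so guard against a zero argument.
*/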
\r

#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )
/**
  \brief   LDR Exclusive (8 bit)
  \details Executes a exclusive LDR instruction for 8 bit value.
  \param [in]  ptr  Pointer to data
  \return  value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDREXB(volatile uint8_t *addr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) );
#else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
     accepted by assembler. So has to use following less efficient pattern.
  */
  __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
  return ((uint8_t) result);    /* Add explicit type cast here */
}

/**
  \brief   LDR Exclusive (16 bit)
  \details Executes a exclusive LDR instruction for 16 bit values.
  \param [in]  ptr  Pointer to data
  \return  value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDREXH(volatile uint16_t *addr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) );
#else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
     accepted by assembler. So has to use following less efficient pattern.
  */
  __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
  return ((uint16_t) result);    /* Add explicit type cast here */
}

/**
  \brief   LDR Exclusive (32 bit)
  \details Executes a exclusive LDR instruction for 32 bit values.
  \param [in]  ptr  Pointer to data
  \return  value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDREXW(volatile uint32_t *addr)
{
  uint32_t result;
  __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );
  return(result);
}

/**
  \brief   STR Exclusive (8 bit)
  \details Executes a exclusive STR instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]  ptr    Pointer to location
  \return  0  Function succeeded
  \return  1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
{
  uint32_t result;
  __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
  return(result);
}

/**
  \brief   STR Exclusive (16 bit)
  \details Executes a exclusive STR instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]  ptr    Pointer to location
  \return  0  Function succeeded
  \return  1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
{
  uint32_t result;
  __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
  return(result);
}

/**
  \brief   STR Exclusive (32 bit)
  \details Executes a exclusive STR instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]  ptr    Pointer to location
  \return  0  Function succeeded
  \return  1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
{
  uint32_t result;
  __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
  return(result);
}

/**
  \brief   Remove the exclusive lock
  \details Removes the exclusive lock which is created by LDREX.
 */
__STATIC_FORCEINLINE void __CLREX(void)
{
  __ASM volatile ("clrex" ::: "memory");
}
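
/*
  Usage sketch (illustrative only): the exclusive-access pair is the building
  block for lock-free read-modify-write sequences. A minimal atomic increment,
  retrying while another agent breaks the reservation:

      static uint32_t counter;                        // hypothetical shared counter
      uint32_t v;

      do {
        v = __LDREXW(&counter);                       // load with exclusive monitor armed
      } while (__STREXW(v + 1U, &counter) != 0U);     // 0 means the store succeeded
*/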
\r

#endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
           (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */


#if ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
     (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
     (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    )
/**
  \brief   Signed Saturate
  \details Saturates a signed value.
  \param [in]  ARG1  Value to be saturated
  \param [in]  ARG2  Bit position to saturate to (1..32)
  \return  Saturated value
 */
#define __SSAT(ARG1,ARG2) \
__extension__ \
({                          \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

/**
  \brief   Unsigned Saturate
  \details Saturates an unsigned value.
  \param [in]  ARG1  Value to be saturated
  \param [in]  ARG2  Bit position to saturate to (0..31)
  \return  Saturated value
 */
#define __USAT(ARG1,ARG2) \
__extension__ \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

/**
  \brief   Rotate Right with Extend (32 bit)
  \details Moves each bit of a bitstring right by one bit.
           The carry input is shifted in at the left end of the bitstring.
  \param [in]  value  Value to rotate
  \return  Rotated value
 */
__STATIC_FORCEINLINE uint32_t __RRX(uint32_t value)
{
  uint32_t result;
  __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}

/**
  \brief   LDRT Unprivileged (8 bit)
  \details Executes a Unprivileged LDRT instruction for 8 bit value.
  \param [in]  ptr  Pointer to data
  \return  value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDRBT(volatile uint8_t *ptr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) );
#else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
     accepted by assembler. So has to use following less efficient pattern.
  */
  __ASM volatile ("ldrbt %0, [%1]" : "=r" (result) : "r" (ptr) : "memory" );
#endif
  return ((uint8_t) result);    /* Add explicit type cast here */
}

/**
  \brief   LDRT Unprivileged (16 bit)
  \details Executes a Unprivileged LDRT instruction for 16 bit values.
  \param [in]  ptr  Pointer to data
  \return  value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDRHT(volatile uint16_t *ptr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) );
#else
  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
     accepted by assembler. So has to use following less efficient pattern.
  */
  __ASM volatile ("ldrht %0, [%1]" : "=r" (result) : "r" (ptr) : "memory" );
#endif
  return ((uint16_t) result);    /* Add explicit type cast here */
}

/**
  \brief   LDRT Unprivileged (32 bit)
  \details Executes a Unprivileged LDRT instruction for 32 bit values.
  \param [in]  ptr  Pointer to data
  \return  value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDRT(volatile uint32_t *ptr)
{
  uint32_t result;
  __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}

/**
  \brief   STRT Unprivileged (8 bit)
  \details Executes a Unprivileged STRT instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]  ptr    Pointer to location
 */
__STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}

/**
  \brief   STRT Unprivileged (16 bit)
  \details Executes a Unprivileged STRT instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]  ptr    Pointer to location
 */
__STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}

/**
  \brief   STRT Unprivileged (32 bit)
  \details Executes a Unprivileged STRT instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]  ptr    Pointer to location
 */
__STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) );
}

#else  /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
           (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */

/**
  \brief   Signed Saturate
  \details Saturates a signed value.
  \param [in]  value  Value to be saturated
  \param [in]  sat    Bit position to saturate to (1..32)
  \return  Saturated value
 */
__STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat)
{
  if ((sat >= 1U) && (sat <= 32U))
  {
    const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
    const int32_t min = -1 - max ;
    if (val > max)
    {
      return max;
    }
    else if (val < min)
    {
      return min;
    }
  }
  return val;
}

/**
  \brief   Unsigned Saturate
  \details Saturates an unsigned value.
  \param [in]  value  Value to be saturated
  \param [in]  sat    Bit position to saturate to (0..31)
  \return  Saturated value
 */
__STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat)
{
  if (sat <= 31U)
  {
    const uint32_t max = ((1U << sat) - 1U);
    if (val > (int32_t)max)
    {
      return max;
    }
    else if (val < 0)
    {
      return 0U;
    }
  }
  return (uint32_t)val;
}
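
/*
  Worked example (illustrative only): __USAT(val, sat) clamps a signed value
  into the range [0, 2^sat - 1], and __SSAT(val, sat) into
  [-2^(sat-1), 2^(sat-1) - 1]. For instance:

      __USAT(300, 8)   ->  255    // upper limit of an 8-bit unsigned range
      __USAT(-7, 8)    ->    0
      __SSAT(300, 8)   ->  127    // upper limit of an 8-bit signed range
      __SSAT(-300, 8)  -> -128
*/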
\r

#endif /* ((defined (__ARM_ARCH_7M__      ) && (__ARM_ARCH_7M__      == 1)) || \
           (defined (__ARM_ARCH_7EM__     ) && (__ARM_ARCH_7EM__     == 1)) || \
           (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))    ) */


#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )
/**
  \brief   Load-Acquire (8 bit)
  \details Executes a LDAB instruction for 8 bit value.
  \param [in]  ptr  Pointer to data
  \return  value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDAB(volatile uint8_t *ptr)
{
  uint32_t result;
  __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t) result);
}

/**
  \brief   Load-Acquire (16 bit)
  \details Executes a LDAH instruction for 16 bit values.
  \param [in]  ptr  Pointer to data
  \return  value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDAH(volatile uint16_t *ptr)
{
  uint32_t result;
  __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t) result);
}

/**
  \brief   Load-Acquire (32 bit)
  \details Executes a LDA instruction for 32 bit values.
  \param [in]  ptr  Pointer to data
  \return  value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDA(volatile uint32_t *ptr)
{
  uint32_t result;
  __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}

/**
  \brief   Store-Release (8 bit)
  \details Executes a STLB instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]  ptr    Pointer to location
 */
__STATIC_FORCEINLINE void __STLB(uint8_t value, volatile uint8_t *ptr)
{
  __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}

/**
  \brief   Store-Release (16 bit)
  \details Executes a STLH instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]  ptr    Pointer to location
 */
__STATIC_FORCEINLINE void __STLH(uint16_t value, volatile uint16_t *ptr)
{
  __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}

/**
  \brief   Store-Release (32 bit)
  \details Executes a STL instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]  ptr    Pointer to location
 */
__STATIC_FORCEINLINE void __STL(uint32_t value, volatile uint32_t *ptr)
{
  __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) );
}

/**
  \brief   Load-Acquire Exclusive (8 bit)
  \details Executes a LDAB exclusive instruction for 8 bit value.
  \param [in]  ptr  Pointer to data
  \return  value of type uint8_t at (*ptr)
 */
__STATIC_FORCEINLINE uint8_t __LDAEXB(volatile uint8_t *ptr)
{
  uint32_t result;
  __ASM volatile ("ldaexb %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint8_t) result);
}

/**
  \brief   Load-Acquire Exclusive (16 bit)
  \details Executes a LDAH exclusive instruction for 16 bit values.
  \param [in]  ptr  Pointer to data
  \return  value of type uint16_t at (*ptr)
 */
__STATIC_FORCEINLINE uint16_t __LDAEXH(volatile uint16_t *ptr)
{
  uint32_t result;
  __ASM volatile ("ldaexh %0, %1" : "=r" (result) : "Q" (*ptr) );
  return ((uint16_t) result);
}

/**
  \brief   Load-Acquire Exclusive (32 bit)
  \details Executes a LDA exclusive instruction for 32 bit values.
  \param [in]  ptr  Pointer to data
  \return  value of type uint32_t at (*ptr)
 */
__STATIC_FORCEINLINE uint32_t __LDAEX(volatile uint32_t *ptr)
{
  uint32_t result;
  __ASM volatile ("ldaex %0, %1" : "=r" (result) : "Q" (*ptr) );
  return(result);
}

/**
  \brief   Store-Release Exclusive (8 bit)
  \details Executes a STLB exclusive instruction for 8 bit values.
  \param [in]  value  Value to store
  \param [in]  ptr    Pointer to location
  \return  0  Function succeeded
  \return  1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STLEXB(uint8_t value, volatile uint8_t *ptr)
{
  uint32_t result;
  __ASM volatile ("stlexb %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) );
  return(result);
}

/**
  \brief   Store-Release Exclusive (16 bit)
  \details Executes a STLH exclusive instruction for 16 bit values.
  \param [in]  value  Value to store
  \param [in]  ptr    Pointer to location
  \return  0  Function succeeded
  \return  1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STLEXH(uint16_t value, volatile uint16_t *ptr)
{
  uint32_t result;
  __ASM volatile ("stlexh %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) );
  return(result);
}

/**
  \brief   Store-Release Exclusive (32 bit)
  \details Executes a STL exclusive instruction for 32 bit values.
  \param [in]  value  Value to store
  \param [in]  ptr    Pointer to location
  \return  0  Function succeeded
  \return  1  Function failed
 */
__STATIC_FORCEINLINE uint32_t __STLEX(uint32_t value, volatile uint32_t *ptr)
{
  uint32_t result;
  __ASM volatile ("stlex %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) );
  return(result);
}

#endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
           (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    ) */

/*@}*/ /* end of group CMSIS_Core_InstructionInterface */


/* ###################  Compiler specific Intrinsics  ########################### */
/** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics
  Access to dedicated SIMD instructions
  @{
*/

#if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1))

__STATIC_FORCEINLINE uint32_t __SADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;
  __ASM volatile ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

#define __SSAT16(ARG1,ARG2) \
({                          \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

#define __USAT16(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat16 %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })

__STATIC_FORCEINLINE uint32_t __UXTB16(uint32_t op1)
{
  uint32_t result;
  __ASM volatile ("uxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SXTB16(uint32_t op1)
{
  uint32_t result;
  __ASM volatile ("sxtb16 %0, %1" : "=r" (result) : "r" (op1));
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMUAD (uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;
  __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;
  __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__STATIC_FORCEINLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__STATIC_FORCEINLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__STATIC_FORCEINLINE uint32_t __SMUSD (uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;
  __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;
  __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

__STATIC_FORCEINLINE uint64_t __SMLSLD (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__STATIC_FORCEINLINE uint64_t __SMLSLDX (uint32_t op1, uint32_t op2, uint64_t acc)
{
  union llreg_u{
    uint32_t w32[2];
    uint64_t w64;
  } llr;
  llr.w64 = acc;

#ifndef __ARMEB__   /* Little endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) );
#else               /* Big endian */
  __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) );
#endif

  return(llr.w64);
}

__STATIC_FORCEINLINE uint32_t __SEL (uint32_t op1, uint32_t op2)
{
  uint32_t result;
  __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE int32_t __QADD( int32_t op1,  int32_t op2)
{
  int32_t result;
  __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE int32_t __QSUB( int32_t op1,  int32_t op2)
{
  int32_t result;
  __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

#if 0
#define __PKHBT(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
  __RES; \
 })

#define __PKHTB(ARG1,ARG2,ARG3) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \
  if (ARG3 == 0) \
    __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2) ); \
  else \
    __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \
  __RES; \
 })
#endif

#define __PKHBT(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0x0000FFFFUL) |  \
                                           ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL)  )

#define __PKHTB(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0xFFFF0000UL) |  \
                                           ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL)  )

__STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
{
  int32_t result;
  __ASM volatile ("smmla %0, %1, %2, %3" : "=r" (result): "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}
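
/*
  Usage sketch (illustrative only): the packed 16-bit intrinsics operate on two
  halfwords per register, so a dot product of two int16_t arrays can consume
  two samples per iteration with __SMLAD:

      int32_t dot_q15(const int16_t *a, const int16_t *b, uint32_t pairs)
      {
        uint32_t acc = 0U;
        while (pairs-- != 0U) {
          uint32_t va = __UNALIGNED_UINT32_READ(a);   // two 16-bit samples
          uint32_t vb = __UNALIGNED_UINT32_READ(b);
          acc = __SMLAD(va, vb, acc);                 // acc += a0*b0 + a1*b1
          a += 2; b += 2;
        }
        return (int32_t)acc;
      }
*/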
\r

#endif /* (__ARM_FEATURE_DSP == 1) */
/*@} end of group CMSIS_SIMD_intrinsics */


#pragma GCC diagnostic pop

#endif /* __CMSIS_GCC_H */