/**************************************************************************//**
 * @file     cmsis_iccarm.h
 * @brief    CMSIS compiler ICCARM (IAR Compiler for Arm) header file
 * @version  V5.0.5
 * @date     10. January 2018
 ******************************************************************************/

//------------------------------------------------------------------------------
//
// Copyright (c) 2017-2018 IAR Systems
//
// Licensed under the Apache License, Version 2.0 (the "License")
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
//------------------------------------------------------------------------------
#ifndef __CMSIS_ICCARM_H__
#define __CMSIS_ICCARM_H__

#ifndef __ICCARM__
  #error This file should only be compiled by ICCARM
#endif

#pragma system_include

#define __IAR_FT _Pragma("inline=forced") __intrinsic

#if (__VER__ >= 8000000)
  #define __ICCARM_V8 1
#else
  #define __ICCARM_V8 0
#endif

#ifndef __ALIGNED
  #if __ICCARM_V8
    #define __ALIGNED(x) __attribute__((aligned(x)))
  #elif (__VER__ >= 7080000)
    /* Needs IAR language extensions */
    #define __ALIGNED(x) __attribute__((aligned(x)))
  #else
    #warning No compiler specific solution for __ALIGNED. __ALIGNED is ignored.
    #define __ALIGNED(x)
  #endif
#endif
/* Define compiler macros for CPU architecture, used in CMSIS 5. */
#if __ARM_ARCH_6M__ || __ARM_ARCH_7M__ || __ARM_ARCH_7EM__ || __ARM_ARCH_8M_BASE__ || __ARM_ARCH_8M_MAIN__
  /* Macros already defined */
#else
  #if defined(__ARM8M_MAINLINE__) || defined(__ARM8EM_MAINLINE__)
    #define __ARM_ARCH_8M_MAIN__ 1
  #elif defined(__ARM8M_BASELINE__)
    #define __ARM_ARCH_8M_BASE__ 1
  #elif defined(__ARM_ARCH_PROFILE) && __ARM_ARCH_PROFILE == 'M'
    #if __ARM_ARCH == 6
      #define __ARM_ARCH_6M__ 1
    #elif __ARM_ARCH == 7
      #if __ARM_FEATURE_DSP
        #define __ARM_ARCH_7EM__ 1
      #else
        #define __ARM_ARCH_7M__ 1
      #endif
    #endif /* __ARM_ARCH */
  #endif /* __ARM_ARCH_PROFILE == 'M' */
#endif
/* Alternative core deduction for older ICCARM versions */
#if !defined(__ARM_ARCH_6M__) && !defined(__ARM_ARCH_7M__) && !defined(__ARM_ARCH_7EM__) && \
    !defined(__ARM_ARCH_8M_BASE__) && !defined(__ARM_ARCH_8M_MAIN__)
  #if defined(__ARM6M__) && (__CORE__ == __ARM6M__)
    #define __ARM_ARCH_6M__ 1
  #elif defined(__ARM7M__) && (__CORE__ == __ARM7M__)
    #define __ARM_ARCH_7M__ 1
  #elif defined(__ARM7EM__) && (__CORE__ == __ARM7EM__)
    #define __ARM_ARCH_7EM__ 1
  #elif defined(__ARM8M_BASELINE__) && (__CORE__ == __ARM8M_BASELINE__)
    #define __ARM_ARCH_8M_BASE__ 1
  #elif defined(__ARM8M_MAINLINE__) && (__CORE__ == __ARM8M_MAINLINE__)
    #define __ARM_ARCH_8M_MAIN__ 1
  #elif defined(__ARM8EM_MAINLINE__) && (__CORE__ == __ARM8EM_MAINLINE__)
    #define __ARM_ARCH_8M_MAIN__ 1
  #else
    #error "Unknown target."
  #endif
#endif

#if defined(__ARM_ARCH_6M__) && __ARM_ARCH_6M__==1
  #define __IAR_M0_FAMILY 1
#elif defined(__ARM_ARCH_8M_BASE__) && __ARM_ARCH_8M_BASE__==1
  #define __IAR_M0_FAMILY 1
#else
  #define __IAR_M0_FAMILY 0
#endif
#ifndef __ASM
  #define __ASM __asm
#endif

#ifndef __INLINE
  #define __INLINE inline
#endif

#ifndef __NO_RETURN
  #if __ICCARM_V8
    #define __NO_RETURN __attribute__((__noreturn__))
  #else
    #define __NO_RETURN _Pragma("object_attribute=__noreturn")
  #endif
#endif

#ifndef __PACKED
  #if __ICCARM_V8
    #define __PACKED __attribute__((packed, aligned(1)))
  #else
    /* Needs IAR language extensions */
    #define __PACKED __packed
  #endif
#endif

#ifndef __PACKED_STRUCT
  #if __ICCARM_V8
    #define __PACKED_STRUCT struct __attribute__((packed, aligned(1)))
  #else
    /* Needs IAR language extensions */
    #define __PACKED_STRUCT __packed struct
  #endif
#endif

#ifndef __PACKED_UNION
  #if __ICCARM_V8
    #define __PACKED_UNION union __attribute__((packed, aligned(1)))
  #else
    /* Needs IAR language extensions */
    #define __PACKED_UNION __packed union
  #endif
#endif

#ifndef __RESTRICT
  #define __RESTRICT restrict
#endif

#ifndef __STATIC_INLINE
  #define __STATIC_INLINE static inline
#endif

#ifndef __FORCEINLINE
  #define __FORCEINLINE _Pragma("inline=forced")
#endif

#ifndef __STATIC_FORCEINLINE
  #define __STATIC_FORCEINLINE __FORCEINLINE __STATIC_INLINE
#endif
#ifndef __UNALIGNED_UINT16_READ
#pragma language=save
#pragma language=extended
__IAR_FT uint16_t __iar_uint16_read(void const *ptr)
{
  return *(__packed uint16_t*)(ptr);
}
#pragma language=restore
#define __UNALIGNED_UINT16_READ(PTR) __iar_uint16_read(PTR)
#endif

#ifndef __UNALIGNED_UINT16_WRITE
#pragma language=save
#pragma language=extended
__IAR_FT void __iar_uint16_write(void const *ptr, uint16_t val)
{
  *(__packed uint16_t*)(ptr) = val;
}
#pragma language=restore
#define __UNALIGNED_UINT16_WRITE(PTR,VAL) __iar_uint16_write(PTR,VAL)
#endif

#ifndef __UNALIGNED_UINT32_READ
#pragma language=save
#pragma language=extended
__IAR_FT uint32_t __iar_uint32_read(void const *ptr)
{
  return *(__packed uint32_t*)(ptr);
}
#pragma language=restore
#define __UNALIGNED_UINT32_READ(PTR) __iar_uint32_read(PTR)
#endif

#ifndef __UNALIGNED_UINT32_WRITE
#pragma language=save
#pragma language=extended
__IAR_FT void __iar_uint32_write(void const *ptr, uint32_t val)
{
  *(__packed uint32_t*)(ptr) = val;
}
#pragma language=restore
#define __UNALIGNED_UINT32_WRITE(PTR,VAL) __iar_uint32_write(PTR,VAL)
#endif

#ifndef __UNALIGNED_UINT32   /* deprecated */
#pragma language=save
#pragma language=extended
__packed struct __iar_u32 { uint32_t v; };
#pragma language=restore
#define __UNALIGNED_UINT32(PTR) (((struct __iar_u32 *)(PTR))->v)
#endif
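
/*
 * Illustrative usage note (not part of the upstream header): the unaligned
 * access helpers above let portable CMSIS code read and write 16/32-bit
 * values at addresses that may not be naturally aligned, e.g. fields inside
 * a packed byte stream. A minimal sketch, assuming a caller-provided buffer:
 *
 *   uint8_t frame[7];                                    // hypothetical packed protocol frame
 *   uint16_t id = __UNALIGNED_UINT16_READ(&frame[1]);    // 16-bit read at an odd offset
 *   __UNALIGNED_UINT32_WRITE(&frame[3], 0x12345678U);    // 32-bit write at an odd offset
 */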
#ifndef __USED
  #if __ICCARM_V8
    #define __USED __attribute__((used))
  #else
    #define __USED _Pragma("__root")
  #endif
#endif

#ifndef __WEAK
  #if __ICCARM_V8
    #define __WEAK __attribute__((weak))
  #else
    #define __WEAK _Pragma("__weak")
  #endif
#endif
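
/*
 * Illustrative usage note (not part of the upstream header): these compiler
 * abstraction macros let the same application source build under ICCARM as
 * under other CMSIS-supported compilers. A minimal sketch, using hypothetical
 * application symbols:
 *
 *   __PACKED_STRUCT sensor_frame { uint8_t id; uint32_t value; };  // no padding between members
 *   __ALIGNED(8) static uint8_t dma_buffer[64];                    // 8-byte aligned buffer
 *   __WEAK void HardFault_Handler(void) { while (1) { } }          // default, overridable handler
 */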
#ifndef __ICCARM_INTRINSICS_VERSION__
  #define __ICCARM_INTRINSICS_VERSION__ 0
#endif

#if __ICCARM_INTRINSICS_VERSION__ == 2

  #if defined(__CLZ)
    #undef __CLZ
  #endif
  #if defined(__REVSH)
    #undef __REVSH
  #endif
  #if defined(__RBIT)
    #undef __RBIT
  #endif
  #if defined(__SSAT)
    #undef __SSAT
  #endif
  #if defined(__USAT)
    #undef __USAT
  #endif

  #include "iccarm_builtin.h"

  #define __disable_fault_irq __iar_builtin_disable_fiq
  #define __disable_irq       __iar_builtin_disable_interrupt
  #define __enable_fault_irq  __iar_builtin_enable_fiq
  #define __enable_irq        __iar_builtin_enable_interrupt
  #define __arm_rsr           __iar_builtin_rsr
  #define __arm_wsr           __iar_builtin_wsr
  #define __get_APSR()                (__arm_rsr("APSR"))
  #define __get_BASEPRI()             (__arm_rsr("BASEPRI"))
  #define __get_CONTROL()             (__arm_rsr("CONTROL"))
  #define __get_FAULTMASK()           (__arm_rsr("FAULTMASK"))

  #if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
       (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
    #define __get_FPSCR()             (__arm_rsr("FPSCR"))
    #define __set_FPSCR(VALUE)        (__arm_wsr("FPSCR", (VALUE)))
  #else
    #define __get_FPSCR()             ( 0 )
    #define __set_FPSCR(VALUE)        ((void)VALUE)
  #endif

  #define __get_IPSR()                (__arm_rsr("IPSR"))
  #define __get_MSP()                 (__arm_rsr("MSP"))
  #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
       (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
    // without main extensions, the non-secure MSPLIM is RAZ/WI
    #define __get_MSPLIM()            (0U)
  #else
    #define __get_MSPLIM()            (__arm_rsr("MSPLIM"))
  #endif
  #define __get_PRIMASK()             (__arm_rsr("PRIMASK"))
  #define __get_PSP()                 (__arm_rsr("PSP"))

  #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
       (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
    // without main extensions, the non-secure PSPLIM is RAZ/WI
    #define __get_PSPLIM()            (0U)
  #else
    #define __get_PSPLIM()            (__arm_rsr("PSPLIM"))
  #endif

  #define __get_xPSR()                (__arm_rsr("xPSR"))

  #define __set_BASEPRI(VALUE)        (__arm_wsr("BASEPRI", (VALUE)))
  #define __set_BASEPRI_MAX(VALUE)    (__arm_wsr("BASEPRI_MAX", (VALUE)))
  #define __set_CONTROL(VALUE)        (__arm_wsr("CONTROL", (VALUE)))
  #define __set_FAULTMASK(VALUE)      (__arm_wsr("FAULTMASK", (VALUE)))
  #define __set_MSP(VALUE)            (__arm_wsr("MSP", (VALUE)))

  #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
       (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
    // without main extensions, the non-secure MSPLIM is RAZ/WI
    #define __set_MSPLIM(VALUE)       ((void)(VALUE))
  #else
    #define __set_MSPLIM(VALUE)       (__arm_wsr("MSPLIM", (VALUE)))
  #endif
  #define __set_PRIMASK(VALUE)        (__arm_wsr("PRIMASK", (VALUE)))
  #define __set_PSP(VALUE)            (__arm_wsr("PSP", (VALUE)))
  #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
       (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
    // without main extensions, the non-secure PSPLIM is RAZ/WI
    #define __set_PSPLIM(VALUE)       ((void)(VALUE))
  #else
    #define __set_PSPLIM(VALUE)       (__arm_wsr("PSPLIM", (VALUE)))
  #endif

  #define __TZ_get_CONTROL_NS()       (__arm_rsr("CONTROL_NS"))
  #define __TZ_set_CONTROL_NS(VALUE)  (__arm_wsr("CONTROL_NS", (VALUE)))
  #define __TZ_get_PSP_NS()           (__arm_rsr("PSP_NS"))
  #define __TZ_set_PSP_NS(VALUE)      (__arm_wsr("PSP_NS", (VALUE)))
  #define __TZ_get_MSP_NS()           (__arm_rsr("MSP_NS"))
  #define __TZ_set_MSP_NS(VALUE)      (__arm_wsr("MSP_NS", (VALUE)))
  #define __TZ_get_SP_NS()            (__arm_rsr("SP_NS"))
  #define __TZ_set_SP_NS(VALUE)       (__arm_wsr("SP_NS", (VALUE)))
  #define __TZ_get_PRIMASK_NS()       (__arm_rsr("PRIMASK_NS"))
  #define __TZ_set_PRIMASK_NS(VALUE)  (__arm_wsr("PRIMASK_NS", (VALUE)))
  #define __TZ_get_BASEPRI_NS()       (__arm_rsr("BASEPRI_NS"))
  #define __TZ_set_BASEPRI_NS(VALUE)  (__arm_wsr("BASEPRI_NS", (VALUE)))
  #define __TZ_get_FAULTMASK_NS()     (__arm_rsr("FAULTMASK_NS"))
  #define __TZ_set_FAULTMASK_NS(VALUE) (__arm_wsr("FAULTMASK_NS", (VALUE)))
  #define __TZ_get_PSPLIM_NS()        (__arm_rsr("PSPLIM_NS"))
  #define __TZ_set_PSPLIM_NS(VALUE)   (__arm_wsr("PSPLIM_NS", (VALUE)))
  #define __TZ_get_MSPLIM_NS()        (__arm_rsr("MSPLIM_NS"))
  #define __TZ_set_MSPLIM_NS(VALUE)   (__arm_wsr("MSPLIM_NS", (VALUE)))
  #define __NOP     __iar_builtin_no_operation

  #define __CLZ     __iar_builtin_CLZ
  #define __CLREX   __iar_builtin_CLREX

  #define __DMB     __iar_builtin_DMB
  #define __DSB     __iar_builtin_DSB
  #define __ISB     __iar_builtin_ISB

  #define __LDREXB  __iar_builtin_LDREXB
  #define __LDREXH  __iar_builtin_LDREXH
  #define __LDREXW  __iar_builtin_LDREX

  #define __RBIT    __iar_builtin_RBIT
  #define __REV     __iar_builtin_REV
  #define __REV16   __iar_builtin_REV16

  __IAR_FT int16_t __REVSH(int16_t val)
  {
    return (int16_t) __iar_builtin_REVSH(val);
  }
  #define __ROR     __iar_builtin_ROR
  #define __RRX     __iar_builtin_RRX

  #define __SEV     __iar_builtin_SEV

  #if !__IAR_M0_FAMILY
    #define __SSAT  __iar_builtin_SSAT
  #endif

  #define __STREXB  __iar_builtin_STREXB
  #define __STREXH  __iar_builtin_STREXH
  #define __STREXW  __iar_builtin_STREX

  #if !__IAR_M0_FAMILY
    #define __USAT  __iar_builtin_USAT
  #endif

  #define __WFE     __iar_builtin_WFE
  #define __WFI     __iar_builtin_WFI

  #if __ARM_MEDIA__
    #define __SADD8   __iar_builtin_SADD8
    #define __QADD8   __iar_builtin_QADD8
    #define __SHADD8  __iar_builtin_SHADD8
    #define __UADD8   __iar_builtin_UADD8
    #define __UQADD8  __iar_builtin_UQADD8
    #define __UHADD8  __iar_builtin_UHADD8
    #define __SSUB8   __iar_builtin_SSUB8
    #define __QSUB8   __iar_builtin_QSUB8
    #define __SHSUB8  __iar_builtin_SHSUB8
    #define __USUB8   __iar_builtin_USUB8
    #define __UQSUB8  __iar_builtin_UQSUB8
    #define __UHSUB8  __iar_builtin_UHSUB8
    #define __SADD16  __iar_builtin_SADD16
    #define __QADD16  __iar_builtin_QADD16
    #define __SHADD16 __iar_builtin_SHADD16
    #define __UADD16  __iar_builtin_UADD16
    #define __UQADD16 __iar_builtin_UQADD16
    #define __UHADD16 __iar_builtin_UHADD16
    #define __SSUB16  __iar_builtin_SSUB16
    #define __QSUB16  __iar_builtin_QSUB16
    #define __SHSUB16 __iar_builtin_SHSUB16
    #define __USUB16  __iar_builtin_USUB16
    #define __UQSUB16 __iar_builtin_UQSUB16
    #define __UHSUB16 __iar_builtin_UHSUB16
    #define __SASX    __iar_builtin_SASX
    #define __QASX    __iar_builtin_QASX
    #define __SHASX   __iar_builtin_SHASX
    #define __UASX    __iar_builtin_UASX
    #define __UQASX   __iar_builtin_UQASX
    #define __UHASX   __iar_builtin_UHASX
    #define __SSAX    __iar_builtin_SSAX
    #define __QSAX    __iar_builtin_QSAX
    #define __SHSAX   __iar_builtin_SHSAX
    #define __USAX    __iar_builtin_USAX
    #define __UQSAX   __iar_builtin_UQSAX
    #define __UHSAX   __iar_builtin_UHSAX
    #define __USAD8   __iar_builtin_USAD8
    #define __USADA8  __iar_builtin_USADA8
    #define __SSAT16  __iar_builtin_SSAT16
    #define __USAT16  __iar_builtin_USAT16
    #define __UXTB16  __iar_builtin_UXTB16
    #define __UXTAB16 __iar_builtin_UXTAB16
    #define __SXTB16  __iar_builtin_SXTB16
    #define __SXTAB16 __iar_builtin_SXTAB16
    #define __SMUAD   __iar_builtin_SMUAD
    #define __SMUADX  __iar_builtin_SMUADX
    #define __SMMLA   __iar_builtin_SMMLA
    #define __SMLAD   __iar_builtin_SMLAD
    #define __SMLADX  __iar_builtin_SMLADX
    #define __SMLALD  __iar_builtin_SMLALD
    #define __SMLALDX __iar_builtin_SMLALDX
    #define __SMUSD   __iar_builtin_SMUSD
    #define __SMUSDX  __iar_builtin_SMUSDX
    #define __SMLSD   __iar_builtin_SMLSD
    #define __SMLSDX  __iar_builtin_SMLSDX
    #define __SMLSLD  __iar_builtin_SMLSLD
    #define __SMLSLDX __iar_builtin_SMLSLDX
    #define __SEL     __iar_builtin_SEL
    #define __QADD    __iar_builtin_QADD
    #define __QSUB    __iar_builtin_QSUB
    #define __PKHBT   __iar_builtin_PKHBT
    #define __PKHTB   __iar_builtin_PKHTB
  #endif
#else /* __ICCARM_INTRINSICS_VERSION__ == 2 */

  #if __IAR_M0_FAMILY
   /* Avoid clash between intrinsics.h and arm_math.h when compiling for Cortex-M0. */
    #define __CLZ  __cmsis_iar_clz_not_active
    #define __SSAT __cmsis_iar_ssat_not_active
    #define __USAT __cmsis_iar_usat_not_active
    #define __RBIT __cmsis_iar_rbit_not_active
    #define __get_APSR  __cmsis_iar_get_APSR_not_active
  #endif

  #if (!((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
         (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     ))
    #define __get_FPSCR __cmsis_iar_get_FPSR_not_active
    #define __set_FPSCR __cmsis_iar_set_FPSR_not_active
  #endif

  #ifdef __INTRINSICS_INCLUDED
    #error intrinsics.h is already included previously!
  #endif
  #include <intrinsics.h>

  #if __IAR_M0_FAMILY
   /* Avoid clash between intrinsics.h and arm_math.h when compiling for Cortex-M0. */
    #undef __CLZ
    #undef __SSAT
    #undef __USAT
    #undef __RBIT
    #undef __get_APSR

    __STATIC_INLINE uint8_t __CLZ(uint32_t data)
    {
      if (data == 0U) { return 32U; }

      uint32_t count = 0U;
      uint32_t mask = 0x80000000U;

      while ((data & mask) == 0U)
      {
        count += 1U;
        mask = mask >> 1U;
      }
      return count;
    }
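
    /*
     * Illustrative note (not part of the upstream header): this software
     * fallback mirrors the CLZ instruction by counting leading zero bits
     * from the most significant bit down. For example, __CLZ(0x80000000U)
     * == 0, __CLZ(0x00010000U) == 15 and __CLZ(0U) == 32.
     */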
    __STATIC_INLINE uint32_t __RBIT(uint32_t v)
    {
      uint8_t sc = 31U;
      uint32_t r = v;
      for (v >>= 1U; v; v >>= 1U)
      {
        r <<= 1U;
        r |= v & 1U;
        sc--;
      }
      return (r << sc);
    }

    __STATIC_INLINE uint32_t __get_APSR(void)
    {
      uint32_t res;
      __asm("MRS      %0,APSR" : "=r" (res));
      return res;
    }

  #endif
  #if (!((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
         (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     ))
    #undef __get_FPSCR
    #undef __set_FPSCR
    #define __get_FPSCR()       (0)
    #define __set_FPSCR(VALUE)  ((void)VALUE)
  #endif

  #pragma diag_suppress=Pe940
  #pragma diag_suppress=Pe177

  #define __enable_irq    __enable_interrupt
  #define __disable_irq   __disable_interrupt
  #define __NOP           __no_operation

  #define __get_xPSR      __get_PSR
  #if (!defined(__ARM_ARCH_6M__) || __ARM_ARCH_6M__==0)

    __IAR_FT uint32_t __LDREXW(uint32_t volatile *ptr)
    {
      return __LDREX((unsigned long *)ptr);
    }

    __IAR_FT uint32_t __STREXW(uint32_t value, uint32_t volatile *ptr)
    {
      return __STREX(value, (unsigned long *)ptr);
    }
  #endif

  /* __CORTEX_M is defined in core_cm0.h, core_cm3.h and core_cm4.h. */
  #if (__CORTEX_M >= 0x03)

    __IAR_FT uint32_t __RRX(uint32_t value)
    {
      uint32_t result;
      __ASM("RRX      %0, %1" : "=r"(result) : "r" (value) : "cc");
      return(result);
    }

    __IAR_FT void __set_BASEPRI_MAX(uint32_t value)
    {
      __asm volatile("MSR      BASEPRI_MAX,%0"::"r" (value));
    }

    #define __enable_fault_irq  __enable_fiq
    #define __disable_fault_irq __disable_fiq

  #endif /* (__CORTEX_M >= 0x03) */
  __IAR_FT uint32_t __ROR(uint32_t op1, uint32_t op2)
  {
    return (op1 >> op2) | (op1 << ((sizeof(op1)*8)-op2));
  }
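
  /*
   * Illustrative note (not part of the upstream header): __ROR rotates a
   * 32-bit value right by op2 bits, e.g. __ROR(0x000000F0U, 4) == 0x0000000FU.
   * As written, the shift expression is only well-defined for op2 in 1..31,
   * so callers of this fallback should avoid a rotate count of 0 or 32.
   */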
  #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
       (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )

    __IAR_FT uint32_t __get_MSPLIM(void)
    {
      uint32_t res;
    #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
         (!defined (__ARM_FEATURE_CMSE  ) || (__ARM_FEATURE_CMSE   < 3)))
      // without main extensions, the non-secure MSPLIM is RAZ/WI
      res = 0U;
    #else
      __asm volatile("MRS      %0,MSPLIM" : "=r" (res));
    #endif
      return res;
    }

    __IAR_FT void __set_MSPLIM(uint32_t value)
    {
    #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
         (!defined (__ARM_FEATURE_CMSE  ) || (__ARM_FEATURE_CMSE   < 3)))
      // without main extensions, the non-secure MSPLIM is RAZ/WI
      (void)value;
    #else
      __asm volatile("MSR      MSPLIM,%0" :: "r" (value));
    #endif
    }

    __IAR_FT uint32_t __get_PSPLIM(void)
    {
      uint32_t res;
    #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
         (!defined (__ARM_FEATURE_CMSE  ) || (__ARM_FEATURE_CMSE   < 3)))
      // without main extensions, the non-secure PSPLIM is RAZ/WI
      res = 0U;
    #else
      __asm volatile("MRS      %0,PSPLIM" : "=r" (res));
    #endif
      return res;
    }

    __IAR_FT void __set_PSPLIM(uint32_t value)
    {
    #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
         (!defined (__ARM_FEATURE_CMSE  ) || (__ARM_FEATURE_CMSE   < 3)))
      // without main extensions, the non-secure PSPLIM is RAZ/WI
      (void)value;
    #else
      __asm volatile("MSR      PSPLIM,%0" :: "r" (value));
    #endif
    }

    __IAR_FT uint32_t __TZ_get_CONTROL_NS(void)
    {
      uint32_t res;
      __asm volatile("MRS      %0,CONTROL_NS" : "=r" (res));
      return res;
    }

    __IAR_FT void __TZ_set_CONTROL_NS(uint32_t value)
    {
      __asm volatile("MSR      CONTROL_NS,%0" :: "r" (value));
    }

    __IAR_FT uint32_t __TZ_get_PSP_NS(void)
    {
      uint32_t res;
      __asm volatile("MRS      %0,PSP_NS" : "=r" (res));
      return res;
    }

    __IAR_FT void __TZ_set_PSP_NS(uint32_t value)
    {
      __asm volatile("MSR      PSP_NS,%0" :: "r" (value));
    }

    __IAR_FT uint32_t __TZ_get_MSP_NS(void)
    {
      uint32_t res;
      __asm volatile("MRS      %0,MSP_NS" : "=r" (res));
      return res;
    }

    __IAR_FT void __TZ_set_MSP_NS(uint32_t value)
    {
      __asm volatile("MSR      MSP_NS,%0" :: "r" (value));
    }

    __IAR_FT uint32_t __TZ_get_SP_NS(void)
    {
      uint32_t res;
      __asm volatile("MRS      %0,SP_NS" : "=r" (res));
      return res;
    }

    __IAR_FT void __TZ_set_SP_NS(uint32_t value)
    {
      __asm volatile("MSR      SP_NS,%0" :: "r" (value));
    }

    __IAR_FT uint32_t __TZ_get_PRIMASK_NS(void)
    {
      uint32_t res;
      __asm volatile("MRS      %0,PRIMASK_NS" : "=r" (res));
      return res;
    }

    __IAR_FT void __TZ_set_PRIMASK_NS(uint32_t value)
    {
      __asm volatile("MSR      PRIMASK_NS,%0" :: "r" (value));
    }

    __IAR_FT uint32_t __TZ_get_BASEPRI_NS(void)
    {
      uint32_t res;
      __asm volatile("MRS      %0,BASEPRI_NS" : "=r" (res));
      return res;
    }

    __IAR_FT void __TZ_set_BASEPRI_NS(uint32_t value)
    {
      __asm volatile("MSR      BASEPRI_NS,%0" :: "r" (value));
    }

    __IAR_FT uint32_t __TZ_get_FAULTMASK_NS(void)
    {
      uint32_t res;
      __asm volatile("MRS      %0,FAULTMASK_NS" : "=r" (res));
      return res;
    }

    __IAR_FT void __TZ_set_FAULTMASK_NS(uint32_t value)
    {
      __asm volatile("MSR      FAULTMASK_NS,%0" :: "r" (value));
    }

    __IAR_FT uint32_t __TZ_get_PSPLIM_NS(void)
    {
      uint32_t res;
      __asm volatile("MRS      %0,PSPLIM_NS" : "=r" (res));
      return res;
    }

    __IAR_FT void __TZ_set_PSPLIM_NS(uint32_t value)
    {
      __asm volatile("MSR      PSPLIM_NS,%0" :: "r" (value));
    }

    __IAR_FT uint32_t __TZ_get_MSPLIM_NS(void)
    {
      uint32_t res;
      __asm volatile("MRS      %0,MSPLIM_NS" : "=r" (res));
      return res;
    }

    __IAR_FT void __TZ_set_MSPLIM_NS(uint32_t value)
    {
      __asm volatile("MSR      MSPLIM_NS,%0" :: "r" (value));
    }

  #endif /* __ARM_ARCH_8M_MAIN__ or __ARM_ARCH_8M_BASE__ */
#endif   /* __ICCARM_INTRINSICS_VERSION__ == 2 */

#define __BKPT(value)    __asm volatile ("BKPT     %0" : : "i"(value))
#if __IAR_M0_FAMILY
  __STATIC_INLINE int32_t __SSAT(int32_t val, uint32_t sat)
  {
    if ((sat >= 1U) && (sat <= 32U))
    {
      const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
      const int32_t min = -1 - max ;
      if (val > max)
      {
        return max;
      }
      else if (val < min)
      {
        return min;
      }
    }
    return val;
  }

  __STATIC_INLINE uint32_t __USAT(int32_t val, uint32_t sat)
  {
    if (sat <= 31U)
    {
      const uint32_t max = ((1U << sat) - 1U);
      if (val > (int32_t)max)
      {
        return max;
      }
      else if (val < 0)
      {
        return 0U;
      }
    }
    return (uint32_t)val;
  }
#endif
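
/*
 * Illustrative note (not part of the upstream header): these Cortex-M0
 * fallbacks clamp a value to a signed or unsigned range of `sat` bits,
 * matching the behaviour CMSIS maps to the SSAT/USAT instructions on
 * larger cores. For example, __SSAT(300, 8) == 127, __SSAT(-300, 8) == -128
 * and __USAT(-5, 8) == 0.
 */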
#if (__CORTEX_M >= 0x03)   /* __CORTEX_M is defined in core_cm0.h, core_cm3.h and core_cm4.h. */

  __IAR_FT uint8_t __LDRBT(volatile uint8_t *addr)
  {
    uint32_t res;
    __ASM("LDRBT %0, [%1]" : "=r" (res) : "r" (addr) : "memory");
    return ((uint8_t)res);
  }

  __IAR_FT uint16_t __LDRHT(volatile uint16_t *addr)
  {
    uint32_t res;
    __ASM("LDRHT %0, [%1]" : "=r" (res) : "r" (addr) : "memory");
    return ((uint16_t)res);
  }

  __IAR_FT uint32_t __LDRT(volatile uint32_t *addr)
  {
    uint32_t res;
    __ASM("LDRT %0, [%1]" : "=r" (res) : "r" (addr) : "memory");
    return res;
  }

  __IAR_FT void __STRBT(uint8_t value, volatile uint8_t *addr)
  {
    __ASM("STRBT %1, [%0]" : : "r" (addr), "r" ((uint32_t)value) : "memory");
  }

  __IAR_FT void __STRHT(uint16_t value, volatile uint16_t *addr)
  {
    __ASM("STRHT %1, [%0]" : : "r" (addr), "r" ((uint32_t)value) : "memory");
  }

  __IAR_FT void __STRT(uint32_t value, volatile uint32_t *addr)
  {
    __ASM("STRT %1, [%0]" : : "r" (addr), "r" (value) : "memory");
  }

#endif /* (__CORTEX_M >= 0x03) */
#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )

  __IAR_FT uint8_t __LDAB(volatile uint8_t *ptr)
  {
    uint32_t res;
    __ASM volatile ("LDAB %0, [%1]" : "=r" (res) : "r" (*ptr) : "memory");
    return ((uint8_t)res);
  }

  __IAR_FT uint16_t __LDAH(volatile uint16_t *ptr)
  {
    uint32_t res;
    __ASM volatile ("LDAH %0, [%1]" : "=r" (res) : "r" (*ptr) : "memory");
    return ((uint16_t)res);
  }

  __IAR_FT uint32_t __LDA(volatile uint32_t *ptr)
  {
    uint32_t res;
    __ASM volatile ("LDA %0, [%1]" : "=r" (res) : "r" (*ptr) : "memory");
    return res;
  }

  __IAR_FT void __STLB(uint8_t value, volatile uint8_t *ptr)
  {
    __ASM volatile ("STLB %1, [%0]" :: "r" (*ptr), "r" (value) : "memory");
  }

  __IAR_FT void __STLH(uint16_t value, volatile uint16_t *ptr)
  {
    __ASM volatile ("STLH %1, [%0]" :: "r" (*ptr), "r" (value) : "memory");
  }

  __IAR_FT void __STL(uint32_t value, volatile uint32_t *ptr)
  {
    __ASM volatile ("STL %1, [%0]" :: "r" (*ptr), "r" (value) : "memory");
  }

  __IAR_FT uint8_t __LDAEXB(volatile uint8_t *ptr)
  {
    uint32_t res;
    __ASM volatile ("LDAEXB %0, [%1]" : "=r" (res) : "r" (*ptr) : "memory");
    return ((uint8_t)res);
  }

  __IAR_FT uint16_t __LDAEXH(volatile uint16_t *ptr)
  {
    uint32_t res;
    __ASM volatile ("LDAEXH %0, [%1]" : "=r" (res) : "r" (*ptr) : "memory");
    return ((uint16_t)res);
  }

  __IAR_FT uint32_t __LDAEX(volatile uint32_t *ptr)
  {
    uint32_t res;
    __ASM volatile ("LDAEX %0, [%1]" : "=r" (res) : "r" (*ptr) : "memory");
    return res;
  }

  __IAR_FT uint32_t __STLEXB(uint8_t value, volatile uint8_t *ptr)
  {
    uint32_t res;
    __ASM volatile ("STLEXB %0, %2, [%1]" : "=r" (res) : "r" (*ptr), "r" (value) : "memory");
    return res;
  }

  __IAR_FT uint32_t __STLEXH(uint16_t value, volatile uint16_t *ptr)
  {
    uint32_t res;
    __ASM volatile ("STLEXH %0, %2, [%1]" : "=r" (res) : "r" (*ptr), "r" (value) : "memory");
    return res;
  }

  __IAR_FT uint32_t __STLEX(uint32_t value, volatile uint32_t *ptr)
  {
    uint32_t res;
    __ASM volatile ("STLEX %0, %2, [%1]" : "=r" (res) : "r" (*ptr), "r" (value) : "memory");
    return res;
  }

#endif /* __ARM_ARCH_8M_MAIN__ or __ARM_ARCH_8M_BASE__ */
#undef __IAR_FT
#undef __IAR_M0_FAMILY
#undef __ICCARM_V8

#pragma diag_default=Pe940
#pragma diag_default=Pe177

#endif /* __CMSIS_ICCARM_H__ */