cmsis_gcc.h
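This header is normally not included directly; a CMSIS device header pulls it in through the core header (for example core_cm4.h), which selects cmsis_gcc.h when compiling with GCC. A minimal usage sketch, assuming a hypothetical "ARMCM4.h" device header and a 48 MHz core clock:

  #include "ARMCM4.h"                 /* device header -> core_cm4.h -> cmsis_gcc.h */

  int main(void)
  {
    __disable_irq();                  /* intrinsics below come from cmsis_gcc.h */
    (void)SysTick_Config(48000U);     /* hypothetical 1 ms tick at 48 MHz */
    __enable_irq();

    for (;;) {
      __WFI();                        /* sleep until the next interrupt */
    }
  }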
/**************************************************************************//**
 * @file     cmsis_gcc.h
 * @brief    CMSIS compiler GCC header file
 * @version  V5.4.1
 * @date     27. May 2021
 ******************************************************************************/
/*
 * Copyright (c) 2009-2021 Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the License); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef __CMSIS_GCC_H
#define __CMSIS_GCC_H

/* ignore some GCC warnings */
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wsign-conversion"
#pragma GCC diagnostic ignored "-Wconversion"
#pragma GCC diagnostic ignored "-Wunused-parameter"

/* Fallback for __has_builtin */
#ifndef __has_builtin
  #define __has_builtin(x) (0)
#endif

/* CMSIS compiler specific defines */
#ifndef   __ASM
  #define __ASM                                  __asm
#endif
#ifndef   __INLINE
  #define __INLINE                               inline
#endif
#ifndef   __STATIC_INLINE
  #define __STATIC_INLINE                        static inline
#endif
#ifndef   __STATIC_FORCEINLINE
  #define __STATIC_FORCEINLINE                   __attribute__((always_inline)) static inline
#endif
#ifndef   __NO_RETURN
  #define __NO_RETURN                            __attribute__((__noreturn__))
#endif
#ifndef   __USED
  #define __USED                                 __attribute__((used))
#endif
#ifndef   __WEAK
  #define __WEAK                                 __attribute__((weak))
#endif
#ifndef   __PACKED
  #define __PACKED                               __attribute__((packed, aligned(1)))
#endif
#ifndef   __PACKED_STRUCT
  #define __PACKED_STRUCT                        struct __attribute__((packed, aligned(1)))
#endif
#ifndef   __PACKED_UNION
  #define __PACKED_UNION                         union __attribute__((packed, aligned(1)))
#endif
#ifndef   __UNALIGNED_UINT32        /* deprecated */
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  struct __attribute__((packed)) T_UINT32 { uint32_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT32(x)                  (((struct T_UINT32 *)(x))->v)
#endif
#ifndef   __UNALIGNED_UINT16_WRITE
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT16_WRITE(addr, val)    (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
#endif
#ifndef   __UNALIGNED_UINT16_READ
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT16_READ(addr)          (((const struct T_UINT16_READ *)(const void *)(addr))->v)
#endif
#ifndef   __UNALIGNED_UINT32_WRITE
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT32_WRITE(addr, val)    (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
#endif
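/* Usage sketch (illustrative only, not part of the original header): the unaligned
   access macros read and write fields at arbitrary byte offsets, e.g. in a protocol
   frame. The buffer and offsets below are hypothetical.

     uint8_t  frame[16];
     uint16_t len = __UNALIGNED_UINT16_READ(&frame[1]);    // 16-bit read at an odd offset
     __UNALIGNED_UINT32_WRITE(&frame[3], 0x12345678U);     // 32-bit write at an unaligned offset
*/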
#ifndef   __UNALIGNED_UINT32_READ
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wpacked"
  #pragma GCC diagnostic ignored "-Wattributes"
  __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
  #pragma GCC diagnostic pop
  #define __UNALIGNED_UINT32_READ(addr)          (((const struct T_UINT32_READ *)(const void *)(addr))->v)
#endif
#ifndef   __ALIGNED
  #define __ALIGNED(x)                           __attribute__((aligned(x)))
#endif
#ifndef   __RESTRICT
  #define __RESTRICT                             __restrict
#endif
#ifndef   __COMPILER_BARRIER
  #define __COMPILER_BARRIER()                   __ASM volatile("":::"memory")
#endif

/* #########################  Startup and Lowlevel Init  ######################## */

#ifndef __PROGRAM_START

/**
  \brief   Initializes data and bss sections
  \details This default implementation initializes all data and additional bss
           sections, relying on .copy.table and .zero.table being specified
           properly in the linker script that is used.
 */
__STATIC_FORCEINLINE __NO_RETURN void __cmsis_start(void)
{
  extern void _start(void) __NO_RETURN;

  typedef struct {
    uint32_t const* src;
    uint32_t* dest;
    uint32_t  wlen;
  } __copy_table_t;

  typedef struct {
    uint32_t* dest;
    uint32_t  wlen;
  } __zero_table_t;

  extern const __copy_table_t __copy_table_start__;
  extern const __copy_table_t __copy_table_end__;
  extern const __zero_table_t __zero_table_start__;
  extern const __zero_table_t __zero_table_end__;

  for (__copy_table_t const* pTable = &__copy_table_start__; pTable < &__copy_table_end__; ++pTable) {
    for(uint32_t i=0u; i<pTable->wlen; ++i) {
      pTable->dest[i] = pTable->src[i];
    }
  }

  for (__zero_table_t const* pTable = &__zero_table_start__; pTable < &__zero_table_end__; ++pTable) {
    for(uint32_t i=0u; i<pTable->wlen; ++i) {
      pTable->dest[i] = 0u;
    }
  }

  _start();
}

#define __PROGRAM_START           __cmsis_start
#endif

#ifndef __INITIAL_SP
#define __INITIAL_SP              __StackTop
#endif

#ifndef __STACK_LIMIT
#define __STACK_LIMIT             __StackLimit
#endif

#ifndef __VECTOR_TABLE
#define __VECTOR_TABLE            __Vectors
#endif

#ifndef __VECTOR_TABLE_ATTRIBUTE
#define __VECTOR_TABLE_ATTRIBUTE  __attribute__((used, section(".vectors")))
#endif

#if defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3U)
#ifndef __STACK_SEAL
#define __STACK_SEAL              __StackSeal
#endif

#ifndef __TZ_STACK_SEAL_SIZE
#define __TZ_STACK_SEAL_SIZE      8U
#endif

#ifndef __TZ_STACK_SEAL_VALUE
#define __TZ_STACK_SEAL_VALUE     0xFEF5EDA5FEF5EDA5ULL
#endif


__STATIC_FORCEINLINE void __TZ_set_STACKSEAL_S (uint32_t* stackTop) {
  *((uint64_t *)stackTop) = __TZ_STACK_SEAL_VALUE;
}
#endif


/* ##########################  Core Instruction Access  ######################### */
/** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
  Access to dedicated instructions
  @{
*/

/* Define macros for porting to both thumb1 and thumb2.
212 * For thumb1, use low register (r0-r7), specified by constraint "l" 213 * Otherwise, use general registers, specified by constraint "r" */ 214 #if defined (__thumb__) && !defined (__thumb2__) 215 #define __CMSIS_GCC_OUT_REG(r) "=l" (r) 216 #define __CMSIS_GCC_RW_REG(r) "+l" (r) 217 #define __CMSIS_GCC_USE_REG(r) "l" (r) 218 #else 219 #define __CMSIS_GCC_OUT_REG(r) "=r" (r) 220 #define __CMSIS_GCC_RW_REG(r) "+r" (r) 221 #define __CMSIS_GCC_USE_REG(r) "r" (r) 222 #endif 223 224 /** 225 \brief No Operation 226 \details No Operation does nothing. This instruction can be used for code alignment purposes. 227 */ 228 #define __NOP() __ASM volatile ("nop") 229 230 /** 231 \brief Wait For Interrupt 232 \details Wait For Interrupt is a hint instruction that suspends execution until one of a number of events occurs. 233 */ 234 #define __WFI() __ASM volatile ("wfi":::"memory") 235 236 237 /** 238 \brief Wait For Event 239 \details Wait For Event is a hint instruction that permits the processor to enter 240 a low-power state until one of a number of events occurs. 241 */ 242 #define __WFE() __ASM volatile ("wfe":::"memory") 243 244 245 /** 246 \brief Send Event 247 \details Send Event is a hint instruction. It causes an event to be signaled to the CPU. 248 */ 249 #define __SEV() __ASM volatile ("sev") 250 251 252 /** 253 \brief Instruction Synchronization Barrier 254 \details Instruction Synchronization Barrier flushes the pipeline in the processor, 255 so that all instructions following the ISB are fetched from cache or memory, 256 after the instruction has been completed. 257 */ 258 __STATIC_FORCEINLINE void __ISB(void) 259 { 260 __ASM volatile ("isb 0xF":::"memory"); 261 } 262 263 264 /** 265 \brief Data Synchronization Barrier 266 \details Acts as a special kind of Data Memory Barrier. 267 It completes when all explicit memory accesses before this instruction complete. 268 */ 269 __STATIC_FORCEINLINE void __DSB(void) 270 { 271 __ASM volatile ("dsb 0xF":::"memory"); 272 } 273 274 275 /** 276 \brief Data Memory Barrier 277 \details Ensures the apparent order of the explicit memory operations before 278 and after the instruction, without ensuring their completion. 279 */ 280 __STATIC_FORCEINLINE void __DMB(void) 281 { 282 __ASM volatile ("dmb 0xF":::"memory"); 283 } 284 285 286 /** 287 \brief Reverse byte order (32 bit) 288 \details Reverses the byte order in unsigned integer value. For example, 0x12345678 becomes 0x78563412. 289 \param [in] value Value to reverse 290 \return Reversed value 291 */ 292 __STATIC_FORCEINLINE uint32_t __REV(uint32_t value) 293 { 294 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5) 295 return __builtin_bswap32(value); 296 #else 297 uint32_t result; 298 299 __ASM ("rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) ); 300 return result; 301 #endif 302 } 303 304 305 /** 306 \brief Reverse byte order (16 bit) 307 \details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856. 308 \param [in] value Value to reverse 309 \return Reversed value 310 */ 311 __STATIC_FORCEINLINE uint32_t __REV16(uint32_t value) 312 { 313 uint32_t result; 314 315 __ASM ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) ); 316 return result; 317 } 318 319 320 /** 321 \brief Reverse byte order (16 bit) 322 \details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000. 
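           A hedged usage sketch (not part of the original header), converting
           hypothetical little-endian values to big-endian byte order:
  \code
    uint32_t le32 = 0x12345678U;
    int16_t  le16 = 0x0080;
    uint32_t be32 = __REV(le32);              // 0x78563412
    int16_t  be16 = __REVSH(le16);            // 0x8000 (byte-swapped signed halfword)
  \endcode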
323 \param [in] value Value to reverse 324 \return Reversed value 325 */ 326 __STATIC_FORCEINLINE int16_t __REVSH(int16_t value) 327 { 328 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8) 329 return (int16_t)__builtin_bswap16(value); 330 #else 331 int16_t result; 332 333 __ASM ("revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) ); 334 return result; 335 #endif 336 } 337 338 339 /** 340 \brief Rotate Right in unsigned value (32 bit) 341 \details Rotate Right (immediate) provides the value of the contents of a register rotated by a variable number of bits. 342 \param [in] op1 Value to rotate 343 \param [in] op2 Number of Bits to rotate 344 \return Rotated value 345 */ 346 __STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2) 347 { 348 op2 %= 32U; 349 if (op2 == 0U) 350 { 351 return op1; 352 } 353 return (op1 >> op2) | (op1 << (32U - op2)); 354 } 355 356 357 /** 358 \brief Breakpoint 359 \details Causes the processor to enter Debug state. 360 Debug tools can use this to investigate system state when the instruction at a particular address is reached. 361 \param [in] value is ignored by the processor. 362 If required, a debugger can use it to store additional information about the breakpoint. 363 */ 364 #define __BKPT(value) __ASM volatile ("bkpt "#value) 365 366 367 /** 368 \brief Reverse bit order of value 369 \details Reverses the bit order of the given value. 370 \param [in] value Value to reverse 371 \return Reversed value 372 */ 373 __STATIC_FORCEINLINE uint32_t __RBIT(uint32_t value) 374 { 375 uint32_t result; 376 377 #if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \ 378 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \ 379 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) 380 __ASM ("rbit %0, %1" : "=r" (result) : "r" (value) ); 381 #else 382 uint32_t s = (4U /*sizeof(v)*/ * 8U) - 1U; /* extra shift needed at end */ 383 384 result = value; /* r will be reversed bits of v; first get LSB of v */ 385 for (value >>= 1U; value != 0U; value >>= 1U) 386 { 387 result <<= 1U; 388 result |= value & 1U; 389 s--; 390 } 391 result <<= s; /* shift when v's highest bits are zero */ 392 #endif 393 return result; 394 } 395 396 397 /** 398 \brief Count leading zeros 399 \details Counts the number of leading zeros of a data value. 400 \param [in] value Value to count the leading zeros 401 \return number of leading zeros in value 402 */ 403 __STATIC_FORCEINLINE uint8_t __CLZ(uint32_t value) 404 { 405 /* Even though __builtin_clz produces a CLZ instruction on ARM, formally 406 __builtin_clz(0) is undefined behaviour, so handle this case specially. 407 This guarantees ARM-compatible results if happening to compile on a non-ARM 408 target, and ensures the compiler doesn't decide to activate any 409 optimisations using the logic "value was passed to __builtin_clz, so it 410 is non-zero". 411 ARM GCC 7.3 and possibly earlier will optimise this test away, leaving a 412 single CLZ instruction. 413 */ 414 if (value == 0U) 415 { 416 return 32U; 417 } 418 return __builtin_clz(value); 419 } 420 421 422 #if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \ 423 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \ 424 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \ 425 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) 426 /** 427 \brief LDR Exclusive (8 bit) 428 \details Executes a exclusive LDR instruction for 8 bit value. 
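           A hedged usage sketch (not part of the original header): an atomic
           byte increment built from the exclusive pair __LDREXB/__STREXB,
           where 'counter' is a hypothetical shared variable:
  \code
    extern volatile uint8_t counter;                         // hypothetical shared counter
    uint8_t old;
    do {
      old = __LDREXB(&counter);                              // exclusive load
    } while (__STREXB((uint8_t)(old + 1U), &counter) != 0U); // retry until the store succeeds
  \endcode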
429 \param [in] ptr Pointer to data 430 \return value of type uint8_t at (*ptr) 431 */ 432 __STATIC_FORCEINLINE uint8_t __LDREXB(volatile uint8_t *addr) 433 { 434 uint32_t result; 435 436 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8) 437 __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) ); 438 #else 439 /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not 440 accepted by assembler. So has to use following less efficient pattern. 441 */ 442 __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) : "memory" ); 443 #endif 444 return ((uint8_t) result); /* Add explicit type cast here */ 445 } 446 447 448 /** 449 \brief LDR Exclusive (16 bit) 450 \details Executes a exclusive LDR instruction for 16 bit values. 451 \param [in] ptr Pointer to data 452 \return value of type uint16_t at (*ptr) 453 */ 454 __STATIC_FORCEINLINE uint16_t __LDREXH(volatile uint16_t *addr) 455 { 456 uint32_t result; 457 458 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8) 459 __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) ); 460 #else 461 /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not 462 accepted by assembler. So has to use following less efficient pattern. 463 */ 464 __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) : "memory" ); 465 #endif 466 return ((uint16_t) result); /* Add explicit type cast here */ 467 } 468 469 470 /** 471 \brief LDR Exclusive (32 bit) 472 \details Executes a exclusive LDR instruction for 32 bit values. 473 \param [in] ptr Pointer to data 474 \return value of type uint32_t at (*ptr) 475 */ 476 __STATIC_FORCEINLINE uint32_t __LDREXW(volatile uint32_t *addr) 477 { 478 uint32_t result; 479 480 __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) ); 481 return(result); 482 } 483 484 485 /** 486 \brief STR Exclusive (8 bit) 487 \details Executes a exclusive STR instruction for 8 bit values. 488 \param [in] value Value to store 489 \param [in] ptr Pointer to location 490 \return 0 Function succeeded 491 \return 1 Function failed 492 */ 493 __STATIC_FORCEINLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr) 494 { 495 uint32_t result; 496 497 __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) ); 498 return(result); 499 } 500 501 502 /** 503 \brief STR Exclusive (16 bit) 504 \details Executes a exclusive STR instruction for 16 bit values. 505 \param [in] value Value to store 506 \param [in] ptr Pointer to location 507 \return 0 Function succeeded 508 \return 1 Function failed 509 */ 510 __STATIC_FORCEINLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr) 511 { 512 uint32_t result; 513 514 __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) ); 515 return(result); 516 } 517 518 519 /** 520 \brief STR Exclusive (32 bit) 521 \details Executes a exclusive STR instruction for 32 bit values. 522 \param [in] value Value to store 523 \param [in] ptr Pointer to location 524 \return 0 Function succeeded 525 \return 1 Function failed 526 */ 527 __STATIC_FORCEINLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr) 528 { 529 uint32_t result; 530 531 __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) ); 532 return(result); 533 } 534 535 536 /** 537 \brief Remove the exclusive lock 538 \details Removes the exclusive lock which is created by LDREX. 
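           A hedged usage sketch (not part of the original header): abandoning a
           started exclusive sequence instead of completing it with a store:
  \code
    extern volatile uint32_t shared;          // hypothetical shared variable
    uint32_t v = __LDREXW(&shared);
    if (v == 0xFFFFFFFFU) {                   // hypothetical abort condition
      __CLREX();                              // drop the exclusive monitor, no store
    } else {
      (void)__STREXW(v + 1U, &shared);
    }
  \endcode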
539 */ 540 __STATIC_FORCEINLINE void __CLREX(void) 541 { 542 __ASM volatile ("clrex" ::: "memory"); 543 } 544 545 #endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \ 546 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \ 547 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \ 548 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) */ 549 550 551 #if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \ 552 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \ 553 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) 554 /** 555 \brief Signed Saturate 556 \details Saturates a signed value. 557 \param [in] ARG1 Value to be saturated 558 \param [in] ARG2 Bit position to saturate to (1..32) 559 \return Saturated value 560 */ 561 #define __SSAT(ARG1, ARG2) \ 562 __extension__ \ 563 ({ \ 564 int32_t __RES, __ARG1 = (ARG1); \ 565 __ASM volatile ("ssat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) : "cc" ); \ 566 __RES; \ 567 }) 568 569 570 /** 571 \brief Unsigned Saturate 572 \details Saturates an unsigned value. 573 \param [in] ARG1 Value to be saturated 574 \param [in] ARG2 Bit position to saturate to (0..31) 575 \return Saturated value 576 */ 577 #define __USAT(ARG1, ARG2) \ 578 __extension__ \ 579 ({ \ 580 uint32_t __RES, __ARG1 = (ARG1); \ 581 __ASM volatile ("usat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) : "cc" ); \ 582 __RES; \ 583 }) 584 585 586 /** 587 \brief Rotate Right with Extend (32 bit) 588 \details Moves each bit of a bitstring right by one bit. 589 The carry input is shifted in at the left end of the bitstring. 590 \param [in] value Value to rotate 591 \return Rotated value 592 */ 593 __STATIC_FORCEINLINE uint32_t __RRX(uint32_t value) 594 { 595 uint32_t result; 596 597 __ASM volatile ("rrx %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) ); 598 return(result); 599 } 600 601 602 /** 603 \brief LDRT Unprivileged (8 bit) 604 \details Executes a Unprivileged LDRT instruction for 8 bit value. 605 \param [in] ptr Pointer to data 606 \return value of type uint8_t at (*ptr) 607 */ 608 __STATIC_FORCEINLINE uint8_t __LDRBT(volatile uint8_t *ptr) 609 { 610 uint32_t result; 611 612 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8) 613 __ASM volatile ("ldrbt %0, %1" : "=r" (result) : "Q" (*ptr) ); 614 #else 615 /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not 616 accepted by assembler. So has to use following less efficient pattern. 617 */ 618 __ASM volatile ("ldrbt %0, [%1]" : "=r" (result) : "r" (ptr) : "memory" ); 619 #endif 620 return ((uint8_t) result); /* Add explicit type cast here */ 621 } 622 623 624 /** 625 \brief LDRT Unprivileged (16 bit) 626 \details Executes a Unprivileged LDRT instruction for 16 bit values. 627 \param [in] ptr Pointer to data 628 \return value of type uint16_t at (*ptr) 629 */ 630 __STATIC_FORCEINLINE uint16_t __LDRHT(volatile uint16_t *ptr) 631 { 632 uint32_t result; 633 634 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8) 635 __ASM volatile ("ldrht %0, %1" : "=r" (result) : "Q" (*ptr) ); 636 #else 637 /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not 638 accepted by assembler. So has to use following less efficient pattern. 
639 */ 640 __ASM volatile ("ldrht %0, [%1]" : "=r" (result) : "r" (ptr) : "memory" ); 641 #endif 642 return ((uint16_t) result); /* Add explicit type cast here */ 643 } 644 645 646 /** 647 \brief LDRT Unprivileged (32 bit) 648 \details Executes a Unprivileged LDRT instruction for 32 bit values. 649 \param [in] ptr Pointer to data 650 \return value of type uint32_t at (*ptr) 651 */ 652 __STATIC_FORCEINLINE uint32_t __LDRT(volatile uint32_t *ptr) 653 { 654 uint32_t result; 655 656 __ASM volatile ("ldrt %0, %1" : "=r" (result) : "Q" (*ptr) ); 657 return(result); 658 } 659 660 661 /** 662 \brief STRT Unprivileged (8 bit) 663 \details Executes a Unprivileged STRT instruction for 8 bit values. 664 \param [in] value Value to store 665 \param [in] ptr Pointer to location 666 */ 667 __STATIC_FORCEINLINE void __STRBT(uint8_t value, volatile uint8_t *ptr) 668 { 669 __ASM volatile ("strbt %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) ); 670 } 671 672 673 /** 674 \brief STRT Unprivileged (16 bit) 675 \details Executes a Unprivileged STRT instruction for 16 bit values. 676 \param [in] value Value to store 677 \param [in] ptr Pointer to location 678 */ 679 __STATIC_FORCEINLINE void __STRHT(uint16_t value, volatile uint16_t *ptr) 680 { 681 __ASM volatile ("strht %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) ); 682 } 683 684 685 /** 686 \brief STRT Unprivileged (32 bit) 687 \details Executes a Unprivileged STRT instruction for 32 bit values. 688 \param [in] value Value to store 689 \param [in] ptr Pointer to location 690 */ 691 __STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr) 692 { 693 __ASM volatile ("strt %1, %0" : "=Q" (*ptr) : "r" (value) ); 694 } 695 696 #else /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \ 697 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \ 698 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */ 699 700 /** 701 \brief Signed Saturate 702 \details Saturates a signed value. 703 \param [in] value Value to be saturated 704 \param [in] sat Bit position to saturate to (1..32) 705 \return Saturated value 706 */ 707 __STATIC_FORCEINLINE int32_t __SSAT(int32_t val, uint32_t sat) 708 { 709 if ((sat >= 1U) && (sat <= 32U)) 710 { 711 const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U); 712 const int32_t min = -1 - max ; 713 if (val > max) 714 { 715 return max; 716 } 717 else if (val < min) 718 { 719 return min; 720 } 721 } 722 return val; 723 } 724 725 /** 726 \brief Unsigned Saturate 727 \details Saturates an unsigned value. 728 \param [in] value Value to be saturated 729 \param [in] sat Bit position to saturate to (0..31) 730 \return Saturated value 731 */ 732 __STATIC_FORCEINLINE uint32_t __USAT(int32_t val, uint32_t sat) 733 { 734 if (sat <= 31U) 735 { 736 const uint32_t max = ((1U << sat) - 1U); 737 if (val > (int32_t)max) 738 { 739 return max; 740 } 741 else if (val < 0) 742 { 743 return 0U; 744 } 745 } 746 return (uint32_t)val; 747 } 748 749 #endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \ 750 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \ 751 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */ 752 753 754 #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \ 755 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) 756 /** 757 \brief Load-Acquire (8 bit) 758 \details Executes a LDAB instruction for 8 bit value. 
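           A hedged usage sketch (not part of the original header): a one-byte
           ready flag with acquire/release ordering ('ready' and 'buffer' are
           hypothetical):
  \code
    extern volatile uint8_t ready;            // hypothetical flag
    extern          uint8_t buffer[8];        // hypothetical payload
    // producer
    buffer[0] = 42U;
    __STLB(1U, &ready);                       // store-release: payload visible before the flag
    // consumer
    while (__LDAB(&ready) == 0U) { }          // load-acquire: flag observed before payload is read
    uint8_t data = buffer[0];
  \endcode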
759 \param [in] ptr Pointer to data 760 \return value of type uint8_t at (*ptr) 761 */ 762 __STATIC_FORCEINLINE uint8_t __LDAB(volatile uint8_t *ptr) 763 { 764 uint32_t result; 765 766 __ASM volatile ("ldab %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" ); 767 return ((uint8_t) result); 768 } 769 770 771 /** 772 \brief Load-Acquire (16 bit) 773 \details Executes a LDAH instruction for 16 bit values. 774 \param [in] ptr Pointer to data 775 \return value of type uint16_t at (*ptr) 776 */ 777 __STATIC_FORCEINLINE uint16_t __LDAH(volatile uint16_t *ptr) 778 { 779 uint32_t result; 780 781 __ASM volatile ("ldah %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" ); 782 return ((uint16_t) result); 783 } 784 785 786 /** 787 \brief Load-Acquire (32 bit) 788 \details Executes a LDA instruction for 32 bit values. 789 \param [in] ptr Pointer to data 790 \return value of type uint32_t at (*ptr) 791 */ 792 __STATIC_FORCEINLINE uint32_t __LDA(volatile uint32_t *ptr) 793 { 794 uint32_t result; 795 796 __ASM volatile ("lda %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" ); 797 return(result); 798 } 799 800 801 /** 802 \brief Store-Release (8 bit) 803 \details Executes a STLB instruction for 8 bit values. 804 \param [in] value Value to store 805 \param [in] ptr Pointer to location 806 */ 807 __STATIC_FORCEINLINE void __STLB(uint8_t value, volatile uint8_t *ptr) 808 { 809 __ASM volatile ("stlb %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" ); 810 } 811 812 813 /** 814 \brief Store-Release (16 bit) 815 \details Executes a STLH instruction for 16 bit values. 816 \param [in] value Value to store 817 \param [in] ptr Pointer to location 818 */ 819 __STATIC_FORCEINLINE void __STLH(uint16_t value, volatile uint16_t *ptr) 820 { 821 __ASM volatile ("stlh %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" ); 822 } 823 824 825 /** 826 \brief Store-Release (32 bit) 827 \details Executes a STL instruction for 32 bit values. 828 \param [in] value Value to store 829 \param [in] ptr Pointer to location 830 */ 831 __STATIC_FORCEINLINE void __STL(uint32_t value, volatile uint32_t *ptr) 832 { 833 __ASM volatile ("stl %1, %0" : "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" ); 834 } 835 836 837 /** 838 \brief Load-Acquire Exclusive (8 bit) 839 \details Executes a LDAB exclusive instruction for 8 bit value. 840 \param [in] ptr Pointer to data 841 \return value of type uint8_t at (*ptr) 842 */ 843 __STATIC_FORCEINLINE uint8_t __LDAEXB(volatile uint8_t *ptr) 844 { 845 uint32_t result; 846 847 __ASM volatile ("ldaexb %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" ); 848 return ((uint8_t) result); 849 } 850 851 852 /** 853 \brief Load-Acquire Exclusive (16 bit) 854 \details Executes a LDAH exclusive instruction for 16 bit values. 855 \param [in] ptr Pointer to data 856 \return value of type uint16_t at (*ptr) 857 */ 858 __STATIC_FORCEINLINE uint16_t __LDAEXH(volatile uint16_t *ptr) 859 { 860 uint32_t result; 861 862 __ASM volatile ("ldaexh %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" ); 863 return ((uint16_t) result); 864 } 865 866 867 /** 868 \brief Load-Acquire Exclusive (32 bit) 869 \details Executes a LDA exclusive instruction for 32 bit values. 
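           A hedged usage sketch (not part of the original header): a simple
           spinlock built from __LDAEX/__STLEX and released with __STL
           ('lock' is a hypothetical flag, 0 = free, 1 = taken):
  \code
    extern volatile uint32_t lock;            // hypothetical lock flag
    while ((__LDAEX(&lock) != 0U) || (__STLEX(1U, &lock) != 0U)) {
      // spin until the lock is free and the exclusive store succeeds
    }
    // ... critical section ...
    __STL(0U, &lock);                         // store-release unlocks
  \endcode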
870 \param [in] ptr Pointer to data 871 \return value of type uint32_t at (*ptr) 872 */ 873 __STATIC_FORCEINLINE uint32_t __LDAEX(volatile uint32_t *ptr) 874 { 875 uint32_t result; 876 877 __ASM volatile ("ldaex %0, %1" : "=r" (result) : "Q" (*ptr) : "memory" ); 878 return(result); 879 } 880 881 882 /** 883 \brief Store-Release Exclusive (8 bit) 884 \details Executes a STLB exclusive instruction for 8 bit values. 885 \param [in] value Value to store 886 \param [in] ptr Pointer to location 887 \return 0 Function succeeded 888 \return 1 Function failed 889 */ 890 __STATIC_FORCEINLINE uint32_t __STLEXB(uint8_t value, volatile uint8_t *ptr) 891 { 892 uint32_t result; 893 894 __ASM volatile ("stlexb %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" ); 895 return(result); 896 } 897 898 899 /** 900 \brief Store-Release Exclusive (16 bit) 901 \details Executes a STLH exclusive instruction for 16 bit values. 902 \param [in] value Value to store 903 \param [in] ptr Pointer to location 904 \return 0 Function succeeded 905 \return 1 Function failed 906 */ 907 __STATIC_FORCEINLINE uint32_t __STLEXH(uint16_t value, volatile uint16_t *ptr) 908 { 909 uint32_t result; 910 911 __ASM volatile ("stlexh %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" ); 912 return(result); 913 } 914 915 916 /** 917 \brief Store-Release Exclusive (32 bit) 918 \details Executes a STL exclusive instruction for 32 bit values. 919 \param [in] value Value to store 920 \param [in] ptr Pointer to location 921 \return 0 Function succeeded 922 \return 1 Function failed 923 */ 924 __STATIC_FORCEINLINE uint32_t __STLEX(uint32_t value, volatile uint32_t *ptr) 925 { 926 uint32_t result; 927 928 __ASM volatile ("stlex %0, %2, %1" : "=&r" (result), "=Q" (*ptr) : "r" ((uint32_t)value) : "memory" ); 929 return(result); 930 } 931 932 #endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \ 933 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) */ 934 935 /*@}*/ /* end of group CMSIS_Core_InstructionInterface */ 936 937 938 /* ########################### Core Function Access ########################### */ 939 /** \ingroup CMSIS_Core_FunctionInterface 940 \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions 941 @{ 942 */ 943 944 /** 945 \brief Enable IRQ Interrupts 946 \details Enables IRQ interrupts by clearing special-purpose register PRIMASK. 947 Can only be executed in Privileged modes. 948 */ 949 __STATIC_FORCEINLINE void __enable_irq(void) 950 { 951 __ASM volatile ("cpsie i" : : : "memory"); 952 } 953 954 955 /** 956 \brief Disable IRQ Interrupts 957 \details Disables IRQ interrupts by setting special-purpose register PRIMASK. 958 Can only be executed in Privileged modes. 959 */ 960 __STATIC_FORCEINLINE void __disable_irq(void) 961 { 962 __ASM volatile ("cpsid i" : : : "memory"); 963 } 964 965 966 /** 967 \brief Get Control Register 968 \details Returns the content of the Control Register. 969 \return Control Register value 970 */ 971 __STATIC_FORCEINLINE uint32_t __get_CONTROL(void) 972 { 973 uint32_t result; 974 975 __ASM volatile ("MRS %0, control" : "=r" (result) ); 976 return(result); 977 } 978 979 980 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 981 /** 982 \brief Get Control Register (non-secure) 983 \details Returns the content of the non-secure Control Register when in secure mode. 
984 \return non-secure Control Register value 985 */ 986 __STATIC_FORCEINLINE uint32_t __TZ_get_CONTROL_NS(void) 987 { 988 uint32_t result; 989 990 __ASM volatile ("MRS %0, control_ns" : "=r" (result) ); 991 return(result); 992 } 993 #endif 994 995 996 /** 997 \brief Set Control Register 998 \details Writes the given value to the Control Register. 999 \param [in] control Control Register value to set 1000 */ 1001 __STATIC_FORCEINLINE void __set_CONTROL(uint32_t control) 1002 { 1003 __ASM volatile ("MSR control, %0" : : "r" (control) : "memory"); 1004 __ISB(); 1005 } 1006 1007 1008 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 1009 /** 1010 \brief Set Control Register (non-secure) 1011 \details Writes the given value to the non-secure Control Register when in secure state. 1012 \param [in] control Control Register value to set 1013 */ 1014 __STATIC_FORCEINLINE void __TZ_set_CONTROL_NS(uint32_t control) 1015 { 1016 __ASM volatile ("MSR control_ns, %0" : : "r" (control) : "memory"); 1017 __ISB(); 1018 } 1019 #endif 1020 1021 1022 /** 1023 \brief Get IPSR Register 1024 \details Returns the content of the IPSR Register. 1025 \return IPSR Register value 1026 */ 1027 __STATIC_FORCEINLINE uint32_t __get_IPSR(void) 1028 { 1029 uint32_t result; 1030 1031 __ASM volatile ("MRS %0, ipsr" : "=r" (result) ); 1032 return(result); 1033 } 1034 1035 1036 /** 1037 \brief Get APSR Register 1038 \details Returns the content of the APSR Register. 1039 \return APSR Register value 1040 */ 1041 __STATIC_FORCEINLINE uint32_t __get_APSR(void) 1042 { 1043 uint32_t result; 1044 1045 __ASM volatile ("MRS %0, apsr" : "=r" (result) ); 1046 return(result); 1047 } 1048 1049 1050 /** 1051 \brief Get xPSR Register 1052 \details Returns the content of the xPSR Register. 1053 \return xPSR Register value 1054 */ 1055 __STATIC_FORCEINLINE uint32_t __get_xPSR(void) 1056 { 1057 uint32_t result; 1058 1059 __ASM volatile ("MRS %0, xpsr" : "=r" (result) ); 1060 return(result); 1061 } 1062 1063 1064 /** 1065 \brief Get Process Stack Pointer 1066 \details Returns the current value of the Process Stack Pointer (PSP). 1067 \return PSP Register value 1068 */ 1069 __STATIC_FORCEINLINE uint32_t __get_PSP(void) 1070 { 1071 uint32_t result; 1072 1073 __ASM volatile ("MRS %0, psp" : "=r" (result) ); 1074 return(result); 1075 } 1076 1077 1078 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 1079 /** 1080 \brief Get Process Stack Pointer (non-secure) 1081 \details Returns the current value of the non-secure Process Stack Pointer (PSP) when in secure state. 1082 \return PSP Register value 1083 */ 1084 __STATIC_FORCEINLINE uint32_t __TZ_get_PSP_NS(void) 1085 { 1086 uint32_t result; 1087 1088 __ASM volatile ("MRS %0, psp_ns" : "=r" (result) ); 1089 return(result); 1090 } 1091 #endif 1092 1093 1094 /** 1095 \brief Set Process Stack Pointer 1096 \details Assigns the given value to the Process Stack Pointer (PSP). 1097 \param [in] topOfProcStack Process Stack Pointer value to set 1098 */ 1099 __STATIC_FORCEINLINE void __set_PSP(uint32_t topOfProcStack) 1100 { 1101 __ASM volatile ("MSR psp, %0" : : "r" (topOfProcStack) : ); 1102 } 1103 1104 1105 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 1106 /** 1107 \brief Set Process Stack Pointer (non-secure) 1108 \details Assigns the given value to the non-secure Process Stack Pointer (PSP) when in secure state. 
1109 \param [in] topOfProcStack Process Stack Pointer value to set 1110 */ 1111 __STATIC_FORCEINLINE void __TZ_set_PSP_NS(uint32_t topOfProcStack) 1112 { 1113 __ASM volatile ("MSR psp_ns, %0" : : "r" (topOfProcStack) : ); 1114 } 1115 #endif 1116 1117 1118 /** 1119 \brief Get Main Stack Pointer 1120 \details Returns the current value of the Main Stack Pointer (MSP). 1121 \return MSP Register value 1122 */ 1123 __STATIC_FORCEINLINE uint32_t __get_MSP(void) 1124 { 1125 uint32_t result; 1126 1127 __ASM volatile ("MRS %0, msp" : "=r" (result) ); 1128 return(result); 1129 } 1130 1131 1132 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 1133 /** 1134 \brief Get Main Stack Pointer (non-secure) 1135 \details Returns the current value of the non-secure Main Stack Pointer (MSP) when in secure state. 1136 \return MSP Register value 1137 */ 1138 __STATIC_FORCEINLINE uint32_t __TZ_get_MSP_NS(void) 1139 { 1140 uint32_t result; 1141 1142 __ASM volatile ("MRS %0, msp_ns" : "=r" (result) ); 1143 return(result); 1144 } 1145 #endif 1146 1147 1148 /** 1149 \brief Set Main Stack Pointer 1150 \details Assigns the given value to the Main Stack Pointer (MSP). 1151 \param [in] topOfMainStack Main Stack Pointer value to set 1152 */ 1153 __STATIC_FORCEINLINE void __set_MSP(uint32_t topOfMainStack) 1154 { 1155 __ASM volatile ("MSR msp, %0" : : "r" (topOfMainStack) : ); 1156 } 1157 1158 1159 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 1160 /** 1161 \brief Set Main Stack Pointer (non-secure) 1162 \details Assigns the given value to the non-secure Main Stack Pointer (MSP) when in secure state. 1163 \param [in] topOfMainStack Main Stack Pointer value to set 1164 */ 1165 __STATIC_FORCEINLINE void __TZ_set_MSP_NS(uint32_t topOfMainStack) 1166 { 1167 __ASM volatile ("MSR msp_ns, %0" : : "r" (topOfMainStack) : ); 1168 } 1169 #endif 1170 1171 1172 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 1173 /** 1174 \brief Get Stack Pointer (non-secure) 1175 \details Returns the current value of the non-secure Stack Pointer (SP) when in secure state. 1176 \return SP Register value 1177 */ 1178 __STATIC_FORCEINLINE uint32_t __TZ_get_SP_NS(void) 1179 { 1180 uint32_t result; 1181 1182 __ASM volatile ("MRS %0, sp_ns" : "=r" (result) ); 1183 return(result); 1184 } 1185 1186 1187 /** 1188 \brief Set Stack Pointer (non-secure) 1189 \details Assigns the given value to the non-secure Stack Pointer (SP) when in secure state. 1190 \param [in] topOfStack Stack Pointer value to set 1191 */ 1192 __STATIC_FORCEINLINE void __TZ_set_SP_NS(uint32_t topOfStack) 1193 { 1194 __ASM volatile ("MSR sp_ns, %0" : : "r" (topOfStack) : ); 1195 } 1196 #endif 1197 1198 1199 /** 1200 \brief Get Priority Mask 1201 \details Returns the current state of the priority mask bit from the Priority Mask Register. 1202 \return Priority Mask value 1203 */ 1204 __STATIC_FORCEINLINE uint32_t __get_PRIMASK(void) 1205 { 1206 uint32_t result; 1207 1208 __ASM volatile ("MRS %0, primask" : "=r" (result) ); 1209 return(result); 1210 } 1211 1212 1213 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 1214 /** 1215 \brief Get Priority Mask (non-secure) 1216 \details Returns the current state of the non-secure priority mask bit from the Priority Mask Register when in secure state. 
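           The PRIMASK accessors (secure or, as here, non-secure) are typically
           used in a save/disable/restore pattern; a hedged sketch using the
           current-state variants:
  \code
    uint32_t primask = __get_PRIMASK();       // remember the current mask
    __disable_irq();                          // PRIMASK = 1
    // ... short critical section ...
    __set_PRIMASK(primask);                   // restore the previous state
  \endcode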
1217 \return Priority Mask value 1218 */ 1219 __STATIC_FORCEINLINE uint32_t __TZ_get_PRIMASK_NS(void) 1220 { 1221 uint32_t result; 1222 1223 __ASM volatile ("MRS %0, primask_ns" : "=r" (result) ); 1224 return(result); 1225 } 1226 #endif 1227 1228 1229 /** 1230 \brief Set Priority Mask 1231 \details Assigns the given value to the Priority Mask Register. 1232 \param [in] priMask Priority Mask 1233 */ 1234 __STATIC_FORCEINLINE void __set_PRIMASK(uint32_t priMask) 1235 { 1236 __ASM volatile ("MSR primask, %0" : : "r" (priMask) : "memory"); 1237 } 1238 1239 1240 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 1241 /** 1242 \brief Set Priority Mask (non-secure) 1243 \details Assigns the given value to the non-secure Priority Mask Register when in secure state. 1244 \param [in] priMask Priority Mask 1245 */ 1246 __STATIC_FORCEINLINE void __TZ_set_PRIMASK_NS(uint32_t priMask) 1247 { 1248 __ASM volatile ("MSR primask_ns, %0" : : "r" (priMask) : "memory"); 1249 } 1250 #endif 1251 1252 1253 #if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \ 1254 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \ 1255 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) 1256 /** 1257 \brief Enable FIQ 1258 \details Enables FIQ interrupts by clearing special-purpose register FAULTMASK. 1259 Can only be executed in Privileged modes. 1260 */ 1261 __STATIC_FORCEINLINE void __enable_fault_irq(void) 1262 { 1263 __ASM volatile ("cpsie f" : : : "memory"); 1264 } 1265 1266 1267 /** 1268 \brief Disable FIQ 1269 \details Disables FIQ interrupts by setting special-purpose register FAULTMASK. 1270 Can only be executed in Privileged modes. 1271 */ 1272 __STATIC_FORCEINLINE void __disable_fault_irq(void) 1273 { 1274 __ASM volatile ("cpsid f" : : : "memory"); 1275 } 1276 1277 1278 /** 1279 \brief Get Base Priority 1280 \details Returns the current value of the Base Priority register. 1281 \return Base Priority register value 1282 */ 1283 __STATIC_FORCEINLINE uint32_t __get_BASEPRI(void) 1284 { 1285 uint32_t result; 1286 1287 __ASM volatile ("MRS %0, basepri" : "=r" (result) ); 1288 return(result); 1289 } 1290 1291 1292 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 1293 /** 1294 \brief Get Base Priority (non-secure) 1295 \details Returns the current value of the non-secure Base Priority register when in secure state. 1296 \return Base Priority register value 1297 */ 1298 __STATIC_FORCEINLINE uint32_t __TZ_get_BASEPRI_NS(void) 1299 { 1300 uint32_t result; 1301 1302 __ASM volatile ("MRS %0, basepri_ns" : "=r" (result) ); 1303 return(result); 1304 } 1305 #endif 1306 1307 1308 /** 1309 \brief Set Base Priority 1310 \details Assigns the given value to the Base Priority register. 1311 \param [in] basePri Base Priority value to set 1312 */ 1313 __STATIC_FORCEINLINE void __set_BASEPRI(uint32_t basePri) 1314 { 1315 __ASM volatile ("MSR basepri, %0" : : "r" (basePri) : "memory"); 1316 } 1317 1318 1319 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 1320 /** 1321 \brief Set Base Priority (non-secure) 1322 \details Assigns the given value to the non-secure Base Priority register when in secure state. 
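           BASEPRI (secure or, as here, non-secure) is typically used to mask
           interrupts up to a priority threshold; a hedged sketch using the
           current-state variants:
  \code
    uint32_t basepri = __get_BASEPRI();
    __set_BASEPRI_MAX(0x40U);                 // hypothetical threshold; encoding depends on __NVIC_PRIO_BITS
    // ... section protected from lower-urgency interrupts ...
    __set_BASEPRI(basepri);                   // restore
  \endcode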
1323 \param [in] basePri Base Priority value to set 1324 */ 1325 __STATIC_FORCEINLINE void __TZ_set_BASEPRI_NS(uint32_t basePri) 1326 { 1327 __ASM volatile ("MSR basepri_ns, %0" : : "r" (basePri) : "memory"); 1328 } 1329 #endif 1330 1331 1332 /** 1333 \brief Set Base Priority with condition 1334 \details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled, 1335 or the new value increases the BASEPRI priority level. 1336 \param [in] basePri Base Priority value to set 1337 */ 1338 __STATIC_FORCEINLINE void __set_BASEPRI_MAX(uint32_t basePri) 1339 { 1340 __ASM volatile ("MSR basepri_max, %0" : : "r" (basePri) : "memory"); 1341 } 1342 1343 1344 /** 1345 \brief Get Fault Mask 1346 \details Returns the current value of the Fault Mask register. 1347 \return Fault Mask register value 1348 */ 1349 __STATIC_FORCEINLINE uint32_t __get_FAULTMASK(void) 1350 { 1351 uint32_t result; 1352 1353 __ASM volatile ("MRS %0, faultmask" : "=r" (result) ); 1354 return(result); 1355 } 1356 1357 1358 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 1359 /** 1360 \brief Get Fault Mask (non-secure) 1361 \details Returns the current value of the non-secure Fault Mask register when in secure state. 1362 \return Fault Mask register value 1363 */ 1364 __STATIC_FORCEINLINE uint32_t __TZ_get_FAULTMASK_NS(void) 1365 { 1366 uint32_t result; 1367 1368 __ASM volatile ("MRS %0, faultmask_ns" : "=r" (result) ); 1369 return(result); 1370 } 1371 #endif 1372 1373 1374 /** 1375 \brief Set Fault Mask 1376 \details Assigns the given value to the Fault Mask register. 1377 \param [in] faultMask Fault Mask value to set 1378 */ 1379 __STATIC_FORCEINLINE void __set_FAULTMASK(uint32_t faultMask) 1380 { 1381 __ASM volatile ("MSR faultmask, %0" : : "r" (faultMask) : "memory"); 1382 } 1383 1384 1385 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 1386 /** 1387 \brief Set Fault Mask (non-secure) 1388 \details Assigns the given value to the non-secure Fault Mask register when in secure state. 1389 \param [in] faultMask Fault Mask value to set 1390 */ 1391 __STATIC_FORCEINLINE void __TZ_set_FAULTMASK_NS(uint32_t faultMask) 1392 { 1393 __ASM volatile ("MSR faultmask_ns, %0" : : "r" (faultMask) : "memory"); 1394 } 1395 #endif 1396 1397 #endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__ == 1)) || \ 1398 (defined (__ARM_ARCH_7EM__ ) && (__ARM_ARCH_7EM__ == 1)) || \ 1399 (defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) ) */ 1400 1401 1402 #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \ 1403 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) 1404 1405 /** 1406 \brief Get Process Stack Pointer Limit 1407 Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure 1408 Stack Pointer Limit register hence zero is returned always in non-secure 1409 mode. 1410 1411 \details Returns the current value of the Process Stack Pointer Limit (PSPLIM). 
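           A hedged usage sketch (not part of the original header): pairing the
           stack limit with the process stack pointer when setting up a task
           stack ('task_stack' is a hypothetical array):
  \code
    extern uint32_t task_stack[256];          // hypothetical task stack (word-aligned)
    __set_PSPLIM((uint32_t)&task_stack[0]);   // lowest valid stack address
    __set_PSP((uint32_t)&task_stack[256]);    // initial top of the stack
  \endcode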
1412 \return PSPLIM Register value 1413 */ 1414 __STATIC_FORCEINLINE uint32_t __get_PSPLIM(void) 1415 { 1416 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ 1417 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3))) 1418 // without main extensions, the non-secure PSPLIM is RAZ/WI 1419 return 0U; 1420 #else 1421 uint32_t result; 1422 __ASM volatile ("MRS %0, psplim" : "=r" (result) ); 1423 return result; 1424 #endif 1425 } 1426 1427 #if (defined (__ARM_FEATURE_CMSE) && (__ARM_FEATURE_CMSE == 3)) 1428 /** 1429 \brief Get Process Stack Pointer Limit (non-secure) 1430 Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure 1431 Stack Pointer Limit register hence zero is returned always. 1432 1433 \details Returns the current value of the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state. 1434 \return PSPLIM Register value 1435 */ 1436 __STATIC_FORCEINLINE uint32_t __TZ_get_PSPLIM_NS(void) 1437 { 1438 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))) 1439 // without main extensions, the non-secure PSPLIM is RAZ/WI 1440 return 0U; 1441 #else 1442 uint32_t result; 1443 __ASM volatile ("MRS %0, psplim_ns" : "=r" (result) ); 1444 return result; 1445 #endif 1446 } 1447 #endif 1448 1449 1450 /** 1451 \brief Set Process Stack Pointer Limit 1452 Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure 1453 Stack Pointer Limit register hence the write is silently ignored in non-secure 1454 mode. 1455 1456 \details Assigns the given value to the Process Stack Pointer Limit (PSPLIM). 1457 \param [in] ProcStackPtrLimit Process Stack Pointer Limit value to set 1458 */ 1459 __STATIC_FORCEINLINE void __set_PSPLIM(uint32_t ProcStackPtrLimit) 1460 { 1461 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ 1462 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3))) 1463 // without main extensions, the non-secure PSPLIM is RAZ/WI 1464 (void)ProcStackPtrLimit; 1465 #else 1466 __ASM volatile ("MSR psplim, %0" : : "r" (ProcStackPtrLimit)); 1467 #endif 1468 } 1469 1470 1471 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 1472 /** 1473 \brief Set Process Stack Pointer (non-secure) 1474 Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure 1475 Stack Pointer Limit register hence the write is silently ignored. 1476 1477 \details Assigns the given value to the non-secure Process Stack Pointer Limit (PSPLIM) when in secure state. 1478 \param [in] ProcStackPtrLimit Process Stack Pointer Limit value to set 1479 */ 1480 __STATIC_FORCEINLINE void __TZ_set_PSPLIM_NS(uint32_t ProcStackPtrLimit) 1481 { 1482 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))) 1483 // without main extensions, the non-secure PSPLIM is RAZ/WI 1484 (void)ProcStackPtrLimit; 1485 #else 1486 __ASM volatile ("MSR psplim_ns, %0\n" : : "r" (ProcStackPtrLimit)); 1487 #endif 1488 } 1489 #endif 1490 1491 1492 /** 1493 \brief Get Main Stack Pointer Limit 1494 Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure 1495 Stack Pointer Limit register hence zero is returned always in non-secure 1496 mode. 1497 1498 \details Returns the current value of the Main Stack Pointer Limit (MSPLIM). 
1499 \return MSPLIM Register value 1500 */ 1501 __STATIC_FORCEINLINE uint32_t __get_MSPLIM(void) 1502 { 1503 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ 1504 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3))) 1505 // without main extensions, the non-secure MSPLIM is RAZ/WI 1506 return 0U; 1507 #else 1508 uint32_t result; 1509 __ASM volatile ("MRS %0, msplim" : "=r" (result) ); 1510 return result; 1511 #endif 1512 } 1513 1514 1515 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 1516 /** 1517 \brief Get Main Stack Pointer Limit (non-secure) 1518 Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure 1519 Stack Pointer Limit register hence zero is returned always. 1520 1521 \details Returns the current value of the non-secure Main Stack Pointer Limit(MSPLIM) when in secure state. 1522 \return MSPLIM Register value 1523 */ 1524 __STATIC_FORCEINLINE uint32_t __TZ_get_MSPLIM_NS(void) 1525 { 1526 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))) 1527 // without main extensions, the non-secure MSPLIM is RAZ/WI 1528 return 0U; 1529 #else 1530 uint32_t result; 1531 __ASM volatile ("MRS %0, msplim_ns" : "=r" (result) ); 1532 return result; 1533 #endif 1534 } 1535 #endif 1536 1537 1538 /** 1539 \brief Set Main Stack Pointer Limit 1540 Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure 1541 Stack Pointer Limit register hence the write is silently ignored in non-secure 1542 mode. 1543 1544 \details Assigns the given value to the Main Stack Pointer Limit (MSPLIM). 1545 \param [in] MainStackPtrLimit Main Stack Pointer Limit value to set 1546 */ 1547 __STATIC_FORCEINLINE void __set_MSPLIM(uint32_t MainStackPtrLimit) 1548 { 1549 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \ 1550 (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3))) 1551 // without main extensions, the non-secure MSPLIM is RAZ/WI 1552 (void)MainStackPtrLimit; 1553 #else 1554 __ASM volatile ("MSR msplim, %0" : : "r" (MainStackPtrLimit)); 1555 #endif 1556 } 1557 1558 1559 #if (defined (__ARM_FEATURE_CMSE ) && (__ARM_FEATURE_CMSE == 3)) 1560 /** 1561 \brief Set Main Stack Pointer Limit (non-secure) 1562 Devices without ARMv8-M Main Extensions (i.e. Cortex-M23) lack the non-secure 1563 Stack Pointer Limit register hence the write is silently ignored. 1564 1565 \details Assigns the given value to the non-secure Main Stack Pointer Limit (MSPLIM) when in secure state. 1566 \param [in] MainStackPtrLimit Main Stack Pointer value to set 1567 */ 1568 __STATIC_FORCEINLINE void __TZ_set_MSPLIM_NS(uint32_t MainStackPtrLimit) 1569 { 1570 #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1))) 1571 // without main extensions, the non-secure MSPLIM is RAZ/WI 1572 (void)MainStackPtrLimit; 1573 #else 1574 __ASM volatile ("MSR msplim_ns, %0" : : "r" (MainStackPtrLimit)); 1575 #endif 1576 } 1577 #endif 1578 1579 #endif /* ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \ 1580 (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1)) ) */ 1581 1582 1583 /** 1584 \brief Get FPSCR 1585 \details Returns the current value of the Floating Point Status/Control register. 
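           A hedged usage sketch (not part of the original header): checking and
           clearing the cumulative invalid-operation flag (FPSCR.IOC, bit 0 in
           the Arm FPSCR definition):
  \code
    uint32_t fpscr = __get_FPSCR();
    if ((fpscr & 0x1U) != 0U) {
      __set_FPSCR(fpscr & ~0x1U);             // clear IOC, keep all other bits
    }
  \endcode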
1586 \return Floating Point Status/Control register value 1587 */ 1588 __STATIC_FORCEINLINE uint32_t __get_FPSCR(void) 1589 { 1590 #if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \ 1591 (defined (__FPU_USED ) && (__FPU_USED == 1U)) ) 1592 #if __has_builtin(__builtin_arm_get_fpscr) 1593 // Re-enable using built-in when GCC has been fixed 1594 // || (__GNUC__ > 7) || (__GNUC__ == 7 && __GNUC_MINOR__ >= 2) 1595 /* see https://gcc.gnu.org/ml/gcc-patches/2017-04/msg00443.html */ 1596 return __builtin_arm_get_fpscr(); 1597 #else 1598 uint32_t result; 1599 1600 __ASM volatile ("VMRS %0, fpscr" : "=r" (result) ); 1601 return(result); 1602 #endif 1603 #else 1604 return(0U); 1605 #endif 1606 } 1607 1608 1609 /** 1610 \brief Set FPSCR 1611 \details Assigns the given value to the Floating Point Status/Control register. 1612 \param [in] fpscr Floating Point Status/Control value to set 1613 */ 1614 __STATIC_FORCEINLINE void __set_FPSCR(uint32_t fpscr) 1615 { 1616 #if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \ 1617 (defined (__FPU_USED ) && (__FPU_USED == 1U)) ) 1618 #if __has_builtin(__builtin_arm_set_fpscr) 1619 // Re-enable using built-in when GCC has been fixed 1620 // || (__GNUC__ > 7) || (__GNUC__ == 7 && __GNUC_MINOR__ >= 2) 1621 /* see https://gcc.gnu.org/ml/gcc-patches/2017-04/msg00443.html */ 1622 __builtin_arm_set_fpscr(fpscr); 1623 #else 1624 __ASM volatile ("VMSR fpscr, %0" : : "r" (fpscr) : "vfpcc", "memory"); 1625 #endif 1626 #else 1627 (void)fpscr; 1628 #endif 1629 } 1630 1631 1632 /*@} end of CMSIS_Core_RegAccFunctions */ 1633 1634 1635 /* ################### Compiler specific Intrinsics ########################### */ 1636 /** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics 1637 Access to dedicated SIMD instructions 1638 @{ 1639 */ 1640 1641 #if (defined (__ARM_FEATURE_DSP) && (__ARM_FEATURE_DSP == 1)) 1642 1643 __STATIC_FORCEINLINE uint32_t __SADD8(uint32_t op1, uint32_t op2) 1644 { 1645 uint32_t result; 1646 1647 __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 1648 return(result); 1649 } 1650 1651 __STATIC_FORCEINLINE uint32_t __QADD8(uint32_t op1, uint32_t op2) 1652 { 1653 uint32_t result; 1654 1655 __ASM ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 1656 return(result); 1657 } 1658 1659 __STATIC_FORCEINLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2) 1660 { 1661 uint32_t result; 1662 1663 __ASM ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 1664 return(result); 1665 } 1666 1667 __STATIC_FORCEINLINE uint32_t __UADD8(uint32_t op1, uint32_t op2) 1668 { 1669 uint32_t result; 1670 1671 __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 1672 return(result); 1673 } 1674 1675 __STATIC_FORCEINLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2) 1676 { 1677 uint32_t result; 1678 1679 __ASM ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 1680 return(result); 1681 } 1682 1683 __STATIC_FORCEINLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2) 1684 { 1685 uint32_t result; 1686 1687 __ASM ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 1688 return(result); 1689 } 1690 1691 1692 __STATIC_FORCEINLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2) 1693 { 1694 uint32_t result; 1695 1696 __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 1697 return(result); 1698 } 1699 1700 __STATIC_FORCEINLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2) 1701 { 1702 uint32_t result; 1703 1704 __ASM ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), 
"r" (op2) ); 1705 return(result); 1706 } 1707 1708 __STATIC_FORCEINLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2) 1709 { 1710 uint32_t result; 1711 1712 __ASM ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 1713 return(result); 1714 } 1715 1716 __STATIC_FORCEINLINE uint32_t __USUB8(uint32_t op1, uint32_t op2) 1717 { 1718 uint32_t result; 1719 1720 __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 1721 return(result); 1722 } 1723 1724 __STATIC_FORCEINLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2) 1725 { 1726 uint32_t result; 1727 1728 __ASM ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 1729 return(result); 1730 } 1731 1732 __STATIC_FORCEINLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2) 1733 { 1734 uint32_t result; 1735 1736 __ASM ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 1737 return(result); 1738 } 1739 1740 1741 __STATIC_FORCEINLINE uint32_t __SADD16(uint32_t op1, uint32_t op2) 1742 { 1743 uint32_t result; 1744 1745 __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 1746 return(result); 1747 } 1748 1749 __STATIC_FORCEINLINE uint32_t __QADD16(uint32_t op1, uint32_t op2) 1750 { 1751 uint32_t result; 1752 1753 __ASM ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 1754 return(result); 1755 } 1756 1757 __STATIC_FORCEINLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2) 1758 { 1759 uint32_t result; 1760 1761 __ASM ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 1762 return(result); 1763 } 1764 1765 __STATIC_FORCEINLINE uint32_t __UADD16(uint32_t op1, uint32_t op2) 1766 { 1767 uint32_t result; 1768 1769 __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 1770 return(result); 1771 } 1772 1773 __STATIC_FORCEINLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2) 1774 { 1775 uint32_t result; 1776 1777 __ASM ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 1778 return(result); 1779 } 1780 1781 __STATIC_FORCEINLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2) 1782 { 1783 uint32_t result; 1784 1785 __ASM ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 1786 return(result); 1787 } 1788 1789 __STATIC_FORCEINLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2) 1790 { 1791 uint32_t result; 1792 1793 __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 1794 return(result); 1795 } 1796 1797 __STATIC_FORCEINLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2) 1798 { 1799 uint32_t result; 1800 1801 __ASM ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 1802 return(result); 1803 } 1804 1805 __STATIC_FORCEINLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2) 1806 { 1807 uint32_t result; 1808 1809 __ASM ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 1810 return(result); 1811 } 1812 1813 __STATIC_FORCEINLINE uint32_t __USUB16(uint32_t op1, uint32_t op2) 1814 { 1815 uint32_t result; 1816 1817 __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 1818 return(result); 1819 } 1820 1821 __STATIC_FORCEINLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2) 1822 { 1823 uint32_t result; 1824 1825 __ASM ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 1826 return(result); 1827 } 1828 1829 __STATIC_FORCEINLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2) 1830 { 1831 uint32_t result; 1832 1833 __ASM ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 1834 return(result); 1835 } 1836 1837 __STATIC_FORCEINLINE uint32_t 
__STATIC_FORCEINLINE uint32_t __SASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHASX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __QSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USAD8(uint32_t op1, uint32_t op2)
{
  uint32_t result;

  __ASM ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) );
  return(result);
}

__STATIC_FORCEINLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3)
{
  uint32_t result;

  __ASM ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) );
  return(result);
}

#define __SSAT16(ARG1, ARG2) \
__extension__ \
({                          \
  int32_t __RES, __ARG1 = (ARG1); \
  __ASM volatile ("ssat16 %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) : "cc" ); \
  __RES; \
 })

#define __USAT16(ARG1, ARG2) \
__extension__ \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM volatile ("usat16 %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) : "cc" ); \
  __RES; \
 })
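
/*
  Usage sketch (illustrative only, not part of CMSIS): __SSAT16 and __USAT16
  encode the saturation bit position as an instruction immediate, so the
  second macro argument must be a compile-time constant to satisfy the "I"
  constraint.  A minimal sketch, assuming a hypothetical application helper
  clamp_q12_pair(), clamps both packed int16 lanes to the signed 12-bit
  range [-2048, 2047]:

    #include <stdint.h>

    static inline uint32_t clamp_q12_pair(uint32_t packed_pair)
    {
      // 12 is a literal immediate; a runtime variable here would not compile.
      return (uint32_t)__SSAT16(packed_pair, 12);
    }
*/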
"=r" (result) : "r" (op1)); 1970 return(result); 1971 } 1972 1973 __STATIC_FORCEINLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2) 1974 { 1975 uint32_t result; 1976 1977 __ASM ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 1978 return(result); 1979 } 1980 1981 __STATIC_FORCEINLINE uint32_t __SXTB16(uint32_t op1) 1982 { 1983 uint32_t result; 1984 1985 __ASM ("sxtb16 %0, %1" : "=r" (result) : "r" (op1)); 1986 return(result); 1987 } 1988 1989 __STATIC_FORCEINLINE uint32_t __SXTB16_RORn(uint32_t op1, uint32_t rotate) 1990 { 1991 uint32_t result; 1992 if (__builtin_constant_p(rotate) && ((rotate == 8U) || (rotate == 16U) || (rotate == 24U))) { 1993 __ASM volatile ("sxtb16 %0, %1, ROR %2" : "=r" (result) : "r" (op1), "i" (rotate) ); 1994 } else { 1995 result = __SXTB16(__ROR(op1, rotate)) ; 1996 } 1997 return result; 1998 } 1999 2000 __STATIC_FORCEINLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2) 2001 { 2002 uint32_t result; 2003 2004 __ASM ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 2005 return(result); 2006 } 2007 2008 __STATIC_FORCEINLINE uint32_t __SXTAB16_RORn(uint32_t op1, uint32_t op2, uint32_t rotate) 2009 { 2010 uint32_t result; 2011 if (__builtin_constant_p(rotate) && ((rotate == 8U) || (rotate == 16U) || (rotate == 24U))) { 2012 __ASM volatile ("sxtab16 %0, %1, %2, ROR %3" : "=r" (result) : "r" (op1) , "r" (op2) , "i" (rotate)); 2013 } else { 2014 result = __SXTAB16(op1, __ROR(op2, rotate)); 2015 } 2016 return result; 2017 } 2018 2019 2020 __STATIC_FORCEINLINE uint32_t __SMUAD (uint32_t op1, uint32_t op2) 2021 { 2022 uint32_t result; 2023 2024 __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 2025 return(result); 2026 } 2027 2028 __STATIC_FORCEINLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2) 2029 { 2030 uint32_t result; 2031 2032 __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 2033 return(result); 2034 } 2035 2036 __STATIC_FORCEINLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3) 2037 { 2038 uint32_t result; 2039 2040 __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) ); 2041 return(result); 2042 } 2043 2044 __STATIC_FORCEINLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3) 2045 { 2046 uint32_t result; 2047 2048 __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) ); 2049 return(result); 2050 } 2051 2052 __STATIC_FORCEINLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc) 2053 { 2054 union llreg_u{ 2055 uint32_t w32[2]; 2056 uint64_t w64; 2057 } llr; 2058 llr.w64 = acc; 2059 2060 #ifndef __ARMEB__ /* Little endian */ 2061 __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) ); 2062 #else /* Big endian */ 2063 __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) ); 2064 #endif 2065 2066 return(llr.w64); 2067 } 2068 2069 __STATIC_FORCEINLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc) 2070 { 2071 union llreg_u{ 2072 uint32_t w32[2]; 2073 uint64_t w64; 2074 } llr; 2075 llr.w64 = acc; 2076 2077 #ifndef __ARMEB__ /* Little endian */ 2078 __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) ); 2079 #else /* Big endian */ 2080 __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" 
(llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) ); 2081 #endif 2082 2083 return(llr.w64); 2084 } 2085 2086 __STATIC_FORCEINLINE uint32_t __SMUSD (uint32_t op1, uint32_t op2) 2087 { 2088 uint32_t result; 2089 2090 __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 2091 return(result); 2092 } 2093 2094 __STATIC_FORCEINLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2) 2095 { 2096 uint32_t result; 2097 2098 __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 2099 return(result); 2100 } 2101 2102 __STATIC_FORCEINLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3) 2103 { 2104 uint32_t result; 2105 2106 __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) ); 2107 return(result); 2108 } 2109 2110 __STATIC_FORCEINLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3) 2111 { 2112 uint32_t result; 2113 2114 __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) ); 2115 return(result); 2116 } 2117 2118 __STATIC_FORCEINLINE uint64_t __SMLSLD (uint32_t op1, uint32_t op2, uint64_t acc) 2119 { 2120 union llreg_u{ 2121 uint32_t w32[2]; 2122 uint64_t w64; 2123 } llr; 2124 llr.w64 = acc; 2125 2126 #ifndef __ARMEB__ /* Little endian */ 2127 __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) ); 2128 #else /* Big endian */ 2129 __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) ); 2130 #endif 2131 2132 return(llr.w64); 2133 } 2134 2135 __STATIC_FORCEINLINE uint64_t __SMLSLDX (uint32_t op1, uint32_t op2, uint64_t acc) 2136 { 2137 union llreg_u{ 2138 uint32_t w32[2]; 2139 uint64_t w64; 2140 } llr; 2141 llr.w64 = acc; 2142 2143 #ifndef __ARMEB__ /* Little endian */ 2144 __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) ); 2145 #else /* Big endian */ 2146 __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) ); 2147 #endif 2148 2149 return(llr.w64); 2150 } 2151 2152 __STATIC_FORCEINLINE uint32_t __SEL (uint32_t op1, uint32_t op2) 2153 { 2154 uint32_t result; 2155 2156 __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 2157 return(result); 2158 } 2159 2160 __STATIC_FORCEINLINE int32_t __QADD( int32_t op1, int32_t op2) 2161 { 2162 int32_t result; 2163 2164 __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 2165 return(result); 2166 } 2167 2168 __STATIC_FORCEINLINE int32_t __QSUB( int32_t op1, int32_t op2) 2169 { 2170 int32_t result; 2171 2172 __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); 2173 return(result); 2174 } 2175 2176 2177 #define __PKHBT(ARG1,ARG2,ARG3) \ 2178 __extension__ \ 2179 ({ \ 2180 uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \ 2181 __ASM ("pkhbt %0, %1, %2, lsl %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \ 2182 __RES; \ 2183 }) 2184 2185 #define __PKHTB(ARG1,ARG2,ARG3) \ 2186 __extension__ \ 2187 ({ \ 2188 uint32_t __RES, __ARG1 = (ARG1), __ARG2 = (ARG2); \ 2189 if (ARG3 == 0) \ 2190 __ASM ("pkhtb %0, %1, %2" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2) ); \ 2191 else \ 2192 __ASM ("pkhtb %0, %1, %2, asr %3" : "=r" (__RES) : "r" (__ARG1), "r" (__ARG2), "I" (ARG3) ); \ 2193 __RES; \ 2194 
__STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3)
{
  int32_t result;

  __ASM ("smmla %0, %1, %2, %3" : "=r" (result): "r"  (op1), "r" (op2), "r" (op3) );
  return(result);
}

#endif /* (__ARM_FEATURE_DSP == 1) */
/*@} end of group CMSIS_SIMD_intrinsics */


#pragma GCC diagnostic pop

#endif /* __CMSIS_GCC_H */