/* discoverpixy documentation capture of core_cmInstr.h (CMSIS Cortex-M Core Instruction Access). */
/**************************************************************************//**
 * @file     core_cmInstr.h
 * @brief    CMSIS Cortex-M Core Instruction Access Header File
 ******************************************************************************/
/* Copyright (c) 2009 - 2013 ARM LIMITED

   All rights reserved.
   Redistribution and use in source and binary forms, with or without
   modification, are permitted provided that the following conditions are met:
   - Redistributions of source code must retain the above copyright
     notice, this list of conditions and the following disclaimer.
   - Redistributions in binary form must reproduce the above copyright
     notice, this list of conditions and the following disclaimer in the
     documentation and/or other materials provided with the distribution.
   - Neither the name of ARM nor the names of its contributors may be used
     to endorse or promote products derived from this software without
     specific prior written permission.
   *
   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
   AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
   IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
   ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE
   LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
   CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
   SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
   INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
   CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
   ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
   POSSIBILITY OF SUCH DAMAGE.
   ---------------------------------------------------------------------------*/
36 
37 
38 #ifndef __CORE_CMINSTR_H
39 #define __CORE_CMINSTR_H
40 
41 
42 /* ########################## Core Instruction Access ######################### */
48 #if defined ( __CC_ARM ) /*------------------RealView Compiler -----------------*/
49 /* ARM armcc specific functions */
50 
51 #if (__ARMCC_VERSION < 400677)
52  #error "Please use ARM Compiler Toolchain V4.0.677 or later!"
53 #endif
54 
55 
60 #define __NOP __nop
61 
62 
68 #define __WFI __wfi
69 
70 
76 #define __WFE __wfe
77 
78 
83 #define __SEV __sev
84 
85 
92 #define __ISB() __isb(0xF)
93 
94 
100 #define __DSB() __dsb(0xF)
101 
102 
108 #define __DMB() __dmb(0xF)
109 
110 
118 #define __REV __rev
119 
120 
128 #ifndef __NO_EMBEDDED_ASM
129 __attribute__((section(".rev16_text"))) __STATIC_INLINE __ASM uint32_t __REV16(uint32_t value)
130 {
131  rev16 r0, r0
132  bx lr
133 }
134 #endif
135 
143 #ifndef __NO_EMBEDDED_ASM
144 __attribute__((section(".revsh_text"))) __STATIC_INLINE __ASM int32_t __REVSH(int32_t value)
145 {
146  revsh r0, r0
147  bx lr
148 }
149 #endif
150 
151 
160 #define __ROR __ror
161 
162 
171 #define __BKPT(value) __breakpoint(value)
172 
173 
174 #if (__CORTEX_M >= 0x03)
175 
183 #define __RBIT __rbit
184 
185 
193 #define __LDREXB(ptr) ((uint8_t ) __ldrex(ptr))
194 
195 
203 #define __LDREXH(ptr) ((uint16_t) __ldrex(ptr))
204 
205 
213 #define __LDREXW(ptr) ((uint32_t ) __ldrex(ptr))
214 
215 
225 #define __STREXB(value, ptr) __strex(value, ptr)
226 
227 
237 #define __STREXH(value, ptr) __strex(value, ptr)
238 
239 
249 #define __STREXW(value, ptr) __strex(value, ptr)
250 
251 
257 #define __CLREX __clrex
258 
259 
268 #define __SSAT __ssat
269 
270 
279 #define __USAT __usat
280 
281 
289 #define __CLZ __clz
290 
291 #endif /* (__CORTEX_M >= 0x03) */
292 
293 
294 
295 #elif defined ( __ICCARM__ ) /*------------------ ICC Compiler -------------------*/
296 /* IAR iccarm specific functions */
297 
298 #include <cmsis_iar.h>
299 
300 
301 #elif defined ( __TMS470__ ) /*---------------- TI CCS Compiler ------------------*/
302 /* TI CCS specific functions */
303 
304 #include <cmsis_ccs.h>
305 
306 
307 #elif defined ( __GNUC__ ) /*------------------ GNU Compiler ---------------------*/
308 /* GNU gcc specific functions */
309 
310 /* Define macros for porting to both thumb1 and thumb2.
311  * For thumb1, use low register (r0-r7), specified by constrant "l"
312  * Otherwise, use general registers, specified by constrant "r" */
313 #if defined (__thumb__) && !defined (__thumb2__)
314 #define __CMSIS_GCC_OUT_REG(r) "=l" (r)
315 #define __CMSIS_GCC_USE_REG(r) "l" (r)
316 #else
317 #define __CMSIS_GCC_OUT_REG(r) "=r" (r)
318 #define __CMSIS_GCC_USE_REG(r) "r" (r)
319 #endif
320 
325 __attribute__( ( always_inline ) ) __STATIC_INLINE void __NOP(void)
326 {
327  __ASM volatile ("nop");
328 }
329 
330 
336 __attribute__( ( always_inline ) ) __STATIC_INLINE void __WFI(void)
337 {
338  __ASM volatile ("wfi");
339 }
340 
341 
347 __attribute__( ( always_inline ) ) __STATIC_INLINE void __WFE(void)
348 {
349  __ASM volatile ("wfe");
350 }
351 
352 
357 __attribute__( ( always_inline ) ) __STATIC_INLINE void __SEV(void)
358 {
359  __ASM volatile ("sev");
360 }
361 
362 
369 __attribute__( ( always_inline ) ) __STATIC_INLINE void __ISB(void)
370 {
371  __ASM volatile ("isb");
372 }
373 
374 
380 __attribute__( ( always_inline ) ) __STATIC_INLINE void __DSB(void)
381 {
382  __ASM volatile ("dsb");
383 }
384 
385 
391 __attribute__( ( always_inline ) ) __STATIC_INLINE void __DMB(void)
392 {
393  __ASM volatile ("dmb");
394 }
395 
396 
404 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __REV(uint32_t value)
405 {
406 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
407  return __builtin_bswap32(value);
408 #else
409  uint32_t result;
410 
411  __ASM volatile ("rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
412  return(result);
413 #endif
414 }
415 
416 
424 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __REV16(uint32_t value)
425 {
426  uint32_t result;
427 
428  __ASM volatile ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
429  return(result);
430 }
431 
432 
440 __attribute__( ( always_inline ) ) __STATIC_INLINE int32_t __REVSH(int32_t value)
441 {
442 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
443  return (short)__builtin_bswap16(value);
444 #else
445  uint32_t result;
446 
447  __ASM volatile ("revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
448  return(result);
449 #endif
450 }
451 
452 
461 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
462 {
463  return (op1 >> op2) | (op1 << (32 - op2));
464 }
465 
466 
/** \brief  Breakpoint

    Causes the processor to enter Debug state. Debug tools can use this to
    investigate system state when the instruction at a particular address
    is reached.

    \param [in]    value  8-bit immediate, ignored by the processor but
                          available to a debugger to identify the breakpoint
 */
#define __BKPT(value) __ASM volatile ("bkpt "#value)


478 #if (__CORTEX_M >= 0x03)
479 
487 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __RBIT(uint32_t value)
488 {
489  uint32_t result;
490 
491  __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
492  return(result);
493 }
494 
495 
503 __attribute__( ( always_inline ) ) __STATIC_INLINE uint8_t __LDREXB(volatile uint8_t *addr)
504 {
505  uint32_t result;
506 
507 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
508  __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) );
509 #else
510  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
511  accepted by assembler. So has to use following less efficient pattern.
512  */
513  __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
514 #endif
515  return(result);
516 }
517 
518 
526 __attribute__( ( always_inline ) ) __STATIC_INLINE uint16_t __LDREXH(volatile uint16_t *addr)
527 {
528  uint32_t result;
529 
530 #if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
531  __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) );
532 #else
533  /* Prior to GCC 4.8, "Q" will be expanded to [rx, #0] which is not
534  accepted by assembler. So has to use following less efficient pattern.
535  */
536  __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
537 #endif
538  return(result);
539 }
540 
541 
549 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __LDREXW(volatile uint32_t *addr)
550 {
551  uint32_t result;
552 
553  __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );
554  return(result);
555 }
556 
557 
567 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
568 {
569  uint32_t result;
570 
571  __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
572  return(result);
573 }
574 
575 
585 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
586 {
587  uint32_t result;
588 
589  __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
590  return(result);
591 }
592 
593 
603 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
604 {
605  uint32_t result;
606 
607  __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
608  return(result);
609 }
610 
611 
617 __attribute__( ( always_inline ) ) __STATIC_INLINE void __CLREX(void)
618 {
619  __ASM volatile ("clrex" ::: "memory");
620 }
621 
622 
/** \brief  Signed Saturate

    This macro saturates a signed value.

    \param [in]  ARG1  Value to be saturated
    \param [in]  ARG2  Bit position to saturate to (1..32); must be a
                       compile-time constant ("I" constraint)
    \return            Saturated value
 */
#define __SSAT(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })


/** \brief  Unsigned Saturate

    This macro saturates an unsigned value.

    \param [in]  ARG1  Value to be saturated
    \param [in]  ARG2  Bit position to saturate to (0..31); must be a
                       compile-time constant ("I" constraint)
    \return            Saturated value
 */
#define __USAT(ARG1,ARG2) \
({                          \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat %0, %1, %2" : "=r" (__RES) :  "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })


662 __attribute__( ( always_inline ) ) __STATIC_INLINE uint8_t __CLZ(uint32_t value)
663 {
664  uint32_t result;
665 
666  __ASM volatile ("clz %0, %1" : "=r" (result) : "r" (value) );
667  return(result);
668 }
669 
670 #endif /* (__CORTEX_M >= 0x03) */
671 
672 
673 
674 
675 #elif defined ( __TASKING__ ) /*------------------ TASKING Compiler --------------*/
676 /* TASKING carm specific functions */
677 
678 /*
679  * The CMSIS functions have been implemented as intrinsics in the compiler.
680  * Please use "carm -?i" to get an up to date list of all intrinsics,
681  * Including the CMSIS ones.
682  */
683 
684 #endif
685  /* end of group CMSIS_Core_InstructionInterface */
687 
688 #endif /* __CORE_CMINSTR_H */