core_cmInstr.h
/**************************************************************************//**
 * @file    core_cmInstr.h
 * @brief   CMSIS Cortex-M Core Instruction Access Header File
 * @version V3.30
 * @date    17. February 2014
 *
 * @note
 *
 ******************************************************************************/
/* Copyright (c) 2009 - 2014 ARM LIMITED

   All rights reserved.
   Redistribution and use in source and binary forms, with or without
   modification, are permitted provided that the following conditions are met:
   - Redistributions of source code must retain the above copyright
     notice, this list of conditions and the following disclaimer.
   - Redistributions in binary form must reproduce the above copyright
     notice, this list of conditions and the following disclaimer in the
     documentation and/or other materials provided with the distribution.
   - Neither the name of ARM nor the names of its contributors may be used
     to endorse or promote products derived from this software without
     specific prior written permission.
   *
   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
   AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
   IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
   ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE
   LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
   CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
   SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
   INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
   CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
   ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
   POSSIBILITY OF SUCH DAMAGE.
   ---------------------------------------------------------------------------*/


#ifndef __CORE_CMINSTR_H
#define __CORE_CMINSTR_H


/* ########################## Core Instruction Access ######################### */
/** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
  Access to dedicated instructions
  @{
*/

#if defined ( __CC_ARM ) /*------------------RealView Compiler -----------------*/
/* ARM armcc specific functions */

#if (__ARMCC_VERSION < 400677)
  #error "Please use ARM Compiler Toolchain V4.0.677 or later!"
#endif


/** \brief No Operation

    No Operation does nothing. This instruction can be used for code alignment purposes.
 */
#define __NOP __nop


/** \brief Wait For Interrupt

    Wait For Interrupt is a hint instruction that suspends execution
    until one of a number of events occurs.
 */
#define __WFI __wfi


/** \brief Wait For Event

    Wait For Event is a hint instruction that permits the processor to enter
    a low-power state until one of a number of events occurs.
 */
#define __WFE __wfe


/** \brief Send Event

    Send Event is a hint instruction. It causes an event to be signaled to the CPU.
 */
#define __SEV __sev
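
/* Illustrative usage sketch (added for this documentation, not part of the
   original CMSIS header): a minimal idle loop built on the hint intrinsics
   above. __WFE() sleeps until an event or interrupt occurs; an ISR (or a
   __SEV() from another context) wakes it up again. The flag and function
   names are hypothetical. Guarded out so it has no effect on compilation. */
#if 0
static volatile uint32_t example_work_pending = 0;   /* set from an ISR */

static void example_idle_loop(void)
{
  for (;;) {
    while (!example_work_pending) {
      __WFE();                        /* sleep until an event or interrupt */
    }
    example_work_pending = 0;
    /* ... handle the pending work here ... */
  }
}
#endif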


/** \brief Instruction Synchronization Barrier

    Instruction Synchronization Barrier flushes the pipeline in the processor,
    so that all instructions following the ISB are fetched from cache or
    memory, after the instruction has been completed.
 */
#define __ISB() __isb(0xF)


/** \brief Data Synchronization Barrier

    This function acts as a special kind of Data Memory Barrier.
    It completes when all explicit memory accesses before this instruction complete.
 */
#define __DSB() __dsb(0xF)


/** \brief Data Memory Barrier

    This function ensures the apparent order of the explicit memory operations before
    and after the instruction, without ensuring their completion.
 */
#define __DMB() __dmb(0xF)
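
/* Illustrative barrier sketch (added for this documentation, not part of the
   original CMSIS header): after copying code into RAM, __DSB() ensures the
   writes have completed and __ISB() flushes the pipeline so the processor
   does not execute stale instructions. The buffer and function names are
   hypothetical. Guarded out so it has no effect on compilation. */
#if 0
extern uint32_t example_ram_code[64];          /* destination buffer in RAM  */
extern const uint32_t example_flash_code[64];  /* code image stored in flash */

static void example_load_and_run(void)
{
  uint32_t i;

  for (i = 0; i < 64; i++) {
    example_ram_code[i] = example_flash_code[i];          /* copy the code image  */
  }
  __DSB();                                                /* writes are complete  */
  __ISB();                                                /* refetch instructions */
  ((void (*)(void))((uint32_t)example_ram_code | 1U))();  /* call it (Thumb bit set) */
}
#endif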


/** \brief Reverse byte order (32 bit)

    This function reverses the byte order in an integer value.

    \param [in] value Value to reverse
    \return Reversed value
 */
#define __REV __rev


/** \brief Reverse byte order (16 bit)

    This function reverses the byte order in two unsigned short values.

    \param [in] value Value to reverse
    \return Reversed value
 */
#ifndef __NO_EMBEDDED_ASM
__attribute__((section(".rev16_text"))) __STATIC_INLINE __ASM uint32_t __REV16(uint32_t value)
{
  rev16 r0, r0
  bx lr
}
#endif

/** \brief Reverse byte order in signed short value

    This function reverses the byte order in a signed short value with sign extension to integer.

    \param [in] value Value to reverse
    \return Reversed value
 */
#ifndef __NO_EMBEDDED_ASM
__attribute__((section(".revsh_text"))) __STATIC_INLINE __ASM int32_t __REVSH(int32_t value)
{
  revsh r0, r0
  bx lr
}
#endif
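
/* Illustrative byte-swap sketch (added for this documentation, not part of
   the original CMSIS header): __REV converts a 32-bit value between
   little-endian and big-endian (e.g. network) byte order, and __REV16 swaps
   the bytes within each halfword. The helper names are hypothetical. Guarded
   out so it has no effect on compilation. */
#if 0
static uint32_t example_ntohl(uint32_t net_value)
{
  return __REV(net_value);            /* 0x11223344 -> 0x44332211 */
}

static uint32_t example_swap_halfword_bytes(uint32_t value)
{
  return __REV16(value);              /* 0x11223344 -> 0x22114433 */
}
#endif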


/** \brief Rotate Right in unsigned value (32 bit)

    This function rotates an unsigned 32-bit value to the right by the specified number of bits.

    \param [in] value Value to rotate
    \param [in] shift Number of bits to rotate
    \return Rotated value
 */
#define __ROR __ror


/** \brief Breakpoint

    This function causes the processor to enter Debug state.
    Debug tools can use this to investigate system state when the instruction at a particular address is reached.

    \param [in] value is ignored by the processor.
    If required, a debugger can use it to store additional information about the breakpoint.
 */
#define __BKPT(value) __breakpoint(value)
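
/* Illustrative breakpoint sketch (added for this documentation, not part of
   the original CMSIS header): a simple assert-style macro that halts in the
   debugger when a condition fails. The immediate value 0 is arbitrary and is
   only visible to the debugger. The macro name is hypothetical. Guarded out
   so it has no effect on compilation. */
#if 0
#define EXAMPLE_DEBUG_ASSERT(cond)  do { if (!(cond)) { __BKPT(0); } } while (0)
#endif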


#if (__CORTEX_M >= 0x03)

/** \brief Reverse bit order of value

    This function reverses the bit order of the given value.

    \param [in] value Value to reverse
    \return Reversed value
 */
#define __RBIT __rbit


/** \brief LDR Exclusive (8 bit)

    This function performs an exclusive LDR command for an 8-bit value.

    \param [in] ptr Pointer to data
    \return value of type uint8_t at (*ptr)
 */
#define __LDREXB(ptr) ((uint8_t ) __ldrex(ptr))


/** \brief LDR Exclusive (16 bit)

    This function performs an exclusive LDR command for 16-bit values.

    \param [in] ptr Pointer to data
    \return value of type uint16_t at (*ptr)
 */
#define __LDREXH(ptr) ((uint16_t) __ldrex(ptr))


/** \brief LDR Exclusive (32 bit)

    This function performs an exclusive LDR command for 32-bit values.

    \param [in] ptr Pointer to data
    \return value of type uint32_t at (*ptr)
 */
#define __LDREXW(ptr) ((uint32_t ) __ldrex(ptr))


/** \brief STR Exclusive (8 bit)

    This function performs an exclusive STR command for 8-bit values.

    \param [in] value Value to store
    \param [in] ptr Pointer to location
    \return 0 Function succeeded
    \return 1 Function failed
 */
#define __STREXB(value, ptr) __strex(value, ptr)


/** \brief STR Exclusive (16 bit)

    This function performs an exclusive STR command for 16-bit values.

    \param [in] value Value to store
    \param [in] ptr Pointer to location
    \return 0 Function succeeded
    \return 1 Function failed
 */
#define __STREXH(value, ptr) __strex(value, ptr)


/** \brief STR Exclusive (32 bit)

    This function performs an exclusive STR command for 32-bit values.

    \param [in] value Value to store
    \param [in] ptr Pointer to location
    \return 0 Function succeeded
    \return 1 Function failed
 */
#define __STREXW(value, ptr) __strex(value, ptr)


/** \brief Remove the exclusive lock

    This function removes the exclusive lock which is created by LDREX.

 */
#define __CLREX __clrex


/** \brief Signed Saturate

    This function saturates a signed value.

    \param [in] value Value to be saturated
    \param [in] sat Bit position to saturate to (1..32)
    \return Saturated value
 */
#define __SSAT __ssat


/** \brief Unsigned Saturate

    This function saturates an unsigned value.

    \param [in] value Value to be saturated
    \param [in] sat Bit position to saturate to (0..31)
    \return Saturated value
 */
#define __USAT __usat


/** \brief Count leading zeros

    This function counts the number of leading zeros of a data value.

    \param [in] value Value to count the leading zeros
    \return number of leading zeros in value
 */
#define __CLZ __clz

#endif /* (__CORTEX_M >= 0x03) */

#elif defined ( __GNUC__ ) /*------------------ GNU Compiler ---------------------*/
/* GNU gcc specific functions */

/* Define macros for porting to both thumb1 and thumb2.
 * For thumb1, use low registers (r0-r7), specified by constraint "l".
 * Otherwise, use general registers, specified by constraint "r". */
#if defined (__thumb__) && !defined (__thumb2__)
#define __CMSIS_GCC_OUT_REG(r) "=l" (r)
#define __CMSIS_GCC_USE_REG(r) "l" (r)
#else
#define __CMSIS_GCC_OUT_REG(r) "=r" (r)
#define __CMSIS_GCC_USE_REG(r) "r" (r)
#endif

/** \brief No Operation

    No Operation does nothing. This instruction can be used for code alignment purposes.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __NOP(void)
{
  __ASM volatile ("nop");
}


/** \brief Wait For Interrupt

    Wait For Interrupt is a hint instruction that suspends execution
    until one of a number of events occurs.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __WFI(void)
{
  __ASM volatile ("wfi");
}


/** \brief Wait For Event

    Wait For Event is a hint instruction that permits the processor to enter
    a low-power state until one of a number of events occurs.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __WFE(void)
{
  __ASM volatile ("wfe");
}


/** \brief Send Event

    Send Event is a hint instruction. It causes an event to be signaled to the CPU.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __SEV(void)
{
  __ASM volatile ("sev");
}


/** \brief Instruction Synchronization Barrier

    Instruction Synchronization Barrier flushes the pipeline in the processor,
    so that all instructions following the ISB are fetched from cache or
    memory, after the instruction has been completed.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __ISB(void)
{
  __ASM volatile ("isb");
}


/** \brief Data Synchronization Barrier

    This function acts as a special kind of Data Memory Barrier.
    It completes when all explicit memory accesses before this instruction complete.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __DSB(void)
{
  __ASM volatile ("dsb");
}


/** \brief Data Memory Barrier

    This function ensures the apparent order of the explicit memory operations before
    and after the instruction, without ensuring their completion.
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __DMB(void)
{
  __ASM volatile ("dmb");
}


/** \brief Reverse byte order (32 bit)

    This function reverses the byte order in an integer value.

    \param [in] value Value to reverse
    \return Reversed value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __REV(uint32_t value)
{
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
  return __builtin_bswap32(value);
#else
  uint32_t result;

  __ASM volatile ("rev %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
#endif
}


/** \brief Reverse byte order (16 bit)

    This function reverses the byte order in two unsigned short values.

    \param [in] value Value to reverse
    \return Reversed value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __REV16(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rev16 %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
}


/** \brief Reverse byte order in signed short value

    This function reverses the byte order in a signed short value with sign extension to integer.

    \param [in] value Value to reverse
    \return Reversed value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE int32_t __REVSH(int32_t value)
{
#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  return (short)__builtin_bswap16(value);
#else
  uint32_t result;

  __ASM volatile ("revsh %0, %1" : __CMSIS_GCC_OUT_REG (result) : __CMSIS_GCC_USE_REG (value) );
  return(result);
#endif
}


/** \brief Rotate Right in unsigned value (32 bit)

    This function rotates an unsigned 32-bit value to the right by the specified number of bits.

    \param [in] op1 Value to rotate
    \param [in] op2 Number of bits to rotate
    \return Rotated value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
{
  return (op1 >> op2) | (op1 << (32 - op2));
}
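
/* Illustrative rotate sketch (added for this documentation, not part of the
   original CMSIS header): a left rotation expressed through __ROR, a common
   building block in hash and cipher round functions. The function name is
   hypothetical. Guarded out so it has no effect on compilation. */
#if 0
static uint32_t example_rotl32(uint32_t value, uint32_t bits)
{
  bits &= 31U;
  return bits ? __ROR(value, 32U - bits) : value;   /* left-rotate via right-rotate */
}
#endif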


/** \brief Breakpoint

    This function causes the processor to enter Debug state.
    Debug tools can use this to investigate system state when the instruction at a particular address is reached.

    \param [in] value is ignored by the processor.
    If required, a debugger can use it to store additional information about the breakpoint.
 */
#define __BKPT(value) __ASM volatile ("bkpt "#value)


#if (__CORTEX_M >= 0x03)

/** \brief Reverse bit order of value

    This function reverses the bit order of the given value.

    \param [in] value Value to reverse
    \return Reversed value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __RBIT(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("rbit %0, %1" : "=r" (result) : "r" (value) );
  return(result);
}


/** \brief LDR Exclusive (8 bit)

    This function performs an exclusive LDR command for an 8-bit value.

    \param [in] ptr Pointer to data
    \return value of type uint8_t at (*ptr)
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint8_t __LDREXB(volatile uint8_t *addr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrexb %0, %1" : "=r" (result) : "Q" (*addr) );
#else
  /* Prior to GCC 4.8, the "Q" constraint expands to [rx, #0], which is not
     accepted by the assembler, so the following less efficient pattern has
     to be used.
   */
  __ASM volatile ("ldrexb %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
  return ((uint8_t) result); /* Add explicit type cast here */
}


/** \brief LDR Exclusive (16 bit)

    This function performs an exclusive LDR command for 16-bit values.

    \param [in] ptr Pointer to data
    \return value of type uint16_t at (*ptr)
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint16_t __LDREXH(volatile uint16_t *addr)
{
  uint32_t result;

#if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
  __ASM volatile ("ldrexh %0, %1" : "=r" (result) : "Q" (*addr) );
#else
  /* Prior to GCC 4.8, the "Q" constraint expands to [rx, #0], which is not
     accepted by the assembler, so the following less efficient pattern has
     to be used.
   */
  __ASM volatile ("ldrexh %0, [%1]" : "=r" (result) : "r" (addr) : "memory" );
#endif
  return ((uint16_t) result); /* Add explicit type cast here */
}


/** \brief LDR Exclusive (32 bit)

    This function performs an exclusive LDR command for 32-bit values.

    \param [in] ptr Pointer to data
    \return value of type uint32_t at (*ptr)
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __LDREXW(volatile uint32_t *addr)
{
  uint32_t result;

  __ASM volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );
  return(result);
}


/** \brief STR Exclusive (8 bit)

    This function performs an exclusive STR command for 8-bit values.

    \param [in] value Value to store
    \param [in] ptr Pointer to location
    \return 0 Function succeeded
    \return 1 Function failed
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr)
{
  uint32_t result;

  __ASM volatile ("strexb %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
  return(result);
}


/** \brief STR Exclusive (16 bit)

    This function performs an exclusive STR command for 16-bit values.

    \param [in] value Value to store
    \param [in] ptr Pointer to location
    \return 0 Function succeeded
    \return 1 Function failed
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr)
{
  uint32_t result;

  __ASM volatile ("strexh %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" ((uint32_t)value) );
  return(result);
}


/** \brief STR Exclusive (32 bit)

    This function performs an exclusive STR command for 32-bit values.

    \param [in] value Value to store
    \param [in] ptr Pointer to location
    \return 0 Function succeeded
    \return 1 Function failed
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr)
{
  uint32_t result;

  __ASM volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
  return(result);
}


/** \brief Remove the exclusive lock

    This function removes the exclusive lock which is created by LDREX.

 */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __CLREX(void)
{
  __ASM volatile ("clrex" ::: "memory");
}
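
/* Illustrative exclusive-access sketch (added for this documentation, not
   part of the original CMSIS header): a lock-free increment built from the
   LDREX/STREX intrinsics above. __STREXW returns 1 if another context broke
   the reservation, in which case the loop retries. The function name is
   hypothetical. Guarded out so it has no effect on compilation. */
#if 0
static uint32_t example_atomic_increment(volatile uint32_t *counter)
{
  uint32_t value;

  do {
    value = __LDREXW(counter) + 1U;           /* load-exclusive and add */
  } while (__STREXW(value, counter) != 0U);   /* retry if the store-exclusive failed */
  __DMB();                                    /* order against later memory accesses */
  return value;
}
#endif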


/** \brief Signed Saturate

    This function saturates a signed value.

    \param [in] value Value to be saturated
    \param [in] sat Bit position to saturate to (1..32)
    \return Saturated value
 */
#define __SSAT(ARG1,ARG2) \
({ \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("ssat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })


/** \brief Unsigned Saturate

    This function saturates an unsigned value.

    \param [in] value Value to be saturated
    \param [in] sat Bit position to saturate to (0..31)
    \return Saturated value
 */
#define __USAT(ARG1,ARG2) \
({ \
  uint32_t __RES, __ARG1 = (ARG1); \
  __ASM ("usat %0, %1, %2" : "=r" (__RES) : "I" (ARG2), "r" (__ARG1) ); \
  __RES; \
 })
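
/* Illustrative saturation sketch (added for this documentation, not part of
   the original CMSIS header): clamping an intermediate 32-bit result into the
   signed 16-bit range with __SSAT, e.g. when mixing two audio samples. The
   saturation width must be a compile-time constant. The function name is
   hypothetical. Guarded out so it has no effect on compilation. */
#if 0
static int16_t example_mix_samples(int16_t a, int16_t b)
{
  int32_t sum = (int32_t)a + (int32_t)b;   /* intermediate sum may exceed 16 bits */

  return (int16_t)__SSAT(sum, 16);         /* clamp to -32768..32767 */
}
#endif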


/** \brief Count leading zeros

    This function counts the number of leading zeros of a data value.

    \param [in] value Value to count the leading zeros
    \return number of leading zeros in value
 */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint8_t __CLZ(uint32_t value)
{
  uint32_t result;

  __ASM volatile ("clz %0, %1" : "=r" (result) : "r" (value) );
  return ((uint8_t) result); /* Add explicit type cast here */
}
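
/* Illustrative count-leading-zeros sketch (added for this documentation, not
   part of the original CMSIS header): the index of the highest set bit is
   31 - __CLZ(value), a common way to compute floor(log2(value)). The function
   name is hypothetical. Guarded out so it has no effect on compilation. */
#if 0
static uint32_t example_floor_log2(uint32_t value)
{
  return (value != 0U) ? (31U - __CLZ(value)) : 0U;   /* log2(0) is undefined; return 0 */
}
#endif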

#endif /* (__CORTEX_M >= 0x03) */


#elif defined ( __ICCARM__ ) /*------------------ ICC Compiler -------------------*/
/* IAR iccarm specific functions */
#include <cmsis_iar.h>


#elif defined ( __TMS470__ ) /*---------------- TI CCS Compiler ------------------*/
/* TI CCS specific functions */
#include <cmsis_ccs.h>


#elif defined ( __TASKING__ ) /*------------------ TASKING Compiler --------------*/
/* TASKING carm specific functions */
/*
 * The CMSIS functions have been implemented as intrinsics in the compiler.
 * Please use "carm -?i" to get an up-to-date list of all intrinsics,
 * including the CMSIS ones.
 */


#elif defined ( __CSMC__ ) /*------------------ COSMIC Compiler -------------------*/
/* Cosmic specific functions */
#include <cmsis_csm.h>

#endif

/*@}*/ /* end of group CMSIS_Core_InstructionInterface */

#endif /* __CORE_CMINSTR_H */