mbed library sources

Fork of mbed-src by mbed official

Embed: (wiki syntax)

« Back to documentation index

Show/hide line numbers core_caFunc.h Source File

core_caFunc.h

Go to the documentation of this file.
00001 /**************************************************************************//**
00002  * @file     core_caFunc.h
00003  * @brief    CMSIS Cortex-A Core Function Access Header File
00004  * @version  V3.10
00005  * @date     9 May 2013
00006  *
00007  * @note
00008  *
00009  ******************************************************************************/
00010 /* Copyright (c) 2009 - 2012 ARM LIMITED
00011 
00012    All rights reserved.
00013    Redistribution and use in source and binary forms, with or without
00014    modification, are permitted provided that the following conditions are met:
00015    - Redistributions of source code must retain the above copyright
00016      notice, this list of conditions and the following disclaimer.
00017    - Redistributions in binary form must reproduce the above copyright
00018      notice, this list of conditions and the following disclaimer in the
00019      documentation and/or other materials provided with the distribution.
00020    - Neither the name of ARM nor the names of its contributors may be used
00021      to endorse or promote products derived from this software without
00022      specific prior written permission.
00023    *
00024    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
00025    AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
00026    IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
00027    ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE
00028    LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
00029    CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
00030    SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
00031    INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
00032    CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
00033    ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
00034    POSSIBILITY OF SUCH DAMAGE.
00035    ---------------------------------------------------------------------------*/
00036 
00037 
00038 #ifndef __CORE_CAFUNC_H__
00039 #define __CORE_CAFUNC_H__
00040 
00041 
00042 /* ###########################  Core Function Access  ########################### */
00043 /** \ingroup  CMSIS_Core_FunctionInterface
00044     \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
00045   @{
00046  */
00047 
00048 #if   defined ( __CC_ARM ) /*------------------RealView Compiler -----------------*/
00049 /* ARM armcc specific functions */
00050 
00051 #if (__ARMCC_VERSION < 400677)
00052   #error "Please use ARM Compiler Toolchain V4.0.677 or later!"
00053 #endif
00054 
00055 #define MODE_USR 0x10
00056 #define MODE_FIQ 0x11
00057 #define MODE_IRQ 0x12
00058 #define MODE_SVC 0x13
00059 #define MODE_MON 0x16
00060 #define MODE_ABT 0x17
00061 #define MODE_HYP 0x1A
00062 #define MODE_UND 0x1B
00063 #define MODE_SYS 0x1F
00064 
00065 /** \brief  Get APSR Register
00066 
00067     This function returns the content of the APSR Register.
00068 
00069     \return               APSR Register value
00070  */
00071 __STATIC_INLINE uint32_t __get_APSR(void)
00072 {
00073   register uint32_t __regAPSR          __ASM("apsr");
00074   return(__regAPSR);
00075 }
00076 
00077 
00078 /** \brief  Get CPSR Register
00079 
00080     This function returns the content of the CPSR Register.
00081 
00082     \return               CPSR Register value
00083  */
00084 __STATIC_INLINE uint32_t __get_CPSR(void)
00085 {
00086   register uint32_t __regCPSR          __ASM("cpsr");
00087   return(__regCPSR);
00088 }
00089 
00090 /** \brief  Set Stack Pointer
00091 
00092     This function assigns the given value to the current stack pointer.
00093 
00094     \param [in]    topOfStack  Stack Pointer value to set
00095  */
00096 register uint32_t __regSP              __ASM("sp");
00097 __STATIC_INLINE void __set_SP(uint32_t topOfStack)
00098 {
00099     __regSP = topOfStack;
00100 }
00101 
00102 
00103 /** \brief  Get link register
00104 
00105     This function returns the value of the link register
00106 
00107     \return    Value of link register
00108  */
00109 register uint32_t __reglr         __ASM("lr");
00110 __STATIC_INLINE uint32_t __get_LR(void)
00111 {
00112   return(__reglr);
00113 }
00114 
00115 /** \brief  Set link register
00116 
00117     This function sets the value of the link register
00118 
00119     \param [in]    lr  LR value to set
00120  */
00121 __STATIC_INLINE void __set_LR(uint32_t lr)
00122 {
00123   __reglr = lr;
00124 }
00125 
00126 /** \brief  Set Process Stack Pointer
00127 
00128     This function assigns the given value to the USR/SYS Stack Pointer (PSP).
00129 
00130     \param [in]    topOfProcStack  USR/SYS Stack Pointer value to set
00131  */
00132 __STATIC_ASM void __set_PSP(uint32_t topOfProcStack)
00133 {
00134     ARM
00135     PRESERVE8
00136 
00137     BIC     R0, R0, #7  ;ensure stack is 8-byte aligned
00138     MRS     R1, CPSR
00139     CPS     #MODE_SYS   ;no effect in USR mode
00140     MOV     SP, R0
00141     MSR     CPSR_c, R1  ;no effect in USR mode
00142     ISB
00143     BX      LR
00144 
00145 }
00146 
00147 /** \brief  Set User Mode
00148 
00149     This function changes the processor state to User Mode
00150 
00151     \note   Takes no parameters. CPS has no effect when already executed in User mode.
00152  */
00153 __STATIC_ASM void __set_CPS_USR(void)
00154 {
00155     ARM 
00156 
00157     CPS  #MODE_USR  
00158     BX   LR
00159 }
00160 
00161 
00162 /** \brief  Enable FIQ
00163 
00164     This function enables FIQ interrupts by clearing the F-bit in the CPSR.
00165     Can only be executed in Privileged modes.
00166  */
00167 #define __enable_fault_irq                __enable_fiq
00168 
00169 
00170 /** \brief  Disable FIQ
00171 
00172     This function disables FIQ interrupts by setting the F-bit in the CPSR.
00173     Can only be executed in Privileged modes.
00174  */
00175 #define __disable_fault_irq               __disable_fiq
00176 
00177 
00178 /** \brief  Get FPSCR
00179 
00180     This function returns the current value of the Floating Point Status/Control register.
00181 
00182     \return               Floating Point Status/Control register value
00183  */
00184 __STATIC_INLINE uint32_t __get_FPSCR(void)
00185 {
00186 #if (__FPU_PRESENT == 1) && (__FPU_USED == 1)
00187   register uint32_t __regfpscr         __ASM("fpscr");
00188   return(__regfpscr);
00189 #else
00190    return(0);
00191 #endif
00192 }
00193 
00194 
00195 /** \brief  Set FPSCR
00196 
00197     This function assigns the given value to the Floating Point Status/Control register.
00198 
00199     \param [in]    fpscr  Floating Point Status/Control value to set
00200  */
00201 __STATIC_INLINE void __set_FPSCR(uint32_t fpscr)
00202 {
00203 #if (__FPU_PRESENT == 1) && (__FPU_USED == 1)
00204   register uint32_t __regfpscr         __ASM("fpscr");
00205   __regfpscr = (fpscr);
00206 #endif
00207 }
00208 
00209 /** \brief  Get FPEXC
00210 
00211     This function returns the current value of the Floating Point Exception Control register.
00212 
00213     \return               Floating Point Exception Control register value
00214  */
00215 __STATIC_INLINE uint32_t __get_FPEXC(void)
00216 {
00217 #if (__FPU_PRESENT == 1)
00218   register uint32_t __regfpexc         __ASM("fpexc");
00219   return(__regfpexc);
00220 #else
00221    return(0);
00222 #endif
00223 }
00224 
00225 
00226 /** \brief  Set FPEXC
00227 
00228     This function assigns the given value to the Floating Point Exception Control register.
00229 
00230     \param [in]    fpexc  Floating Point Exception Control value to set
00231  */
00232 __STATIC_INLINE void __set_FPEXC(uint32_t fpexc)
00233 {
00234 #if (__FPU_PRESENT == 1)
00235   register uint32_t __regfpexc         __ASM("fpexc");
00236   __regfpexc = (fpexc);
00237 #endif
00238 }
00239 
00240 /** \brief  Get CPACR
00241 
00242     This function returns the current value of the Coprocessor Access Control register.
00243 
00244     \return               Coprocessor Access Control register value
00245  */
00246 __STATIC_INLINE uint32_t __get_CPACR(void)
00247 {
00248     register uint32_t __regCPACR         __ASM("cp15:0:c1:c0:2");
00249     return __regCPACR;
00250 }
00251 
00252 /** \brief  Set CPACR
00253 
00254     This function assigns the given value to the Coprocessor Access Control register.
00255 
00256     \param [in]    cpacr  Coporcessor Acccess Control value to set
00257  */
00258 __STATIC_INLINE void __set_CPACR(uint32_t cpacr)
00259 {
00260     register uint32_t __regCPACR         __ASM("cp15:0:c1:c0:2");
00261     __regCPACR = cpacr;
00262     __ISB();
00263 }
00264 
00265 /** \brief  Get CBAR
00266 
00267     This function returns the value of the Configuration Base Address register.
00268 
00269     \return               Configuration Base Address register value
00270  */
00271 __STATIC_INLINE uint32_t __get_CBAR() {
00272     register uint32_t __regCBAR         __ASM("cp15:4:c15:c0:0");
00273     return(__regCBAR);
00274 }
00275 
00276 /** \brief  Get TTBR0
00277 
00278     This function returns the value of Translation Table Base Register 0.
00279 
00280     \return               Translation Table Base Register 0 value
00281  */
00282 __STATIC_INLINE uint32_t __get_TTBR0() {
00283     register uint32_t __regTTBR0        __ASM("cp15:0:c2:c0:0");
00284     return(__regTTBR0);
00285 }
00286 
00287 /** \brief  Set TTBR0
00288 
00289     This function assigns the given value to Translation Table Base Register 0.
00290 
00291     \param [in]    ttbr0  Translation Table Base Register 0 value to set
00292  */
00293 __STATIC_INLINE void __set_TTBR0(uint32_t ttbr0) {
00294     register uint32_t __regTTBR0        __ASM("cp15:0:c2:c0:0");
00295     __regTTBR0 = ttbr0;
00296     __ISB();
00297 }
00298 
00299 /** \brief  Get DACR
00300 
00301     This function returns the value of the Domain Access Control Register.
00302 
00303     \return               Domain Access Control Register value
00304  */
00305 __STATIC_INLINE uint32_t __get_DACR() {
00306     register uint32_t __regDACR         __ASM("cp15:0:c3:c0:0");
00307     return(__regDACR);
00308 }
00309 
00310 /** \brief  Set DACR
00311 
00312     This function assigns the given value to the Domain Access Control Register.
00313 
00314     \param [in]    dacr   Domain Access Control Register value to set
00315  */
00316 __STATIC_INLINE void __set_DACR(uint32_t dacr) {
00317     register uint32_t __regDACR         __ASM("cp15:0:c3:c0:0");
00318     __regDACR = dacr;
00319     __ISB();
00320 }
00321 
00322 /******************************** Cache and BTAC enable  ****************************************************/
00323 
00324 /** \brief  Set SCTLR
00325 
00326     This function assigns the given value to the System Control Register.
00327 
00328     \param [in]    sctlr  System Control Register, value to set
00329  */
00330 __STATIC_INLINE void __set_SCTLR(uint32_t sctlr)
00331 {
00332     register uint32_t __regSCTLR         __ASM("cp15:0:c1:c0:0");
00333     __regSCTLR = sctlr;
00334 }
00335 
00336 /** \brief  Get SCTLR
00337 
00338     This function returns the value of the System Control Register.
00339 
00340     \return               System Control Register value
00341  */
00342 __STATIC_INLINE uint32_t __get_SCTLR() {
00343     register uint32_t __regSCTLR         __ASM("cp15:0:c1:c0:0");
00344     return(__regSCTLR);
00345 }
00346 
00347 /** \brief  Enable Caches
00348 
00349     Enable Caches
00350  */
00351 __STATIC_INLINE void __enable_caches(void) {
00352     // Set I bit 12 to enable I Cache
00353     // Set C bit  2 to enable D Cache
00354     __set_SCTLR( __get_SCTLR() | (1 << 12) | (1 << 2));
00355 }
00356 
00357 /** \brief  Disable Caches
00358 
00359     Disable Caches
00360  */
00361 __STATIC_INLINE void __disable_caches(void) {
00362     // Clear I bit 12 to disable I Cache
00363     // Clear C bit  2 to disable D Cache
00364     __set_SCTLR( __get_SCTLR() & ~(1 << 12) & ~(1 << 2));
00365     __ISB();
00366 }
00367 
00368 /** \brief  Enable BTAC
00369 
00370     Enable BTAC
00371  */
00372 __STATIC_INLINE void __enable_btac(void) {
00373     // Set Z bit 11 to enable branch prediction
00374     __set_SCTLR( __get_SCTLR() | (1 << 11));
00375     __ISB();
00376 }
00377 
00378 /** \brief  Disable BTAC
00379 
00380     Disable BTAC
00381  */
00382 __STATIC_INLINE void __disable_btac(void) {
00383     // Clear Z bit 11 to disable branch prediction
00384     __set_SCTLR( __get_SCTLR() & ~(1 << 11));
00385 }
00386 
00387 
00388 /** \brief  Enable MMU
00389 
00390     Enable MMU
00391  */
00392 __STATIC_INLINE void __enable_mmu(void) {
00393     // Set M bit 0 to enable the MMU
00394     // Set AFE bit to enable simplified access permissions model
00395     // Clear TRE bit to disable TEX remap and A bit to disable strict alignment fault checking
00396     __set_SCTLR( (__get_SCTLR() & ~(1 << 28) & ~(1 << 1)) | 1 | (1 << 29));
00397     __ISB();
00398 }
00399 
00400 /** \brief  Disable MMU
00401 
00402     Disable MMU
00403  */
00404 __STATIC_INLINE void __disable_mmu(void) {
00405     // Clear M bit 0 to disable the MMU
00406     __set_SCTLR( __get_SCTLR() & ~1);
00407     __ISB();
00408 }
00409 
00410 /******************************** TLB maintenance operations ************************************************/
00411 /** \brief  Invalidate the whole tlb
00412 
00413     TLBIALL. Invalidate the whole tlb
00414  */
00415 
00416 __STATIC_INLINE void __ca9u_inv_tlb_all(void) {
00417     register uint32_t __TLBIALL         __ASM("cp15:0:c8:c7:0");
00418     __TLBIALL = 0;
00419     __DSB();
00420     __ISB();
00421 }
00422 
00423 /******************************** BTB maintenance operations ************************************************/
00424 /** \brief  Invalidate entire branch predictor array
00425 
00426     BPIALL. Branch Predictor Invalidate All.
00427  */
00428 
00429 __STATIC_INLINE void __v7_inv_btac(void) {
00430     register uint32_t __BPIALL          __ASM("cp15:0:c7:c5:6");
00431     __BPIALL  = 0;
00432     __DSB();     //ensure completion of the invalidation
00433     __ISB();     //ensure instruction fetch path sees new state
00434 }
00435 
00436 
00437 /******************************** L1 cache operations ******************************************************/
00438 
00439 /** \brief  Invalidate the whole I$
00440 
00441     ICIALLU. Instruction Cache Invalidate All to PoU
00442  */
00443 __STATIC_INLINE void __v7_inv_icache_all(void) {
00444     register uint32_t __ICIALLU         __ASM("cp15:0:c7:c5:0");
00445     __ICIALLU = 0;
00446     __DSB();     //ensure completion of the invalidation
00447     __ISB();     //ensure instruction fetch path sees new I cache state
00448 }
00449 
00450 /** \brief  Clean D$ by MVA
00451 
00452     DCCMVAC. Data cache clean by MVA to PoC
00453  */
00454 __STATIC_INLINE void __v7_clean_dcache_mva(void *va) {
00455     register uint32_t __DCCMVAC         __ASM("cp15:0:c7:c10:1");
00456     __DCCMVAC = (uint32_t)va;
00457     __DMB();     //ensure the ordering of data cache maintenance operations and their effects
00458 }
00459 
00460 /** \brief  Invalidate D$ by MVA
00461 
00462     DCIMVAC. Data cache invalidate by MVA to PoC
00463  */
00464 __STATIC_INLINE void __v7_inv_dcache_mva(void *va) {
00465     register uint32_t __DCIMVAC         __ASM("cp15:0:c7:c6:1");
00466     __DCIMVAC = (uint32_t)va;
00467     __DMB();     //ensure the ordering of data cache maintenance operations and their effects
00468 }
00469 
00470 /** \brief  Clean and Invalidate D$ by MVA
00471 
00472     DCCIMVAC. Data cache clean and invalidate by MVA to PoC
00473  */
00474 __STATIC_INLINE void __v7_clean_inv_dcache_mva(void *va) {
00475     register uint32_t __DCCIMVAC        __ASM("cp15:0:c7:c14:1");
00476     __DCCIMVAC = (uint32_t)va;
00477     __DMB();     //ensure the ordering of data cache maintenance operations and their effects
00478 }
00479 
00480 /** \brief
00481  * Generic mechanism for cleaning/invalidating the entire data or unified cache to the point of coherency: op 0 = invalidate (DCISW), op 1 = clean (DCCSW), op 2 = clean and invalidate (DCCISW), all by Set/Way.
00482  */
00483 #pragma push
00484 #pragma arm
00485 __STATIC_ASM void __v7_all_cache(uint32_t op) {
00486         ARM 
00487 
00488         PUSH    {R4-R11}
00489 
00490         MRC     p15, 1, R6, c0, c0, 1      // Read CLIDR
00491         ANDS    R3, R6, #0x07000000        // Extract coherency level
00492         MOV     R3, R3, LSR #23            // Total cache levels << 1
00493         BEQ     Finished                   // If 0, no need to clean
00494 
00495         MOV     R10, #0                    // R10 holds current cache level << 1
00496 Loop1   ADD     R2, R10, R10, LSR #1       // R2 holds cache "Set" position
00497         MOV     R1, R6, LSR R2             // Bottom 3 bits are the Cache-type for this level
00498         AND     R1, R1, #7                 // Isolate those lower 3 bits
00499         CMP     R1, #2
00500         BLT     Skip                       // No cache or only instruction cache at this level
00501 
00502         MCR     p15, 2, R10, c0, c0, 0     // Write the Cache Size selection register
00503         ISB                                // ISB to sync the change to the CacheSizeID reg
00504         MRC     p15, 1, R1, c0, c0, 0      // Reads current Cache Size ID register
00505         AND     R2, R1, #7                 // Extract the line length field
00506         ADD     R2, R2, #4                 // Add 4 for the line length offset (log2 16 bytes)
00507         LDR     R4, =0x3FF
00508         ANDS    R4, R4, R1, LSR #3         // R4 is the max number on the way size (right aligned)
00509         CLZ     R5, R4                     // R5 is the bit position of the way size increment
00510         LDR     R7, =0x7FFF
00511         ANDS    R7, R7, R1, LSR #13        // R7 is the max number of the index size (right aligned)
00512 
00513 Loop2   MOV     R9, R4                     // R9 working copy of the max way size (right aligned)
00514 
00515 Loop3   ORR     R11, R10, R9, LSL R5       // Factor in the Way number and cache number into R11
00516         ORR     R11, R11, R7, LSL R2       // Factor in the Set number
00517         CMP     R0, #0
00518         BNE     Dccsw
00519         MCR     p15, 0, R11, c7, c6, 2     // DCISW. Invalidate by Set/Way
00520         B       cont
00521 Dccsw   CMP     R0, #1
00522         BNE     Dccisw
00523         MCR     p15, 0, R11, c7, c10, 2    // DCCSW. Clean by Set/Way
00524         B       cont
00525 Dccisw  MCR     p15, 0, R11, c7, c14, 2    // DCCISW, Clean and Invalidate by Set/Way
00526 cont    SUBS    R9, R9, #1                 // Decrement the Way number
00527         BGE     Loop3
00528         SUBS    R7, R7, #1                 // Decrement the Set number
00529         BGE     Loop2
00530 Skip    ADD     R10, R10, #2               // increment the cache number
00531         CMP     R3, R10
00532         BGT     Loop1
00533 
00534 Finished
00535         DSB
00536         POP    {R4-R11}
00537         BX     lr
00538 
00539 }
00540 #pragma pop
00541 
00542 /** \brief  __v7_all_cache - helper function used by the whole-D$ invalidate/clean/clean-and-invalidate wrappers below.
00543 
00544  */
00545 
00546 /** \brief  Invalidate the whole D$
00547 
00548     DCISW. Invalidate by Set/Way
00549  */
00550 
00551 __STATIC_INLINE void __v7_inv_dcache_all(void) {
00552     __v7_all_cache(0);
00553 }
00554 
00555 /** \brief  Clean the whole D$
00556 
00557     DCCSW. Clean by Set/Way
00558  */
00559 
00560 __STATIC_INLINE void __v7_clean_dcache_all(void) {
00561     __v7_all_cache(1);
00562 }
00563 
00564 /** \brief  Clean and invalidate the whole D$
00565 
00566     DCCISW. Clean and Invalidate by Set/Way
00567  */
00568 
00569 __STATIC_INLINE void __v7_clean_inv_dcache_all(void) {
00570     __v7_all_cache(2);
00571 }
00572 
00573 #include "core_ca_mmu.h"
00574 
00575 #elif (defined (__ICCARM__)) /*---------------- ICC Compiler ---------------------*/
00576 
00577 #error IAR Compiler support not implemented for Cortex-A
00578 
00579 #elif (defined (__GNUC__)) /*------------------ GNU Compiler ---------------------*/
00580 
00581 /* GNU gcc specific functions */
00582 
00583 #define MODE_USR 0x10
00584 #define MODE_FIQ 0x11
00585 #define MODE_IRQ 0x12
00586 #define MODE_SVC 0x13
00587 #define MODE_MON 0x16
00588 #define MODE_ABT 0x17
00589 #define MODE_HYP 0x1A
00590 #define MODE_UND 0x1B
00591 #define MODE_SYS 0x1F
00592 
00593 
/** \brief  Enable IRQ Interrupts

  This function enables IRQ interrupts by clearing the I-bit in the CPSR.
  Can only be executed in Privileged modes.
 */
00594 __attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_irq(void)
00595 {
00596     __ASM volatile ("cpsie i");
00597 }
00598 
00599 /** \brief  Disable IRQ Interrupts
00600 
00601   This function disables IRQ interrupts by setting the I-bit in the CPSR.
00602   Can only be executed in Privileged modes.
00603  */
00604 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __disable_irq(void)
00605 {
00606     uint32_t result;
00607 
00608     __ASM volatile ("mrs %0, cpsr" : "=r" (result));
00609     __ASM volatile ("cpsid i");
00610     return(result & 0x80);
00611 }
00612 
00613 
00614 /** \brief  Get APSR Register
00615 
00616     This function returns the content of the APSR Register.
00617 
00618     \return               APSR Register value
00619  */
00620 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_APSR(void)
00621 {
00622 #if 1
00623     uint32_t result;
00624 
00625     __ASM volatile ("mrs %0, apsr" : "=r" (result) );
00626     return (result);
00627 #else
00628   register uint32_t __regAPSR          __ASM("apsr");
00629   return(__regAPSR);
00630 #endif
00631 }
00632 
00633 
00634 /** \brief  Get CPSR Register
00635 
00636     This function returns the content of the CPSR Register.
00637 
00638     \return               CPSR Register value
00639  */
00640 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_CPSR(void)
00641 {
00642 #if 1
00643   register uint32_t __regCPSR;
00644   __ASM volatile ("mrs %0, cpsr" : "=r" (__regCPSR));
00645 #else
00646   register uint32_t __regCPSR          __ASM("cpsr");
00647 #endif
00648   return(__regCPSR);
00649 }
00650 
00651 #if 0
00652 /** \brief  Set Stack Pointer
00653 
00654     This function assigns the given value to the current stack pointer.
00655 
00656     \param [in]    topOfStack  Stack Pointer value to set
00657  */
00658 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_SP(uint32_t topOfStack)
00659 {
00660     register uint32_t __regSP       __ASM("sp");
00661     __regSP = topOfStack;
00662 }
00663 #endif
00664 
00665 /** \brief  Get link register
00666 
00667     This function returns the value of the link register
00668 
00669     \return    Value of link register
00670  */
00671 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_LR(void)
00672 {
00673   register uint32_t __reglr         __ASM("lr");
00674   return(__reglr);
00675 }
00676 
00677 #if 0
00678 /** \brief  Set link register
00679 
00680     This function sets the value of the link register
00681 
00682     \param [in]    lr  LR value to set
00683  */
00684 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_LR(uint32_t lr)
00685 {
00686   register uint32_t __reglr         __ASM("lr");
00687   __reglr = lr;
00688 }
00689 #endif
00690 
00691 /** \brief  Set Process Stack Pointer
00692 
00693     This function assigns the given value to the USR/SYS Stack Pointer (PSP).
00694 
00695     \param [in]    topOfProcStack  USR/SYS Stack Pointer value to set
00696  */
00697 extern void __set_PSP(uint32_t topOfProcStack);
00698 
00699 /** \brief  Set User Mode
00700 
00701     This function changes the processor state to User Mode
00702 
00703     \note   Takes no parameters. CPS has no effect when already executed in User mode.
00704  */
00705 extern void __set_CPS_USR(void);
00706 
00707 /** \brief  Enable FIQ
00708 
00709     This function enables FIQ interrupts by clearing the F-bit in the CPSR.
00710     Can only be executed in Privileged modes.
00711  */
00712 #define __enable_fault_irq                __enable_fiq
00713 
00714 
00715 /** \brief  Disable FIQ
00716 
00717     This function disables FIQ interrupts by setting the F-bit in the CPSR.
00718     Can only be executed in Privileged modes.
00719  */
00720 #define __disable_fault_irq               __disable_fiq
00721 
00722 
00723 /** \brief  Get FPSCR
00724 
00725     This function returns the current value of the Floating Point Status/Control register.
00726 
00727     \return               Floating Point Status/Control register value
00728  */
00729 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_FPSCR(void)
00730 {
00731 #if (__FPU_PRESENT == 1) && (__FPU_USED == 1)
00732 #if 1
00733     uint32_t result;
00734 
00735     __ASM volatile ("vmrs %0, fpscr" : "=r" (result) );
00736     return (result);
00737 #else
00738   register uint32_t __regfpscr         __ASM("fpscr");
00739   return(__regfpscr);
00740 #endif
00741 #else
00742    return(0);
00743 #endif
00744 }
00745 
00746 
00747 /** \brief  Set FPSCR
00748 
00749     This function assigns the given value to the Floating Point Status/Control register.
00750 
00751     \param [in]    fpscr  Floating Point Status/Control value to set
00752  */
00753 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_FPSCR(uint32_t fpscr)
00754 {
00755 #if (__FPU_PRESENT == 1) && (__FPU_USED == 1)
00756 #if 1
00757     __ASM volatile ("vmsr fpscr, %0" : : "r" (fpscr) );
00758 #else
00759   register uint32_t __regfpscr         __ASM("fpscr");
00760   __regfpscr = (fpscr);
00761 #endif
00762 #endif
00763 }
00764 
00765 /** \brief  Get FPEXC
00766 
00767     This function returns the current value of the Floating Point Exception Control register.
00768 
00769     \return               Floating Point Exception Control register value
00770  */
00771 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_FPEXC(void)
00772 {
00773 #if (__FPU_PRESENT == 1)
00774 #if 1
00775     uint32_t result;
00776 
00777     __ASM volatile ("vmrs %0, fpexc" : "=r" (result));
00778     return (result);
00779 #else
00780   register uint32_t __regfpexc         __ASM("fpexc");
00781   return(__regfpexc);
00782 #endif
00783 #else
00784    return(0);
00785 #endif
00786 }
00787 
00788 
00789 /** \brief  Set FPEXC
00790 
00791     This function assigns the given value to the Floating Point Exception Control register.
00792 
00793     \param [in]    fpexc  Floating Point Exception Control value to set
00794  */
00795 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_FPEXC(uint32_t fpexc)
00796 {
00797 #if (__FPU_PRESENT == 1)
00798 #if 1
00799     __ASM volatile ("vmsr fpexc, %0" : : "r" (fpexc));
00800 #else
00801   register uint32_t __regfpexc         __ASM("fpexc");
00802   __regfpexc = (fpexc);
00803 #endif
00804 #endif
00805 }
00806 
00807 /** \brief  Get CPACR
00808 
00809     This function returns the current value of the Coprocessor Access Control register.
00810 
00811     \return               Coprocessor Access Control register value
00812  */
00813 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_CPACR(void)
00814 {
00815 #if 1
00816     register uint32_t __regCPACR;
00817     __ASM volatile ("mrc p15, 0, %0, c1, c0, 2" : "=r" (__regCPACR));
00818 #else
00819     register uint32_t __regCPACR         __ASM("cp15:0:c1:c0:2");
00820 #endif
00821     return __regCPACR;
00822 }
00823 
00824 /** \brief  Set CPACR
00825 
00826     This function assigns the given value to the Coprocessor Access Control register.
00827 
00828     \param [in]    cpacr  Coporcessor Acccess Control value to set
00829  */
00830 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_CPACR(uint32_t cpacr)
00831 {
00832 #if 1
00833     __ASM volatile ("mcr p15, 0, %0, c1, c0, 2" : : "r" (cpacr));
00834 #else
00835     register uint32_t __regCPACR         __ASM("cp15:0:c1:c0:2");
00836     __regCPACR = cpacr;
00837 #endif
00838     __ISB();
00839 }
00840 
00841 /** \brief  Get CBAR
00842 
00843     This function returns the value of the Configuration Base Address register.
00844 
00845     \return               Configuration Base Address register value
00846  */
00847 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_CBAR() {
00848 #if 1
00849     register uint32_t __regCBAR;
00850     __ASM volatile ("mrc p15, 4, %0, c15, c0, 0" : "=r" (__regCBAR));
00851 #else
00852     register uint32_t __regCBAR         __ASM("cp15:4:c15:c0:0");
00853 #endif
00854     return(__regCBAR);
00855 }
00856 
00857 /** \brief  Get TTBR0
00858 
00859     This function returns the value of Translation Table Base Register 0.
00860 
00861     \return               Translation Table Base Register 0 value
00862  */
00863 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_TTBR0() {
00864 #if 1
00865     register uint32_t __regTTBR0;
00866     __ASM volatile ("mrc p15, 0, %0, c2, c0, 0" : "=r" (__regTTBR0));
00867 #else
00868     register uint32_t __regTTBR0        __ASM("cp15:0:c2:c0:0");
00869 #endif
00870     return(__regTTBR0);
00871 }
00872 
00873 /** \brief  Set TTBR0
00874 
00875     This function assigns the given value to Translation Table Base Register 0.
00876 
00877     \param [in]    ttbr0  Translation Table Base Register 0 value to set
00878  */
00879 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_TTBR0(uint32_t ttbr0) {
00880 #if 1
00881     __ASM volatile ("mcr p15, 0, %0, c2, c0, 0" : : "r" (ttbr0));
00882 #else
00883     register uint32_t __regTTBR0        __ASM("cp15:0:c2:c0:0");
00884     __regTTBR0 = ttbr0;
00885 #endif
00886     __ISB();
00887 }
00888 
00889 /** \brief  Get DACR
00890 
00891     This function returns the value of the Domain Access Control Register.
00892 
00893     \return               Domain Access Control Register value
00894  */
00895 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_DACR() {
00896 #if 1
00897     register uint32_t __regDACR;
00898     __ASM volatile ("mrc p15, 0, %0, c3, c0, 0" : "=r" (__regDACR));
00899 #else
00900     register uint32_t __regDACR         __ASM("cp15:0:c3:c0:0");
00901 #endif
00902     return(__regDACR);
00903 }
00904 
00905 /** \brief  Set DACR
00906 
00907     This function assigns the given value to the Domain Access Control Register.
00908 
00909     \param [in]    dacr   Domain Access Control Register value to set
00910  */
00911 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_DACR(uint32_t dacr) {
00912 #if 1
00913     __ASM volatile ("mcr p15, 0, %0, c3, c0, 0" : : "r" (dacr));
00914 #else
00915     register uint32_t __regDACR         __ASM("cp15:0:c3:c0:0");
00916     __regDACR = dacr;
00917 #endif
00918     __ISB();
00919 }
00920 
00921 /******************************** Cache and BTAC enable  ****************************************************/
00922 
00923 /** \brief  Set SCTLR
00924 
00925     This function assigns the given value to the System Control Register.
00926 
00927     \param [in]    sctlr  System Control Register, value to set
00928  */
00929 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_SCTLR(uint32_t sctlr)
00930 {
00931 #if 1
00932     __ASM volatile ("mcr p15, 0, %0, c1, c0, 0" : : "r" (sctlr));
00933 #else
00934     register uint32_t __regSCTLR         __ASM("cp15:0:c1:c0:0");
00935     __regSCTLR = sctlr;
00936 #endif
00937 }
00938 
00939 /** \brief  Get SCTLR
00940 
00941     This function returns the value of the System Control Register.
00942 
00943     \return               System Control Register value
00944  */
00945 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_SCTLR() {
00946 #if 1
    /* MRC read of CP15 c1 (SCTLR). */
00947     register uint32_t __regSCTLR;
00948     __ASM volatile ("mrc p15, 0, %0, c1, c0, 0" : "=r" (__regSCTLR));
00949 #else
    /* Dead branch: ARM Compiler named-register binding for the same CP15 register. */
00950     register uint32_t __regSCTLR         __ASM("cp15:0:c1:c0:0");
00951 #endif
00952     return(__regSCTLR);
00953 }
00954 
00955 /** \brief  Enable Caches
00956 
00957     Enable Caches
00958  */
00959 __attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_caches(void) {
00960     // Set I bit 12 to enable I Cache
00961     // Set C bit  2 to enable D Cache
    // Read-modify-write of SCTLR; only the I and C bits change.
    // NOTE(review): unlike __disable_caches, no __ISB() follows here — confirm
    // whether callers are expected to invalidate/synchronize before relying on the caches.
00962     __set_SCTLR( __get_SCTLR() | (1 << 12) | (1 << 2));
00963 }
00964 
00965 /** \brief  Disable Caches
00966 
00967     Disable Caches
00968  */
00969 __attribute__( ( always_inline ) ) __STATIC_INLINE void __disable_caches(void) {
00970     // Clear I bit 12 to disable I Cache
00971     // Clear C bit  2 to disable D Cache
    // NOTE(review): this does not clean the D cache first; dirty lines may be
    // lost when C is cleared — callers presumably clean via __v7_clean_dcache_all
    // beforehand. Confirm against call sites.
00972     __set_SCTLR( __get_SCTLR() & ~(1 << 12) & ~(1 << 2));
    // ISB so instruction fetch sees the caches-off state.
00973     __ISB();
00974 }
00975 
00976 /** \brief  Enable BTAC
00977 
00978     Enable BTAC (branch target address cache / branch prediction)
00979  */
00980 __attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_btac(void) {
00981     // Set Z bit 11 to enable branch prediction
00982     __set_SCTLR( __get_SCTLR() | (1 << 11));
    // ISB so subsequent instruction fetches use the new prediction setting.
00983     __ISB();
00984 }
00985 
00986 /** \brief  Disable BTAC
00987 
00988     Disable BTAC (branch target address cache / branch prediction)
00989  */
00990 __attribute__( ( always_inline ) ) __STATIC_INLINE void __disable_btac(void) {
00991     // Clear Z bit 11 to disable branch prediction
    // NOTE(review): no __ISB() here, unlike __enable_btac — confirm this asymmetry is intended.
00992     __set_SCTLR( __get_SCTLR() & ~(1 << 11));
00993 }
00994 
00995 
00996 /** \brief  Enable MMU
00997 
00998     Enable MMU
00999  */
01000 __attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_mmu(void) {
01001     // Set M bit 0 to enable the MMU
01002     // Set AFE bit to enable simplified access permissions model
01003     // Clear TRE bit to disable TEX remap and A bit to disable strict alignment fault checking
    // Bit positions in SCTLR: TRE = bit 28, A = bit 1, M = bit 0, AFE = bit 29.
01004     __set_SCTLR( (__get_SCTLR() & ~(1 << 28) & ~(1 << 1)) | 1 | (1 << 29));
    // ISB so instructions after this execute under the new translation regime.
01005     __ISB();
01006 }
01007 
01008 /** \brief  Disable MMU
01009 
01010     Disable MMU
01011  */
01012 __attribute__( ( always_inline ) ) __STATIC_INLINE void __disable_mmu(void) {
01013     // Clear M bit 0 to disable the MMU
01014     __set_SCTLR( __get_SCTLR() & ~1);
    // ISB so instructions after this execute with translation off.
01015     __ISB();
01016 }
01017 
01018 /******************************** TLB maintenance operations ************************************************/
01019 /** \brief  Invalidate the whole tlb
01020 
01021     TLBIALL. Invalidate the whole tlb
01022  */
01023 
01024 __attribute__( ( always_inline ) ) __STATIC_INLINE void __ca9u_inv_tlb_all(void) {
01025 #if 1
    /* MCR to CP15 c8,c7,0 (TLBIALL); the written value is ignored, 0 by convention. */
01026     __ASM volatile ("mcr p15, 0, %0, c8, c7, 0" : : "r" (0));
01027 #else
    /* Dead branch: ARM Compiler named-register binding for TLBIALL. */
01028     register uint32_t __TLBIALL         __ASM("cp15:0:c8:c7:0");
01029     __TLBIALL = 0;
01030 #endif
    /* DSB: wait for the invalidation to complete; ISB: flush the pipeline so
       subsequent fetches use the invalidated TLB state. */
01031     __DSB();
01032     __ISB();
01033 }
01034 
01035 /******************************** BTB maintenance operations ************************************************/
01036 /** \brief  Invalidate entire branch predictor array
01037 
01038     BPIALL. Branch Predictor Invalidate All.
01039  */
01040 
01041 __attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_inv_btac(void) {
01042 #if 1
    /* MCR to CP15 c7,c5,6 (BPIALL); written value is ignored, 0 by convention. */
01043     __ASM volatile ("mcr p15, 0, %0, c7, c5, 6" : : "r" (0));
01044 #else
    /* Dead branch: ARM Compiler named-register binding for BPIALL. */
01045     register uint32_t __BPIALL          __ASM("cp15:0:c7:c5:6");
01046     __BPIALL  = 0;
01047 #endif
01048     __DSB();     //ensure completion of the invalidation
01049     __ISB();     //ensure instruction fetch path sees new state
01050 }
01051 
01052 
01053 /******************************** L1 cache operations ******************************************************/
01054 
01055 /** \brief  Invalidate the whole I$
01056 
01057     ICIALLU. Instruction Cache Invalidate All to PoU
01058  */
01059 __attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_inv_icache_all(void) {
01060 #if 1
    /* MCR to CP15 c7,c5,0 (ICIALLU); written value is ignored, 0 by convention. */
01061     __ASM volatile ("mcr p15, 0, %0, c7, c5, 0" : : "r" (0));
01062 #else
    /* Dead branch: ARM Compiler named-register binding for ICIALLU. */
01063     register uint32_t __ICIALLU         __ASM("cp15:0:c7:c5:0");
01064     __ICIALLU = 0;
01065 #endif
01066     __DSB();     //ensure completion of the invalidation
01067     __ISB();     //ensure instruction fetch path sees new I cache state
01068 }
01069 
01070 /** \brief  Clean D$ by MVA
01071 
01072     DCCMVAC. Data cache clean by MVA to PoC
01073 
01074     \param [in]    va     Virtual address of the cache line to clean
01075  */
01074 __attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_clean_dcache_mva(void *va) {
01075 #if 1
    /* MCR to CP15 c7,c10,1 (DCCMVAC): clean the line containing va to PoC. */
01076     __ASM volatile ("mcr p15, 0, %0, c7, c10, 1" : : "r" ((uint32_t)va));
01077 #else
    /* Dead branch: ARM Compiler named-register binding for DCCMVAC. */
01078     register uint32_t __DCCMVAC         __ASM("cp15:0:c7:c10:1");
01079     __DCCMVAC = (uint32_t)va;
01080 #endif
01081     __DMB();     //ensure the ordering of data cache maintenance operations and their effects
01082 }
01083 
01084 /** \brief  Invalidate D$ by MVA
01085 
01086     DCIMVAC. Data cache invalidate by MVA to PoC
01087 
01088     \param [in]    va     Virtual address of the cache line to invalidate
01089  */
01088 __attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_inv_dcache_mva(void *va) {
01089 #if 1
    /* MCR to CP15 c7,c6,1 (DCIMVAC): invalidate (discard) the line containing va. */
01090     __ASM volatile ("mcr p15, 0, %0, c7, c6, 1" : : "r" ((uint32_t)va));
01091 #else
    /* Dead branch: ARM Compiler named-register binding for DCIMVAC. */
01092     register uint32_t __DCIMVAC         __ASM("cp15:0:c7:c6:1");
01093     __DCIMVAC = (uint32_t)va;
01094 #endif
01095     __DMB();     //ensure the ordering of data cache maintenance operations and their effects
01096 }
01097 
01098 /** \brief  Clean and Invalidate D$ by MVA
01099 
01100     DCCIMVAC. Data cache clean and invalidate by MVA to PoC
01101 
01102     \param [in]    va     Virtual address of the cache line to clean and invalidate
01103  */
01102 __attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_clean_inv_dcache_mva(void *va) {
01103 #if 1
    /* MCR to CP15 c7,c14,1 (DCCIMVAC): write back then invalidate the line containing va. */
01104     __ASM volatile ("mcr p15, 0, %0, c7, c14, 1" : : "r" ((uint32_t)va));
01105 #else
    /* Dead branch: ARM Compiler named-register binding for DCCIMVAC. */
01106     register uint32_t __DCCIMVAC        __ASM("cp15:0:c7:c14:1");
01107     __DCCIMVAC = (uint32_t)va;
01108 #endif
01109     __DMB();     //ensure the ordering of data cache maintenance operations and their effects
01110 }
01111 
01112 /** \brief
01113  * Generic mechanism for cleaning/invalidating the entire data or unified cache to the point of coherency.
01114  */
01115 
01116 /** \brief  __v7_all_cache - helper function
01117 
    Implemented in assembly elsewhere (set/way walk over all cache levels).

    \param [in]  op  Operation selector, as used by the wrappers below:
                     0 = invalidate (DCISW), 1 = clean (DCCSW),
                     2 = clean and invalidate (DCCISW)
01118  */
01119 
01120 extern void __v7_all_cache(uint32_t op);
01121 
01122 
01123 /** \brief  Invalidate the whole D$
01124 
01125     DCISW. Invalidate by Set/Way
01126  */
01127 
01128 __attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_inv_dcache_all(void) {
    // op 0 = invalidate the entire data/unified cache by set/way.
01129     __v7_all_cache(0);
01130 }
01131 
01132 /** \brief  Clean the whole D$
01133 
01134     DCCSW. Clean by Set/Way
01135  */
01136 
01137 __attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_clean_dcache_all(void) {
    // op 1 = clean (write back dirty lines in) the entire data/unified cache by set/way.
01138     __v7_all_cache(1);
01139 }
01140 
01141 /** \brief  Clean and invalidate the whole D$
01142 
01143     DCCISW. Clean and Invalidate by Set/Way
01144  */
01145 
01146 __attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_clean_inv_dcache_all(void) {
    // op 2 = clean then invalidate the entire data/unified cache by set/way.
01147     __v7_all_cache(2);
01148 }
01149 
01150 #include "core_ca_mmu.h"
01151 
01152 #elif (defined (__TASKING__)) /*--------------- TASKING Compiler -----------------*/
01153 
01154 #error TASKING Compiler support not implemented for Cortex-A
01155 
01156 #endif
01157 
01158 /*@} end of CMSIS_Core_RegAccFunctions */
01159 
01160 
01161 #endif /* __CORE_CAFUNC_H__ */