mbed library sources

Dependents:   Encrypted my_mbed lklk CyaSSL_DTLS_Cellular ... more

Embed: (wiki syntax)

« Back to documentation index

Show/hide line numbers core_caFunc.h Source File

core_caFunc.h

Go to the documentation of this file.
00001 /**************************************************************************//**
00002  * @file     core_caFunc.h
00003  * @brief    CMSIS Cortex-A Core Function Access Header File
00004  * @version  V3.10
00005  * @date     30 Oct 2013
00006  *
00007  * @note
00008  *
00009  ******************************************************************************/
00010 /* Copyright (c) 2009 - 2013 ARM LIMITED
00011 
00012    All rights reserved.
00013    Redistribution and use in source and binary forms, with or without
00014    modification, are permitted provided that the following conditions are met:
00015    - Redistributions of source code must retain the above copyright
00016      notice, this list of conditions and the following disclaimer.
00017    - Redistributions in binary form must reproduce the above copyright
00018      notice, this list of conditions and the following disclaimer in the
00019      documentation and/or other materials provided with the distribution.
00020    - Neither the name of ARM nor the names of its contributors may be used
00021      to endorse or promote products derived from this software without
00022      specific prior written permission.
00023    *
00024    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
00025    AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
00026    IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
00027    ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS AND CONTRIBUTORS BE
00028    LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
00029    CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
00030    SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
00031    INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
00032    CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
00033    ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
00034    POSSIBILITY OF SUCH DAMAGE.
00035    ---------------------------------------------------------------------------*/
00036 
00037 
00038 #ifndef __CORE_CAFUNC_H__
00039 #define __CORE_CAFUNC_H__
00040 
00041 
00042 /* ###########################  Core Function Access  ########################### */
00043 /** \ingroup  CMSIS_Core_FunctionInterface
00044     \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
00045   @{
00046  */
00047 
00048 #if   defined ( __CC_ARM ) /*------------------RealView Compiler -----------------*/
00049 /* ARM armcc specific functions */
00050 
00051 #if (__ARMCC_VERSION < 400677)
00052   #error "Please use ARM Compiler Toolchain V4.0.677 or later!"
00053 #endif
00054 
00055 #define MODE_USR 0x10
00056 #define MODE_FIQ 0x11
00057 #define MODE_IRQ 0x12
00058 #define MODE_SVC 0x13
00059 #define MODE_MON 0x16
00060 #define MODE_ABT 0x17
00061 #define MODE_HYP 0x1A
00062 #define MODE_UND 0x1B
00063 #define MODE_SYS 0x1F
00064 
00065 /** \brief  Get APSR Register
00066 
00067     This function returns the content of the APSR Register.
00068 
00069     \return               APSR Register value
00070  */
00071 __STATIC_INLINE uint32_t __get_APSR(void)
00072 {
00073   register uint32_t __regAPSR          __ASM("apsr");
00074   return(__regAPSR);
00075 }
00076 
00077 
00078 /** \brief  Get CPSR Register
00079 
00080     This function returns the content of the CPSR Register.
00081 
00082     \return               CPSR Register value
00083  */
00084 __STATIC_INLINE uint32_t __get_CPSR(void)
00085 {
00086   register uint32_t __regCPSR          __ASM("cpsr");
00087   return(__regCPSR);
00088 }
00089 
00090 /** \brief  Set Stack Pointer
00091 
00092     This function assigns the given value to the current stack pointer.
00093 
00094     \param [in]    topOfStack  Stack Pointer value to set
00095  */
00096 register uint32_t __regSP              __ASM("sp");
00097 __STATIC_INLINE void __set_SP(uint32_t topOfStack)
00098 {
00099     __regSP = topOfStack;
00100 }
00101 
00102 
00103 /** \brief  Get link register
00104 
00105     This function returns the value of the link register
00106 
00107     \return    Value of link register
00108  */
00109 register uint32_t __reglr         __ASM("lr");
00110 __STATIC_INLINE uint32_t __get_LR(void)
00111 {
00112   return(__reglr);
00113 }
00114 
00115 /** \brief  Set link register
00116 
00117     This function sets the value of the link register
00118 
00119     \param [in]    lr  LR value to set
00120  */
00121 __STATIC_INLINE void __set_LR(uint32_t lr)
00122 {
00123   __reglr = lr;
00124 }
00125 
00126 /** \brief  Set Process Stack Pointer
00127 
00128     This function assigns the given value to the USR/SYS Stack Pointer (PSP).
00129 
00130     \param [in]    topOfProcStack  USR/SYS Stack Pointer value to set
00131  */
00132 __STATIC_ASM void __set_PSP(uint32_t topOfProcStack)
00133 {
00134     ARM
00135     PRESERVE8
00136 
00137     BIC     R0, R0, #7  ;ensure stack is 8-byte aligned
00138     MRS     R1, CPSR
00139     CPS     #MODE_SYS   ;no effect in USR mode
00140     MOV     SP, R0
00141     MSR     CPSR_c, R1  ;no effect in USR mode
00142     ISB
00143     BX      LR
00144 
00145 }
00146 
00147 /** \brief  Set User Mode
00148 
00149     This function changes the processor state to User Mode
00150  */
00151 __STATIC_ASM void __set_CPS_USR(void)
00152 {
00153     ARM 
00154 
00155     CPS  #MODE_USR  
00156     BX   LR
00157 }
00158 
00159 
00160 /** \brief  Enable FIQ
00161 
00162     This function enables FIQ interrupts by clearing the F-bit in the CPSR.
00163     Can only be executed in Privileged modes.
00164  */
00165 #define __enable_fault_irq                __enable_fiq
00166 
00167 
00168 /** \brief  Disable FIQ
00169 
00170     This function disables FIQ interrupts by setting the F-bit in the CPSR.
00171     Can only be executed in Privileged modes.
00172  */
00173 #define __disable_fault_irq               __disable_fiq
00174 
00175 
00176 /** \brief  Get FPSCR
00177 
00178     This function returns the current value of the Floating Point Status/Control register.
00179 
00180     \return               Floating Point Status/Control register value
00181  */
00182 __STATIC_INLINE uint32_t __get_FPSCR(void)
00183 {
00184 #if (__FPU_PRESENT == 1) && (__FPU_USED == 1)
00185   register uint32_t __regfpscr         __ASM("fpscr");
00186   return(__regfpscr);
00187 #else
00188    return(0);
00189 #endif
00190 }
00191 
00192 
00193 /** \brief  Set FPSCR
00194 
00195     This function assigns the given value to the Floating Point Status/Control register.
00196 
00197     \param [in]    fpscr  Floating Point Status/Control value to set
00198  */
00199 __STATIC_INLINE void __set_FPSCR(uint32_t fpscr)
00200 {
00201 #if (__FPU_PRESENT == 1) && (__FPU_USED == 1)
00202   register uint32_t __regfpscr         __ASM("fpscr");
00203   __regfpscr = (fpscr);
00204 #endif
00205 }
00206 
00207 /** \brief  Get FPEXC
00208 
00209     This function returns the current value of the Floating Point Exception Control register.
00210 
00211     \return               Floating Point Exception Control register value
00212  */
00213 __STATIC_INLINE uint32_t __get_FPEXC(void)
00214 {
00215 #if (__FPU_PRESENT == 1)
00216   register uint32_t __regfpexc         __ASM("fpexc");
00217   return(__regfpexc);
00218 #else
00219    return(0);
00220 #endif
00221 }
00222 
00223 
00224 /** \brief  Set FPEXC
00225 
00226     This function assigns the given value to the Floating Point Exception Control register.
00227 
00228     \param [in]    fpscr  Floating Point Exception Control value to set
00229  */
00230 __STATIC_INLINE void __set_FPEXC(uint32_t fpexc)
00231 {
00232 #if (__FPU_PRESENT == 1)
00233   register uint32_t __regfpexc         __ASM("fpexc");
00234   __regfpexc = (fpexc);
00235 #endif
00236 }
00237 
00238 /** \brief  Get CPACR
00239 
00240     This function returns the current value of the Coprocessor Access Control register.
00241 
00242     \return               Coprocessor Access Control register value
00243  */
00244 __STATIC_INLINE uint32_t __get_CPACR(void)
00245 {
00246     register uint32_t __regCPACR         __ASM("cp15:0:c1:c0:2");
00247     return __regCPACR;
00248 }
00249 
00250 /** \brief  Set CPACR
00251 
00252     This function assigns the given value to the Coprocessor Access Control register.
00253 
00254     \param [in]    cpacr  Coprocessor Acccess Control value to set
00255  */
00256 __STATIC_INLINE void __set_CPACR(uint32_t cpacr)
00257 {
00258     register uint32_t __regCPACR         __ASM("cp15:0:c1:c0:2");
00259     __regCPACR = cpacr;
00260     __ISB();
00261 }
00262 
00263 /** \brief  Get CBAR
00264 
00265     This function returns the value of the Configuration Base Address register.
00266 
00267     \return               Configuration Base Address register value
00268  */
00269 __STATIC_INLINE uint32_t __get_CBAR() {
00270     register uint32_t __regCBAR         __ASM("cp15:4:c15:c0:0");
00271     return(__regCBAR);
00272 }
00273 
00274 /** \brief  Get TTBR0
00275 
00276     This function returns the value of the Translation Table Base Register 0.
00277 
00278     \return               Translation Table Base Register 0 value
00279  */
00280 __STATIC_INLINE uint32_t __get_TTBR0() {
00281     register uint32_t __regTTBR0        __ASM("cp15:0:c2:c0:0");
00282     return(__regTTBR0);
00283 }
00284 
00285 /** \brief  Set TTBR0
00286 
00287     This function assigns the given value to the Translation Table Base Register 0.
00288 
00289     \param [in]    ttbr0  Translation Table Base Register 0 value to set
00290  */
00291 __STATIC_INLINE void __set_TTBR0(uint32_t ttbr0) {
00292     register uint32_t __regTTBR0        __ASM("cp15:0:c2:c0:0");
00293     __regTTBR0 = ttbr0;
00294     __ISB();
00295 }
00296 
00297 /** \brief  Get DACR
00298 
00299     This function returns the value of the Domain Access Control Register.
00300 
00301     \return               Domain Access Control Register value
00302  */
00303 __STATIC_INLINE uint32_t __get_DACR() {
00304     register uint32_t __regDACR         __ASM("cp15:0:c3:c0:0");
00305     return(__regDACR);
00306 }
00307 
00308 /** \brief  Set DACR
00309 
00310     This function assigns the given value to the Domain Access Control Register.
00311 
00312     \param [in]    dacr   Domain Access Control Register value to set
00313  */
00314 __STATIC_INLINE void __set_DACR(uint32_t dacr) {
00315     register uint32_t __regDACR         __ASM("cp15:0:c3:c0:0");
00316     __regDACR = dacr;
00317     __ISB();
00318 }
00319 
00320 /******************************** Cache and BTAC enable  ****************************************************/
00321 
00322 /** \brief  Set SCTLR
00323 
00324     This function assigns the given value to the System Control Register.
00325 
00326     \param [in]    sctlr  System Control Register value to set
00327  */
00328 __STATIC_INLINE void __set_SCTLR(uint32_t sctlr)
00329 {
00330     register uint32_t __regSCTLR         __ASM("cp15:0:c1:c0:0");
00331     __regSCTLR = sctlr;
00332 }
00333 
00334 /** \brief  Get SCTLR
00335 
00336     This function returns the value of the System Control Register.
00337 
00338     \return               System Control Register value
00339  */
00340 __STATIC_INLINE uint32_t __get_SCTLR() {
00341     register uint32_t __regSCTLR         __ASM("cp15:0:c1:c0:0");
00342     return(__regSCTLR);
00343 }
00344 
00345 /** \brief  Enable Caches
00346 
00347     Enable Caches
00348  */
00349 __STATIC_INLINE void __enable_caches(void) {
00350     // Set I bit 12 to enable I Cache
00351     // Set C bit  2 to enable D Cache
00352     __set_SCTLR( __get_SCTLR() | (1 << 12) | (1 << 2));
00353 }
00354 
00355 /** \brief  Disable Caches
00356 
00357     Disable Caches
00358  */
00359 __STATIC_INLINE void __disable_caches(void) {
00360     // Clear I bit 12 to disable I Cache
00361     // Clear C bit  2 to disable D Cache
00362     __set_SCTLR( __get_SCTLR() & ~(1 << 12) & ~(1 << 2));
00363     __ISB();
00364 }
00365 
00366 /** \brief  Enable BTAC
00367 
00368     Enable BTAC
00369  */
00370 __STATIC_INLINE void __enable_btac(void) {
00371     // Set Z bit 11 to enable branch prediction
00372     __set_SCTLR( __get_SCTLR() | (1 << 11));
00373     __ISB();
00374 }
00375 
00376 /** \brief  Disable BTAC
00377 
00378     Disable BTAC
00379  */
00380 __STATIC_INLINE void __disable_btac(void) {
00381     // Clear Z bit 11 to disable branch prediction
00382     __set_SCTLR( __get_SCTLR() & ~(1 << 11));
00383 }
00384 
00385 
00386 /** \brief  Enable MMU
00387 
00388     Enable MMU
00389  */
00390 __STATIC_INLINE void __enable_mmu(void) {
00391     // Set M bit 0 to enable the MMU
00392     // Set AFE bit to enable simplified access permissions model
00393     // Clear TRE bit to disable TEX remap and A bit to disable strict alignment fault checking
00394     __set_SCTLR( (__get_SCTLR() & ~(1 << 28) & ~(1 << 1)) | 1 | (1 << 29));
00395     __ISB();
00396 }
00397 
00398 /** \brief  Disable MMU
00399 
00400     Disable MMU
00401  */
00402 __STATIC_INLINE void __disable_mmu(void) {
00403     // Clear M bit 0 to disable the MMU
00404     __set_SCTLR( __get_SCTLR() & ~1);
00405     __ISB();
00406 }
00407 
00408 /******************************** TLB maintenance operations ************************************************/
00409 /** \brief  Invalidate the whole tlb
00410 
00411     TLBIALL. Invalidate the whole tlb
00412  */
00413 
00414 __STATIC_INLINE void __ca9u_inv_tlb_all(void) {
00415     register uint32_t __TLBIALL         __ASM("cp15:0:c8:c7:0");
00416     __TLBIALL = 0;
00417     __DSB();
00418     __ISB();
00419 }
00420 
00421 /******************************** BTB maintenance operations ************************************************/
00422 /** \brief  Invalidate entire branch predictor array
00423 
00424     BPIALL. Branch Predictor Invalidate All.
00425  */
00426 
00427 __STATIC_INLINE void __v7_inv_btac(void) {
00428     register uint32_t __BPIALL          __ASM("cp15:0:c7:c5:6");
00429     __BPIALL  = 0;
00430     __DSB();     //ensure completion of the invalidation
00431     __ISB();     //ensure instruction fetch path sees new state
00432 }
00433 
00434 
00435 /******************************** L1 cache operations ******************************************************/
00436 
00437 /** \brief  Invalidate the whole I$
00438 
00439     ICIALLU. Instruction Cache Invalidate All to PoU
00440  */
00441 __STATIC_INLINE void __v7_inv_icache_all(void) {
00442     register uint32_t __ICIALLU         __ASM("cp15:0:c7:c5:0");
00443     __ICIALLU = 0;
00444     __DSB();     //ensure completion of the invalidation
00445     __ISB();     //ensure instruction fetch path sees new I cache state
00446 }
00447 
00448 /** \brief  Clean D$ by MVA
00449 
00450     DCCMVAC. Data cache clean by MVA to PoC
00451  */
00452 __STATIC_INLINE void __v7_clean_dcache_mva(void *va) {
00453     register uint32_t __DCCMVAC         __ASM("cp15:0:c7:c10:1");
00454     __DCCMVAC = (uint32_t)va;
00455     __DMB();     //ensure the ordering of data cache maintenance operations and their effects
00456 }
00457 
00458 /** \brief  Invalidate D$ by MVA
00459 
00460     DCIMVAC. Data cache invalidate by MVA to PoC
00461  */
00462 __STATIC_INLINE void __v7_inv_dcache_mva(void *va) {
00463     register uint32_t __DCIMVAC         __ASM("cp15:0:c7:c6:1");
00464     __DCIMVAC = (uint32_t)va;
00465     __DMB();     //ensure the ordering of data cache maintenance operations and their effects
00466 }
00467 
00468 /** \brief  Clean and Invalidate D$ by MVA
00469 
00470     DCCIMVAC. Data cache clean and invalidate by MVA to PoC
00471  */
00472 __STATIC_INLINE void __v7_clean_inv_dcache_mva(void *va) {
00473     register uint32_t __DCCIMVAC        __ASM("cp15:0:c7:c14:1");
00474     __DCCIMVAC = (uint32_t)va;
00475     __DMB();     //ensure the ordering of data cache maintenance operations and their effects
00476 }
00477 
00478 /** \brief  Clean and Invalidate the entire data or unified cache
00479 
00480     Generic mechanism for cleaning/invalidating the entire data or unified cache to the point of coherency.
00481  */
00482 #pragma push
00483 #pragma arm
00484 __STATIC_ASM void __v7_all_cache(uint32_t op) {
00485         ARM 
00486 
00487         PUSH    {R4-R11}
00488 
00489         MRC     p15, 1, R6, c0, c0, 1      // Read CLIDR
00490         ANDS    R3, R6, #0x07000000        // Extract coherency level
00491         MOV     R3, R3, LSR #23            // Total cache levels << 1
00492         BEQ     Finished                   // If 0, no need to clean
00493 
00494         MOV     R10, #0                    // R10 holds current cache level << 1
00495 Loop1   ADD     R2, R10, R10, LSR #1       // R2 holds cache "Set" position
00496         MOV     R1, R6, LSR R2             // Bottom 3 bits are the Cache-type for this level
00497         AND     R1, R1, #7                 // Isolate those lower 3 bits
00498         CMP     R1, #2
00499         BLT     Skip                       // No cache or only instruction cache at this level
00500 
00501         MCR     p15, 2, R10, c0, c0, 0     // Write the Cache Size selection register
00502         ISB                                // ISB to sync the change to the CacheSizeID reg
00503         MRC     p15, 1, R1, c0, c0, 0      // Reads current Cache Size ID register
00504         AND     R2, R1, #7                 // Extract the line length field
00505         ADD     R2, R2, #4                 // Add 4 for the line length offset (log2 16 bytes)
00506         LDR     R4, =0x3FF
00507         ANDS    R4, R4, R1, LSR #3         // R4 is the max number on the way size (right aligned)
00508         CLZ     R5, R4                     // R5 is the bit position of the way size increment
00509         LDR     R7, =0x7FFF
00510         ANDS    R7, R7, R1, LSR #13        // R7 is the max number of the index size (right aligned)
00511 
00512 Loop2   MOV     R9, R4                     // R9 working copy of the max way size (right aligned)
00513 
00514 Loop3   ORR     R11, R10, R9, LSL R5       // Factor in the Way number and cache number into R11
00515         ORR     R11, R11, R7, LSL R2       // Factor in the Set number
00516         CMP     R0, #0
00517         BNE     Dccsw
00518         MCR     p15, 0, R11, c7, c6, 2     // DCISW. Invalidate by Set/Way
00519         B       cont
00520 Dccsw   CMP     R0, #1
00521         BNE     Dccisw
00522         MCR     p15, 0, R11, c7, c10, 2    // DCCSW. Clean by Set/Way
00523         B       cont
00524 Dccisw  MCR     p15, 0, R11, c7, c14, 2    // DCCISW. Clean and Invalidate by Set/Way
00525 cont    SUBS    R9, R9, #1                 // Decrement the Way number
00526         BGE     Loop3
00527         SUBS    R7, R7, #1                 // Decrement the Set number
00528         BGE     Loop2
00529 Skip    ADD     R10, R10, #2               // Increment the cache number
00530         CMP     R3, R10
00531         BGT     Loop1
00532 
00533 Finished
00534         DSB
00535         POP    {R4-R11}
00536         BX     lr
00537 
00538 }
00539 #pragma pop
00540 
00541 
00542 /** \brief  Invalidate the whole D$
00543 
00544     DCISW. Invalidate by Set/Way
00545  */
00546 
00547 __STATIC_INLINE void __v7_inv_dcache_all(void) {
00548     __v7_all_cache(0);
00549 }
00550 
00551 /** \brief  Clean the whole D$
00552 
00553     DCCSW. Clean by Set/Way
00554  */
00555 
00556 __STATIC_INLINE void __v7_clean_dcache_all(void) {
00557     __v7_all_cache(1);
00558 }
00559 
00560 /** \brief  Clean and invalidate the whole D$
00561 
00562     DCCISW. Clean and Invalidate by Set/Way
00563  */
00564 
00565 __STATIC_INLINE void __v7_clean_inv_dcache_all(void) {
00566     __v7_all_cache(2);
00567 }
00568 
00569 #include "core_ca_mmu.h"
00570 
00571 #elif (defined (__ICCARM__)) /*---------------- ICC Compiler ---------------------*/
00572 
00573 #error IAR Compiler support not implemented for Cortex-A
00574 
00575 #elif (defined (__GNUC__)) /*------------------ GNU Compiler ---------------------*/
00576 /* GNU gcc specific functions */
00577 
00578 #define MODE_USR 0x10
00579 #define MODE_FIQ 0x11
00580 #define MODE_IRQ 0x12
00581 #define MODE_SVC 0x13
00582 #define MODE_MON 0x16
00583 #define MODE_ABT 0x17
00584 #define MODE_HYP 0x1A
00585 #define MODE_UND 0x1B
00586 #define MODE_SYS 0x1F
00587 
00588 
00589 __attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_irq(void)
00590 {
00591     __ASM volatile ("cpsie i");
00592 }
00593 
00594 /** \brief  Disable IRQ Interrupts
00595 
00596   This function disables IRQ interrupts by setting the I-bit in the CPSR.
00597   Can only be executed in Privileged modes.
00598  */
00599 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __disable_irq(void)
00600 {
00601     uint32_t result;
00602 
00603     __ASM volatile ("mrs %0, cpsr" : "=r" (result));
00604     __ASM volatile ("cpsid i");
00605     return(result & 0x80);
00606 }
00607 
00608 
00609 /** \brief  Get APSR Register
00610 
00611     This function returns the content of the APSR Register.
00612 
00613     \return               APSR Register value
00614  */
00615 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_APSR(void)
00616 {
00617 #if 1
00618   register uint32_t __regAPSR;
00619   __ASM volatile ("mrs %0, apsr" : "=r" (__regAPSR) );
00620 #else
00621   register uint32_t __regAPSR          __ASM("apsr");
00622 #endif
00623   return(__regAPSR);
00624 }
00625 
00626 
00627 /** \brief  Get CPSR Register
00628 
00629     This function returns the content of the CPSR Register.
00630 
00631     \return               CPSR Register value
00632  */
00633 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_CPSR(void)
00634 {
00635 #if 1
00636   register uint32_t __regCPSR;
00637   __ASM volatile ("mrs %0, cpsr" : "=r" (__regCPSR));
00638 #else
00639   register uint32_t __regCPSR          __ASM("cpsr");
00640 #endif
00641   return(__regCPSR);
00642 }
00643 
00644 #if 0
00645 /** \brief  Set Stack Pointer
00646 
00647     This function assigns the given value to the current stack pointer.
00648 
00649     \param [in]    topOfStack  Stack Pointer value to set
00650  */
00651 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_SP(uint32_t topOfStack)
00652 {
00653     register uint32_t __regSP       __ASM("sp");
00654     __regSP = topOfStack;
00655 }
00656 #endif
00657 
00658 /** \brief  Get link register
00659 
00660     This function returns the value of the link register
00661 
00662     \return    Value of link register
00663  */
00664 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_LR(void)
00665 {
00666   register uint32_t __reglr         __ASM("lr");
00667   return(__reglr);
00668 }
00669 
00670 #if 0
00671 /** \brief  Set link register
00672 
00673     This function sets the value of the link register
00674 
00675     \param [in]    lr  LR value to set
00676  */
00677 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_LR(uint32_t lr)
00678 {
00679   register uint32_t __reglr         __ASM("lr");
00680   __reglr = lr;
00681 }
00682 #endif
00683 
00684 /** \brief  Set Process Stack Pointer
00685 
00686     This function assigns the given value to the USR/SYS Stack Pointer (PSP).
00687 
00688     \param [in]    topOfProcStack  USR/SYS Stack Pointer value to set
00689  */
00690 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_PSP(uint32_t topOfProcStack)
00691 {
00692     __asm__ volatile (
00693     ".ARM;"
00694     ".eabi_attribute Tag_ABI_align8_preserved,1;"
00695 
00696     "BIC     R0, R0, #7;" /* ;ensure stack is 8-byte aligned */
00697     "MRS     R1, CPSR;"
00698     "CPS     %0;"         /* ;no effect in USR mode */
00699     "MOV     SP, R0;"
00700     "MSR     CPSR_c, R1;" /* ;no effect in USR mode */
00701     "ISB;"
00702     //"BX      LR;"
00703     :
00704     : "i"(MODE_SYS)
00705     : "r0", "r1");
00706     return;
00707 }
00708 
00709 /** \brief  Set User Mode
00710 
00711     This function changes the processor state to User Mode
00712  */
00713 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_CPS_USR(void)
00714 {
00715     __asm__ volatile (
00716     ".ARM;"
00717 
00718     "CPS  %0;"
00719     //"BX   LR;"
00720     :
00721     : "i"(MODE_USR)
00722     : );
00723     return;
00724 }
00725 
00726 
00727 /** \brief  Enable FIQ
00728 
00729     This function enables FIQ interrupts by clearing the F-bit in the CPSR.
00730     Can only be executed in Privileged modes.
00731  */
00732 #define __enable_fault_irq()                __asm__ volatile ("cpsie f")
00733 
00734 
00735 /** \brief  Disable FIQ
00736 
00737     This function disables FIQ interrupts by setting the F-bit in the CPSR.
00738     Can only be executed in Privileged modes.
00739  */
00740 #define __disable_fault_irq()               __asm__ volatile ("cpsid f")
00741 
00742 
00743 /** \brief  Get FPSCR
00744 
00745     This function returns the current value of the Floating Point Status/Control register.
00746 
00747     \return               Floating Point Status/Control register value
00748  */
00749 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_FPSCR(void)
00750 {
00751 #if (__FPU_PRESENT == 1) && (__FPU_USED == 1)
00752 #if 1
00753     uint32_t result;
00754 
00755     __ASM volatile ("vmrs %0, fpscr" : "=r" (result) );
00756     return (result);
00757 #else
00758   register uint32_t __regfpscr         __ASM("fpscr");
00759   return(__regfpscr);
00760 #endif
00761 #else
00762    return(0);
00763 #endif
00764 }
00765 
00766 
00767 /** \brief  Set FPSCR
00768 
00769     This function assigns the given value to the Floating Point Status/Control register.
00770 
00771     \param [in]    fpscr  Floating Point Status/Control value to set
00772  */
00773 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_FPSCR(uint32_t fpscr)
00774 {
00775 #if (__FPU_PRESENT == 1) && (__FPU_USED == 1)
00776 #if 1
00777     __ASM volatile ("vmsr fpscr, %0" : : "r" (fpscr) );
00778 #else
00779   register uint32_t __regfpscr         __ASM("fpscr");
00780   __regfpscr = (fpscr);
00781 #endif
00782 #endif
00783 }
00784 
00785 /** \brief  Get FPEXC
00786 
00787     This function returns the current value of the Floating Point Exception Control register.
00788 
00789     \return               Floating Point Exception Control register value
00790  */
00791 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_FPEXC(void)
00792 {
00793 #if (__FPU_PRESENT == 1)
00794 #if 1
00795     uint32_t result;
00796 
00797     __ASM volatile ("vmrs %0, fpexc" : "=r" (result));
00798     return (result);
00799 #else
00800   register uint32_t __regfpexc         __ASM("fpexc");
00801   return(__regfpexc);
00802 #endif
00803 #else
00804    return(0);
00805 #endif
00806 }
00807 
00808 
00809 /** \brief  Set FPEXC
00810 
00811     This function assigns the given value to the Floating Point Exception Control register.
00812 
00813     \param [in]    fpscr  Floating Point Exception Control value to set
00814  */
00815 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_FPEXC(uint32_t fpexc)
00816 {
00817 #if (__FPU_PRESENT == 1)
00818 #if 1
00819     __ASM volatile ("vmsr fpexc, %0" : : "r" (fpexc));
00820 #else
00821   register uint32_t __regfpexc         __ASM("fpexc");
00822   __regfpexc = (fpexc);
00823 #endif
00824 #endif
00825 }
00826 
00827 /** \brief  Get CPACR
00828 
00829     This function returns the current value of the Coprocessor Access Control register.
00830 
00831     \return               Coprocessor Access Control register value
00832  */
00833 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_CPACR(void)
00834 {
00835 #if 1
00836     register uint32_t __regCPACR;
00837     __ASM volatile ("mrc p15, 0, %0, c1, c0, 2" : "=r" (__regCPACR));
00838 #else
00839     register uint32_t __regCPACR         __ASM("cp15:0:c1:c0:2");
00840 #endif
00841     return __regCPACR;
00842 }
00843 
00844 /** \brief  Set CPACR
00845 
00846     This function assigns the given value to the Coprocessor Access Control register.
00847 
00848     \param [in]    cpacr  Coprocessor Acccess Control value to set
00849  */
00850 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_CPACR(uint32_t cpacr)
00851 {
00852 #if 1
00853     __ASM volatile ("mcr p15, 0, %0, c1, c0, 2" : : "r" (cpacr));
00854 #else
00855     register uint32_t __regCPACR         __ASM("cp15:0:c1:c0:2");
00856     __regCPACR = cpacr;
00857 #endif
00858     __ISB();
00859 }
00860 
00861 /** \brief  Get CBAR
00862 
00863     This function returns the value of the Configuration Base Address register.
00864 
00865     \return               Configuration Base Address register value
00866  */
00867 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_CBAR() {
00868 #if 1
00869     register uint32_t __regCBAR;
00870     __ASM volatile ("mrc p15, 4, %0, c15, c0, 0" : "=r" (__regCBAR));
00871 #else
00872     register uint32_t __regCBAR         __ASM("cp15:4:c15:c0:0");
00873 #endif
00874     return(__regCBAR);
00875 }
00876 
00877 /** \brief  Get TTBR0
00878 
00879     This function returns the value of the Translation Table Base Register 0.
00880 
00881     \return               Translation Table Base Register 0 value
00882  */
00883 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_TTBR0() {
00884 #if 1
00885     register uint32_t __regTTBR0;
00886     __ASM volatile ("mrc p15, 0, %0, c2, c0, 0" : "=r" (__regTTBR0));
00887 #else
00888     register uint32_t __regTTBR0        __ASM("cp15:0:c2:c0:0");
00889 #endif
00890     return(__regTTBR0);
00891 }
00892 
00893 /** \brief  Set TTBR0
00894 
00895     This function assigns the given value to the Translation Table Base Register 0.
00896 
00897     \param [in]    ttbr0  Translation Table Base Register 0 value to set
00898  */
00899 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_TTBR0(uint32_t ttbr0) {
00900 #if 1
00901     __ASM volatile ("mcr p15, 0, %0, c2, c0, 0" : : "r" (ttbr0));
00902 #else
00903     register uint32_t __regTTBR0        __ASM("cp15:0:c2:c0:0");
00904     __regTTBR0 = ttbr0;
00905 #endif
00906     __ISB();
00907 }
00908 
00909 /** \brief  Get DACR
00910 
00911     This function returns the value of the Domain Access Control Register.
00912 
00913     \return               Domain Access Control Register value
00914  */
00915 __attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_DACR() {
00916 #if 1
00917     register uint32_t __regDACR;
00918     __ASM volatile ("mrc p15, 0, %0, c3, c0, 0" : "=r" (__regDACR));
00919 #else
00920     register uint32_t __regDACR         __ASM("cp15:0:c3:c0:0");
00921 #endif
00922     return(__regDACR);
00923 }
00924 
00925 /** \brief  Set DACR
00926 
00927     This function assigns the given value to the Domain Access Control Register.
00928 
00929     \param [in]    dacr   Domain Access Control Register value to set
00930  */
00931 __attribute__( ( always_inline ) ) __STATIC_INLINE void __set_DACR(uint32_t dacr) {
00932 #if 1
00933     __ASM volatile ("mcr p15, 0, %0, c3, c0, 0" : : "r" (dacr));
00934 #else
00935     register uint32_t __regDACR         __ASM("cp15:0:c3:c0:0");
00936     __regDACR = dacr;
00937 #endif
00938     __ISB();
00939 }
00940 
00941 /******************************** Cache and BTAC enable  ****************************************************/
00942 
00943 /** \brief  Set SCTLR
00944 
00945     This function assigns the given value to the System Control Register.
00946 
00947     \param [in]    sctlr  System Control Register value to set
00948  */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __set_SCTLR(uint32_t sctlr)
{
#if 1
    /* GCC extended-asm form: MCR p15, 0, <Rt>, c1, c0, 0 writes SCTLR.
       No ISB here: callers that need the new control state visible
       immediately (cache/MMU enable helpers below) issue their own. */
    __ASM volatile ("mcr p15, 0, %0, c1, c0, 0" : : "r" (sctlr));
#else
    /* ARM Compiler named-register alternative (kept for reference, not compiled) */
    register uint32_t __regSCTLR         __ASM("cp15:0:c1:c0:0");
    __regSCTLR = sctlr;
#endif
}
00958 
00959 /** \brief  Get SCTLR
00960 
00961     This function returns the value of the System Control Register.
00962 
00963     \return               System Control Register value
00964  */
__attribute__( ( always_inline ) ) __STATIC_INLINE uint32_t __get_SCTLR() {
#if 1
    /* GCC extended-asm form: MRC p15, 0, <Rt>, c1, c0, 0 reads SCTLR */
    register uint32_t __regSCTLR;
    __ASM volatile ("mrc p15, 0, %0, c1, c0, 0" : "=r" (__regSCTLR));
#else
    /* ARM Compiler named-register alternative (kept for reference, not compiled) */
    register uint32_t __regSCTLR         __ASM("cp15:0:c1:c0:0");
#endif
    return(__regSCTLR);
}
00974 
00975 /** \brief  Enable Caches
00976 
00977     Enable Caches
00978  */
00979 __attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_caches(void) {
00980     // Set I bit 12 to enable I Cache
00981     // Set C bit  2 to enable D Cache
00982     __set_SCTLR( __get_SCTLR() | (1 << 12) | (1 << 2));
00983 }
00984 
00985 /** \brief  Disable Caches
00986 
00987     Disable Caches
00988  */
00989 __attribute__( ( always_inline ) ) __STATIC_INLINE void __disable_caches(void) {
00990     // Clear I bit 12 to disable I Cache
00991     // Clear C bit  2 to disable D Cache
00992     __set_SCTLR( __get_SCTLR() & ~(1 << 12) & ~(1 << 2));
00993     __ISB();
00994 }
00995 
00996 /** \brief  Enable BTAC
00997 
00998     Enable BTAC
00999  */
01000 __attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_btac(void) {
01001     // Set Z bit 11 to enable branch prediction
01002     __set_SCTLR( __get_SCTLR() | (1 << 11));
01003     __ISB();
01004 }
01005 
01006 /** \brief  Disable BTAC
01007 
01008     Disable BTAC
01009  */
01010 __attribute__( ( always_inline ) ) __STATIC_INLINE void __disable_btac(void) {
01011     // Clear Z bit 11 to disable branch prediction
01012     __set_SCTLR( __get_SCTLR() & ~(1 << 11));
01013 }
01014 
01015 
01016 /** \brief  Enable MMU
01017 
01018     Enable MMU
01019  */
01020 __attribute__( ( always_inline ) ) __STATIC_INLINE void __enable_mmu(void) {
01021     // Set M bit 0 to enable the MMU
01022     // Set AFE bit to enable simplified access permissions model
01023     // Clear TRE bit to disable TEX remap and A bit to disable strict alignment fault checking
01024     __set_SCTLR( (__get_SCTLR() & ~(1 << 28) & ~(1 << 1)) | 1 | (1 << 29));
01025     __ISB();
01026 }
01027 
01028 /** \brief  Disable MMU
01029 
01030     Disable MMU
01031  */
01032 __attribute__( ( always_inline ) ) __STATIC_INLINE void __disable_mmu(void) {
01033     // Clear M bit 0 to disable the MMU
01034     __set_SCTLR( __get_SCTLR() & ~1);
01035     __ISB();
01036 }
01037 
01038 /******************************** TLB maintenance operations ************************************************/
01039 /** \brief  Invalidate the whole tlb
01040 
01041     TLBIALL. Invalidate the whole tlb
01042  */
01043 
__attribute__( ( always_inline ) ) __STATIC_INLINE void __ca9u_inv_tlb_all(void) {
#if 1
    /* TLBIALL: MCR p15, 0, <Rt>, c8, c7, 0 — written value is ignored, 0 by convention */
    __ASM volatile ("mcr p15, 0, %0, c8, c7, 0" : : "r" (0));
#else
    /* ARM Compiler named-register alternative (kept for reference, not compiled) */
    register uint32_t __TLBIALL         __ASM("cp15:0:c8:c7:0");
    __TLBIALL = 0;
#endif
    __DSB();    /* ensure completion of the TLB invalidation */
    __ISB();    /* ensure subsequent instructions use the invalidated TLB state */
}
01054 
01055 /******************************** BTB maintenance operations ************************************************/
01056 /** \brief  Invalidate entire branch predictor array
01057 
01058     BPIALL. Branch Predictor Invalidate All.
01059  */
01060 
__attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_inv_btac(void) {
#if 1
    /* BPIALL: MCR p15, 0, <Rt>, c7, c5, 6 — written value is ignored, 0 by convention */
    __ASM volatile ("mcr p15, 0, %0, c7, c5, 6" : : "r" (0));
#else
    /* ARM Compiler named-register alternative (kept for reference, not compiled) */
    register uint32_t __BPIALL          __ASM("cp15:0:c7:c5:6");
    __BPIALL  = 0;
#endif
    __DSB();     //ensure completion of the invalidation
    __ISB();     //ensure instruction fetch path sees new state
}
01071 
01072 
01073 /******************************** L1 cache operations ******************************************************/
01074 
01075 /** \brief  Invalidate the whole I$
01076 
01077     ICIALLU. Instruction Cache Invalidate All to PoU
01078  */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_inv_icache_all(void) {
#if 1
    /* ICIALLU: MCR p15, 0, <Rt>, c7, c5, 0 — written value is ignored, 0 by convention */
    __ASM volatile ("mcr p15, 0, %0, c7, c5, 0" : : "r" (0));
#else
    /* ARM Compiler named-register alternative (kept for reference, not compiled) */
    register uint32_t __ICIALLU         __ASM("cp15:0:c7:c5:0");
    __ICIALLU = 0;
#endif
    __DSB();     //ensure completion of the invalidation
    __ISB();     //ensure instruction fetch path sees new I cache state
}
01089 
01090 /** \brief  Clean D$ by MVA
01091 
01092     DCCMVAC. Data cache clean by MVA to PoC
01093  */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_clean_dcache_mva(void *va) {
#if 1
    /* DCCMVAC: MCR p15, 0, <Rt>, c7, c10, 1 — cleans the line containing va to PoC */
    __ASM volatile ("mcr p15, 0, %0, c7, c10, 1" : : "r" ((uint32_t)va));
#else
    /* ARM Compiler named-register alternative (kept for reference, not compiled) */
    register uint32_t __DCCMVAC         __ASM("cp15:0:c7:c10:1");
    __DCCMVAC = (uint32_t)va;
#endif
    __DMB();     //ensure the ordering of data cache maintenance operations and their effects
}
01103 
01104 /** \brief  Invalidate D$ by MVA
01105 
01106     DCIMVAC. Data cache invalidate by MVA to PoC
01107  */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_inv_dcache_mva(void *va) {
#if 1
    /* DCIMVAC: MCR p15, 0, <Rt>, c7, c6, 1 — invalidates the line containing va to PoC */
    __ASM volatile ("mcr p15, 0, %0, c7, c6, 1" : : "r" ((uint32_t)va));
#else
    /* ARM Compiler named-register alternative (kept for reference, not compiled) */
    register uint32_t __DCIMVAC         __ASM("cp15:0:c7:c6:1");
    __DCIMVAC = (uint32_t)va;
#endif
    __DMB();     //ensure the ordering of data cache maintenance operations and their effects
}
01117 
01118 /** \brief  Clean and Invalidate D$ by MVA
01119 
01120     DCCIMVAC. Data cache clean and invalidate by MVA to PoC
01121  */
__attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_clean_inv_dcache_mva(void *va) {
#if 1
    /* DCCIMVAC: MCR p15, 0, <Rt>, c7, c14, 1 — cleans & invalidates the line containing va to PoC */
    __ASM volatile ("mcr p15, 0, %0, c7, c14, 1" : : "r" ((uint32_t)va));
#else
    /* ARM Compiler named-register alternative (kept for reference, not compiled) */
    register uint32_t __DCCIMVAC        __ASM("cp15:0:c7:c14:1");
    __DCCIMVAC = (uint32_t)va;
#endif
    __DMB();     //ensure the ordering of data cache maintenance operations and their effects
}
01131 
01132 /** \brief  Clean and Invalidate the entire data or unified cache
01133 
01134     Generic mechanism for cleaning/invalidating the entire data or unified cache to the point of coherency.
01135  */
01136 extern void __v7_all_cache(uint32_t op);
01137 
01138 
01139 /** \brief  Invalidate the whole D$
01140 
01141     DCISW. Invalidate by Set/Way
01142  */
01143 
01144 __attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_inv_dcache_all(void) {
01145     __v7_all_cache(0);
01146 }
01147 
01148 /** \brief  Clean the whole D$
01149 
01150     DCCSW. Clean by Set/Way
01151  */
01152 
01153 __attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_clean_dcache_all(void) {
01154     __v7_all_cache(1);
01155 }
01156 
01157 /** \brief  Clean and invalidate the whole D$
01158 
01159     DCCISW. Clean and Invalidate by Set/Way
01160  */
01161 
01162 __attribute__( ( always_inline ) ) __STATIC_INLINE void __v7_clean_inv_dcache_all(void) {
01163     __v7_all_cache(2);
01164 }
01165 
01166 #include "core_ca_mmu.h"
01167 
01168 #elif (defined (__TASKING__)) /*--------------- TASKING Compiler -----------------*/
01169 
01170 #error TASKING Compiler support not implemented for Cortex-A
01171 
01172 #endif
01173 
01174 /*@} end of CMSIS_Core_RegAccFunctions */
01175 
01176 
01177 #endif /* __CORE_CAFUNC_H__ */