mbed library sources. Supersedes mbed-src.

Fork of mbed-dev by mbed official


mbed_critical.c

/*
 * Copyright (c) 2015-2016, ARM Limited, All Rights Reserved
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "platform/critical.h"

#include "cmsis.h"
#include "platform/mbed_assert.h"
#include "platform/toolchain.h"

#define EXCLUSIVE_ACCESS (!defined (__CORTEX_M0) && !defined (__CORTEX_M0PLUS))

static volatile uint32_t interrupt_enable_counter = 0;
static volatile bool critical_interrupts_disabled = false;

bool core_util_are_interrupts_enabled(void)
{
#if defined(__CORTEX_A9)
    return ((__get_CPSR() & 0x80) == 0);
#else
    return ((__get_PRIMASK() & 0x1) == 0);
#endif
}

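/*
 * Illustrative usage sketch (editor's example, not part of the original
 * mbed_critical.c): guard a blocking wait so that it is never entered with
 * interrupts masked, for instance from inside a critical section or an
 * interrupt handler. wait_for_flag is a hypothetical helper.
 */
static void wait_for_flag(volatile bool *flag)
{
    MBED_ASSERT(core_util_are_interrupts_enabled());
    while (!*flag) {
        /* spin; the flag is expected to be set from an interrupt handler */
    }
}
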
MBED_WEAK void core_util_critical_section_enter(void)
{
    bool interrupts_disabled = !core_util_are_interrupts_enabled();
    __disable_irq();

    /* Save the interrupt disabled state as it was prior to any nested critical section lock use */
    if (!interrupt_enable_counter) {
        critical_interrupts_disabled = interrupts_disabled;
    }

    /* If the interrupt_enable_counter overflows, or we are in a nested critical section and interrupts
       are enabled, then something has gone badly wrong, so assert an error.
    */
    MBED_ASSERT(interrupt_enable_counter < UINT32_MAX);
// FIXME
#ifndef   FEATURE_UVISOR
    if (interrupt_enable_counter > 0) {
        MBED_ASSERT(interrupts_disabled);
    }
#else
#warning "core_util_critical_section_enter needs fixing to work from unprivileged code"
#endif /* FEATURE_UVISOR */
    interrupt_enable_counter++;
}

MBED_WEAK void core_util_critical_section_exit(void)
{
    /* If critical_section_enter has not previously been called, do nothing */
    if (interrupt_enable_counter) {

// FIXME
#ifndef   FEATURE_UVISOR
        bool interrupts_disabled = !core_util_are_interrupts_enabled(); /* get the current interrupt disabled state */

        MBED_ASSERT(interrupts_disabled); /* Interrupts must be disabled on invoking an exit from a critical section */
#else
#warning "core_util_critical_section_exit needs fixing to work from unprivileged code"
#endif /* FEATURE_UVISOR */

        interrupt_enable_counter--;

        /* Only re-enable interrupts if we are exiting the last of the nested critical sections and
           interrupts were enabled on entry to the first critical section.
        */
        if (!interrupt_enable_counter && !critical_interrupts_disabled) {
            __enable_irq();
        }
    }
}

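/*
 * Illustrative usage sketch (editor's example, not part of the original
 * mbed_critical.c): critical sections nest, so a helper that takes the lock
 * can safely be called by code that already holds it. Interrupts are only
 * re-enabled when the outermost core_util_critical_section_exit() runs.
 * list_node_t, g_head, list_push and list_push_two are hypothetical names.
 */
typedef struct list_node {
    struct list_node *next;
} list_node_t;

static list_node_t *g_head = NULL;

static void list_push(list_node_t *node)
{
    core_util_critical_section_enter();   /* outer or nested entry */
    node->next = g_head;
    g_head = node;
    core_util_critical_section_exit();
}

static void list_push_two(list_node_t *a, list_node_t *b)
{
    core_util_critical_section_enter();   /* outermost entry: interrupts masked here */
    list_push(a);                         /* nested enter/exit: interrupts stay masked */
    list_push(b);
    core_util_critical_section_exit();    /* interrupts restored here, if they were
                                             enabled before the outermost entry */
}
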
#if EXCLUSIVE_ACCESS

/* Suppress __ldrex and __strex deprecated warnings - "#3731-D: intrinsic is deprecated" */
#if defined (__CC_ARM)
#pragma diag_suppress 3731
#endif

bool core_util_atomic_cas_u8(uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue)
{
    uint8_t currentValue = __LDREXB((volatile uint8_t*)ptr);
    if (currentValue != *expectedCurrentValue) {
        *expectedCurrentValue = currentValue;
        __CLREX();
        return false;
    }

    return !__STREXB(desiredValue, (volatile uint8_t*)ptr);
}

bool core_util_atomic_cas_u16(uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue)
{
    uint16_t currentValue = __LDREXH((volatile uint16_t*)ptr);
    if (currentValue != *expectedCurrentValue) {
        *expectedCurrentValue = currentValue;
        __CLREX();
        return false;
    }

    return !__STREXH(desiredValue, (volatile uint16_t*)ptr);
}


bool core_util_atomic_cas_u32(uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue)
{
    uint32_t currentValue = __LDREXW((volatile uint32_t*)ptr);
    if (currentValue != *expectedCurrentValue) {
        *expectedCurrentValue = currentValue;
        __CLREX();
        return false;
    }

    return !__STREXW(desiredValue, (volatile uint32_t*)ptr);
}

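/*
 * Illustrative usage sketch (editor's example, not part of the original
 * mbed_critical.c): a typical compare-and-swap retry loop built on
 * core_util_atomic_cas_u32. On failure the CAS writes the observed value
 * back into 'expected', so the loop recomputes the desired value and retries
 * until no other context has modified the word in between.
 * atomic_fetch_or_u32_example is a hypothetical helper.
 */
static uint32_t atomic_fetch_or_u32_example(uint32_t *flags, uint32_t mask)
{
    uint32_t expected = *flags;
    while (!core_util_atomic_cas_u32(flags, &expected, expected | mask)) {
        /* 'expected' now holds the latest observed value; try again with it */
    }
    return expected;   /* value of *flags before the OR was applied */
}
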
uint8_t core_util_atomic_incr_u8(uint8_t *valuePtr, uint8_t delta)
{
    uint8_t newValue;
    do {
        newValue = __LDREXB((volatile uint8_t*)valuePtr) + delta;
    } while (__STREXB(newValue, (volatile uint8_t*)valuePtr));
    return newValue;
}

uint16_t core_util_atomic_incr_u16(uint16_t *valuePtr, uint16_t delta)
{
    uint16_t newValue;
    do {
        newValue = __LDREXH((volatile uint16_t*)valuePtr) + delta;
    } while (__STREXH(newValue, (volatile uint16_t*)valuePtr));
    return newValue;
}

uint32_t core_util_atomic_incr_u32(uint32_t *valuePtr, uint32_t delta)
{
    uint32_t newValue;
    do {
        newValue = __LDREXW((volatile uint32_t*)valuePtr) + delta;
    } while (__STREXW(newValue, (volatile uint32_t*)valuePtr));
    return newValue;
}


uint8_t core_util_atomic_decr_u8(uint8_t *valuePtr, uint8_t delta)
{
    uint8_t newValue;
    do {
        newValue = __LDREXB((volatile uint8_t*)valuePtr) - delta;
    } while (__STREXB(newValue, (volatile uint8_t*)valuePtr));
    return newValue;
}

uint16_t core_util_atomic_decr_u16(uint16_t *valuePtr, uint16_t delta)
{
    uint16_t newValue;
    do {
        newValue = __LDREXH((volatile uint16_t*)valuePtr) - delta;
    } while (__STREXH(newValue, (volatile uint16_t*)valuePtr));
    return newValue;
}

uint32_t core_util_atomic_decr_u32(uint32_t *valuePtr, uint32_t delta)
{
    uint32_t newValue;
    do {
        newValue = __LDREXW((volatile uint32_t*)valuePtr) - delta;
    } while (__STREXW(newValue, (volatile uint32_t*)valuePtr));
    return newValue;
}

#else

bool core_util_atomic_cas_u8(uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue)
{
    bool success;
    uint8_t currentValue;
    core_util_critical_section_enter();
    currentValue = *ptr;
    if (currentValue == *expectedCurrentValue) {
        *ptr = desiredValue;
        success = true;
    } else {
        *expectedCurrentValue = currentValue;
        success = false;
    }
    core_util_critical_section_exit();
    return success;
}

bool core_util_atomic_cas_u16(uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue)
{
    bool success;
    uint16_t currentValue;
    core_util_critical_section_enter();
    currentValue = *ptr;
    if (currentValue == *expectedCurrentValue) {
        *ptr = desiredValue;
        success = true;
    } else {
        *expectedCurrentValue = currentValue;
        success = false;
    }
    core_util_critical_section_exit();
    return success;
}


bool core_util_atomic_cas_u32(uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue)
{
    bool success;
    uint32_t currentValue;
    core_util_critical_section_enter();
    currentValue = *ptr;
    if (currentValue == *expectedCurrentValue) {
        *ptr = desiredValue;
        success = true;
    } else {
        *expectedCurrentValue = currentValue;
        success = false;
    }
    core_util_critical_section_exit();
    return success;
}

uint8_t core_util_atomic_incr_u8(uint8_t *valuePtr, uint8_t delta)
{
    uint8_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr + delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}

uint16_t core_util_atomic_incr_u16(uint16_t *valuePtr, uint16_t delta)
{
    uint16_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr + delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}

uint32_t core_util_atomic_incr_u32(uint32_t *valuePtr, uint32_t delta)
{
    uint32_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr + delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}


uint8_t core_util_atomic_decr_u8(uint8_t *valuePtr, uint8_t delta)
{
    uint8_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr - delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}

uint16_t core_util_atomic_decr_u16(uint16_t *valuePtr, uint16_t delta)
{
    uint16_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr - delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}

uint32_t core_util_atomic_decr_u32(uint32_t *valuePtr, uint32_t delta)
{
    uint32_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr - delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}

#endif

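/*
 * Illustrative usage sketch (editor's example, not part of the original
 * mbed_critical.c): a shared counter bumped from both thread and interrupt
 * context without taking an explicit critical section. dropped_packets and
 * note_dropped_packet are hypothetical names.
 */
static uint32_t dropped_packets = 0;

static void note_dropped_packet(void)
{
    /* core_util_atomic_incr_u32 returns the post-increment value and is
       safe against concurrent callers. */
    (void)core_util_atomic_incr_u32(&dropped_packets, 1);
}
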

bool core_util_atomic_cas_ptr(void **ptr, void **expectedCurrentValue, void *desiredValue) {
    return core_util_atomic_cas_u32(
            (uint32_t *)ptr,
            (uint32_t *)expectedCurrentValue,
            (uint32_t)desiredValue);
}

void *core_util_atomic_incr_ptr(void **valuePtr, ptrdiff_t delta) {
    return (void *)core_util_atomic_incr_u32((uint32_t *)valuePtr, (uint32_t)delta);
}

void *core_util_atomic_decr_ptr(void **valuePtr, ptrdiff_t delta) {
    return (void *)core_util_atomic_decr_u32((uint32_t *)valuePtr, (uint32_t)delta);
}
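
/*
 * Illustrative usage sketch (editor's example, not part of the original
 * mbed_critical.c): lock-free one-time publication of a pointer using
 * core_util_atomic_cas_ptr. Only the first successful CAS publishes the
 * pointer; a failed CAS simply means another context already did.
 * shared_table, table_storage and get_shared_table are hypothetical names.
 */
static void *shared_table = NULL;
static uint32_t table_storage[64];

static void *get_shared_table(void)
{
    if (shared_table == NULL) {
        void *expected = NULL;
        (void)core_util_atomic_cas_ptr(&shared_table, &expected, table_storage);
    }
    return shared_table;
}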