Added prescaler for 16-bit PWM in the LPC1347 target

Fork of mbed-dev by mbed official

mbed_critical.c

/*
 * Copyright (c) 2015-2016, ARM Limited, All Rights Reserved
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "critical.h"

#include "cmsis.h"
#include "mbed_assert.h"

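/* LDREX/STREX exclusive-access instructions are not available on
   Cortex-M0/M0+ (ARMv6-M), so those cores use the critical-section
   based fallback implementations further below. */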
#define EXCLUSIVE_ACCESS (!defined (__CORTEX_M0) && !defined (__CORTEX_M0PLUS))

static volatile uint32_t interrupt_enable_counter = 0;
static volatile bool critical_interrupts_disabled = false;

bool core_util_are_interrupts_enabled(void)
{
#if defined(__CORTEX_A9)
    return ((__get_CPSR() & 0x80) == 0);
#else
    return ((__get_PRIMASK() & 0x1) == 0);
#endif
}

void core_util_critical_section_enter(void)
{
    bool interrupts_disabled = !core_util_are_interrupts_enabled();
    __disable_irq();

    /* Save the interrupt disabled state as it was prior to any nested critical section lock use */
    if (!interrupt_enable_counter) {
        critical_interrupts_disabled = interrupts_disabled;
    }

    /* If the interrupt_enable_counter overflows, or we are in a nested critical section and interrupts
       are enabled, then something has gone badly wrong, so assert an error.
    */
    MBED_ASSERT(interrupt_enable_counter < UINT32_MAX);
// FIXME
#ifndef FEATURE_UVISOR
    if (interrupt_enable_counter > 0) {
        MBED_ASSERT(interrupts_disabled);
    }
#else
#warning "core_util_critical_section_enter needs fixing to work from unprivileged code"
#endif /* FEATURE_UVISOR */
    interrupt_enable_counter++;
}

void core_util_critical_section_exit(void)
{
    /* If critical_section_enter has not previously been called, do nothing */
    if (interrupt_enable_counter) {

// FIXME
#ifndef FEATURE_UVISOR
        bool interrupts_disabled = !core_util_are_interrupts_enabled(); /* get the current interrupt disabled state */

        MBED_ASSERT(interrupts_disabled); /* Interrupts must be disabled on invoking an exit from a critical section */
#else
#warning "core_util_critical_section_exit needs fixing to work from unprivileged code"
#endif /* FEATURE_UVISOR */

        interrupt_enable_counter--;

        /* Only re-enable interrupts if we are exiting the last of the nested critical sections and
           interrupts were enabled on entry to the first critical section.
        */
        if (!interrupt_enable_counter && !critical_interrupts_disabled) {
            __enable_irq();
        }
    }
}

#if EXCLUSIVE_ACCESS

bool core_util_atomic_cas_u8(uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue)
{
    uint8_t currentValue = __LDREXB((volatile uint8_t*)ptr);
    if (currentValue != *expectedCurrentValue) {
        *expectedCurrentValue = currentValue;
        __CLREX();
        return false;
    }

    return !__STREXB(desiredValue, (volatile uint8_t*)ptr);
}

bool core_util_atomic_cas_u16(uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue)
{
    uint16_t currentValue = __LDREXH((volatile uint16_t*)ptr);
    if (currentValue != *expectedCurrentValue) {
        *expectedCurrentValue = currentValue;
        __CLREX();
        return false;
    }

    return !__STREXH(desiredValue, (volatile uint16_t*)ptr);
}


bool core_util_atomic_cas_u32(uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue)
{
    uint32_t currentValue = __LDREXW((volatile uint32_t*)ptr);
    if (currentValue != *expectedCurrentValue) {
        *expectedCurrentValue = currentValue;
        __CLREX();
        return false;
    }

    return !__STREXW(desiredValue, (volatile uint32_t*)ptr);
}

uint8_t core_util_atomic_incr_u8(uint8_t *valuePtr, uint8_t delta)
{
    uint8_t newValue;
    do {
        newValue = __LDREXB((volatile uint8_t*)valuePtr) + delta;
    } while (__STREXB(newValue, (volatile uint8_t*)valuePtr));
    return newValue;
}

uint16_t core_util_atomic_incr_u16(uint16_t *valuePtr, uint16_t delta)
{
    uint16_t newValue;
    do {
        newValue = __LDREXH((volatile uint16_t*)valuePtr) + delta;
    } while (__STREXH(newValue, (volatile uint16_t*)valuePtr));
    return newValue;
}

uint32_t core_util_atomic_incr_u32(uint32_t *valuePtr, uint32_t delta)
{
    uint32_t newValue;
    do {
        newValue = __LDREXW((volatile uint32_t*)valuePtr) + delta;
    } while (__STREXW(newValue, (volatile uint32_t*)valuePtr));
    return newValue;
}


uint8_t core_util_atomic_decr_u8(uint8_t *valuePtr, uint8_t delta)
{
    uint8_t newValue;
    do {
        newValue = __LDREXB((volatile uint8_t*)valuePtr) - delta;
    } while (__STREXB(newValue, (volatile uint8_t*)valuePtr));
    return newValue;
}

uint16_t core_util_atomic_decr_u16(uint16_t *valuePtr, uint16_t delta)
{
    uint16_t newValue;
    do {
        newValue = __LDREXH((volatile uint16_t*)valuePtr) - delta;
    } while (__STREXH(newValue, (volatile uint16_t*)valuePtr));
    return newValue;
}

uint32_t core_util_atomic_decr_u32(uint32_t *valuePtr, uint32_t delta)
{
    uint32_t newValue;
    do {
        newValue = __LDREXW((volatile uint32_t*)valuePtr) - delta;
    } while (__STREXW(newValue, (volatile uint32_t*)valuePtr));
    return newValue;
}

#else

bool core_util_atomic_cas_u8(uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue)
{
    bool success;
    uint8_t currentValue;
    core_util_critical_section_enter();
    currentValue = *ptr;
    if (currentValue == *expectedCurrentValue) {
        *ptr = desiredValue;
        success = true;
    } else {
        *expectedCurrentValue = currentValue;
        success = false;
    }
    core_util_critical_section_exit();
    return success;
}

bool core_util_atomic_cas_u16(uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue)
{
    bool success;
    uint16_t currentValue;
    core_util_critical_section_enter();
    currentValue = *ptr;
    if (currentValue == *expectedCurrentValue) {
        *ptr = desiredValue;
        success = true;
    } else {
        *expectedCurrentValue = currentValue;
        success = false;
    }
    core_util_critical_section_exit();
    return success;
}


bool core_util_atomic_cas_u32(uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue)
{
    bool success;
    uint32_t currentValue;
    core_util_critical_section_enter();
    currentValue = *ptr;
    if (currentValue == *expectedCurrentValue) {
        *ptr = desiredValue;
        success = true;
    } else {
        *expectedCurrentValue = currentValue;
        success = false;
    }
    core_util_critical_section_exit();
    return success;
}


uint8_t core_util_atomic_incr_u8(uint8_t *valuePtr, uint8_t delta)
{
    uint8_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr + delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}

uint16_t core_util_atomic_incr_u16(uint16_t *valuePtr, uint16_t delta)
{
    uint16_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr + delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}

uint32_t core_util_atomic_incr_u32(uint32_t *valuePtr, uint32_t delta)
{
    uint32_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr + delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}


uint8_t core_util_atomic_decr_u8(uint8_t *valuePtr, uint8_t delta)
{
    uint8_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr - delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}

uint16_t core_util_atomic_decr_u16(uint16_t *valuePtr, uint16_t delta)
{
    uint16_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr - delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}

uint32_t core_util_atomic_decr_u32(uint32_t *valuePtr, uint32_t delta)
{
    uint32_t newValue;
    core_util_critical_section_enter();
    newValue = *valuePtr - delta;
    *valuePtr = newValue;
    core_util_critical_section_exit();
    return newValue;
}

#endif

bool core_util_atomic_cas_ptr(void **ptr, void **expectedCurrentValue, void *desiredValue) {
    return core_util_atomic_cas_u32(
            (uint32_t *)ptr,
            (uint32_t *)expectedCurrentValue,
            (uint32_t)desiredValue);
}

void *core_util_atomic_incr_ptr(void **valuePtr, ptrdiff_t delta) {
    return (void *)core_util_atomic_incr_u32((uint32_t *)valuePtr, (uint32_t)delta);
}

void *core_util_atomic_decr_ptr(void **valuePtr, ptrdiff_t delta) {
    return (void *)core_util_atomic_decr_u32((uint32_t *)valuePtr, (uint32_t)delta);
}
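
For reference, the sketch below is not part of the original file; it is a minimal usage example showing how this API is typically called from application code: a critical section protecting a shared counter, and a compare-and-swap retry loop built on core_util_atomic_cas_u32. The names shared_counter, shared_flags, counter_bump and flags_set_bit are hypothetical placeholders; only the core_util_* functions are the ones implemented above.

#include <stdint.h>
#include <stdbool.h>
#include "critical.h"

static volatile uint32_t shared_counter = 0;   /* hypothetical shared state */
static uint32_t shared_flags = 0;              /* hypothetical shared state */

void counter_bump(void)
{
    /* Nesting-safe: interrupts are only restored when the outermost section exits. */
    core_util_critical_section_enter();
    shared_counter++;
    core_util_critical_section_exit();
}

void flags_set_bit(uint32_t bit)
{
    /* Lock-free read-modify-write: on failure the CAS writes the observed
       value back into 'expected', so the loop simply retries with it. */
    uint32_t expected = shared_flags;
    while (!core_util_atomic_cas_u32(&shared_flags, &expected, expected | bit)) {
        /* another context changed shared_flags; retry with the updated 'expected' */
    }
}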