/*
 * Copyright (c) 2020, Niklas Hauser
 *
 * This file is part of the modm project.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/.
 */
// ----------------------------------------------------------------------------

#include <modm/platform/core/atomic_lock_impl.hpp>
#include <modm_atomic.hpp>

/* We are implementing the library interface described here:
 * https://gcc.gnu.org/wiki/Atomic/GCCMM/LIbrary
 */
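
// These generic, size-based entry points are what GCC falls back to whenever an
// atomic access cannot be lowered to a native instruction, for example
// std::atomic<T> with a T wider than the hardware supports. A minimal sketch of
// code that would be routed through __atomic_load()/__atomic_store() below
// (the Vec3 type is purely illustrative, not part of this file):
//
//     #include <atomic>
//     struct Vec3 { float x, y, z; };   // 12 bytes: no native atomic width
//     std::atomic<Vec3> position{};
//     Vec3 p = position.load();         // calls __atomic_load(12, ...)
//     position.store({1.f, 2.f, 3.f});  // calls __atomic_store(12, ...)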

// ============================ atomics for arrays ============================
// Generic load: copy `size` bytes out of `src` while the atomic lock is held.
extern "C" void
__atomic_load(unsigned int size, const volatile void *src, void *dest, int /*memorder*/)
{
    __modm_atomic_pre_barrier(__ATOMIC_SEQ_CST);
    {
        modm::atomic::Lock _;
        __builtin_memcpy(dest, (const void*)src, size);
    }
    __modm_atomic_post_barrier(__ATOMIC_SEQ_CST);
}

// Generic store: copy `size` bytes into `dest` while the atomic lock is held.
extern "C" void
__atomic_store(unsigned int size, volatile void *dest, void *src, int /*memorder*/)
{
    __modm_atomic_pre_barrier(__ATOMIC_SEQ_CST);
    {
        modm::atomic::Lock _;
        __builtin_memcpy((void*)dest, src, size);
    }
    __modm_atomic_post_barrier(__ATOMIC_SEQ_CST);
}

// Generic exchange: return the old contents of `ptr` in `ret` and replace
// them with `val`, all in one locked step.
extern "C" void
__atomic_exchange(unsigned int size, volatile void *ptr, void *val, void *ret, int /*memorder*/)
{
    __modm_atomic_pre_barrier(__ATOMIC_SEQ_CST);
    {
        modm::atomic::Lock _;
        __builtin_memcpy(ret, (void*)ptr, size);
        __builtin_memcpy((void*)ptr, val, size);
    }
    __modm_atomic_post_barrier(__ATOMIC_SEQ_CST);
}
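
// Illustrative caller (a sketch, reusing the hypothetical Vec3 from above):
// std::atomic<T>::exchange() on an oversized T is routed through
// __atomic_exchange(), returning the previous value in one locked step.
//
//     std::atomic<Vec3> v{};
//     Vec3 old = v.exchange({4.f, 5.f, 6.f});  // calls __atomic_exchange(12, ...)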

// Generic compare-and-exchange: if `ptr` equals `expected`, write `desired`
// and return true; otherwise copy the current contents back into `expected`
// and return false, as the libatomic interface requires.
extern "C" bool
__atomic_compare_exchange(unsigned int len, volatile void *ptr, void *expected, void *desired,
                          int /*success_memorder*/, int /*failure_memorder*/)
{
    bool retval{false};
    __modm_atomic_pre_barrier(__ATOMIC_SEQ_CST);
    {
        modm::atomic::Lock _;
        if (__builtin_memcmp((void*)ptr, expected, len) == 0) [[likely]]
        {
            __builtin_memcpy((void*)ptr, desired, len);
            retval = true;
        }
        else __builtin_memcpy(expected, (void*)ptr, len);
    }
    __modm_atomic_post_barrier(__ATOMIC_SEQ_CST);
    return retval;
}
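
// How this is reached from C++ (a sketch, again with the hypothetical Vec3):
// a failed compare_exchange_strong() observes the updated `expected` value
// written by the else branch above, which is what makes retry loops converge.
//
//     std::atomic<Vec3> v{};
//     Vec3 expected = v.load();
//     Vec3 desired{1.f, 2.f, 3.f};
//     while (!v.compare_exchange_strong(expected, desired))
//     { /* `expected` now holds the current value; recompute `desired` here */ }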

// This Jinja2 macro emits __atomic_fetch_add_N() and __atomic_fetch_sub_N()
// for an integer of `len` bits (N = len/8 bytes), each returning the value
// the integer held before the operation.
%% macro atomic_fetch(len)
    %% for name, op in [("add", "+"), ("sub", "-")]
extern "C" {{len|u}}
__atomic_fetch_{{name}}_{{len//8}}(volatile void *ptr, {{len|u}} value, int /*memorder*/)
{
    {{len|u}} previous{};
    __modm_atomic_pre_barrier(__ATOMIC_SEQ_CST);
    {
        modm::atomic::Lock _;
        previous = *reinterpret_cast<volatile {{len|u}}*>(ptr);
        *reinterpret_cast<volatile {{len|u}}*>(ptr) = (previous {{op}} value);
    }
    __modm_atomic_post_barrier(__ATOMIC_SEQ_CST);
    return previous;
}
    %% endfor
%% endmacro

%% for length in bit_lengths
// ========================= atomics for {{length}} bit integers =========================
{{ atomic_fetch(length) }}
%% endfor
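
// For illustration: assuming `bit_lengths` contains 32 and the `u` filter maps
// a bit length to its fixed-width type (both assumptions about the build
// configuration), one expansion of the macro above would read:
//
//     extern "C" uint32_t
//     __atomic_fetch_add_4(volatile void *ptr, uint32_t value, int /*memorder*/)
//     {
//         uint32_t previous{};
//         __modm_atomic_pre_barrier(__ATOMIC_SEQ_CST);
//         {
//             modm::atomic::Lock _;
//             previous = *reinterpret_cast<volatile uint32_t*>(ptr);
//             *reinterpret_cast<volatile uint32_t*>(ptr) = (previous + value);
//         }
//         __modm_atomic_post_barrier(__ATOMIC_SEQ_CST);
//         return previous;
//     }
//
// This is what std::atomic<uint32_t>::fetch_add() falls back to on cores
// without native atomic read-modify-write instructions.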