 * Copyright (C) 2005-2009  Mathias Froehlich
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of the
 * License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
21 # include <simgear_config.h>
24 #include "SGAtomic.hxx"
26 #if defined(SGATOMIC_USE_GCC4_BUILTINS) && !defined (GCC_ATOMIC_BUILTINS_FOUND)
28 // Usually the appropriate functions are inlined by gcc.
29 // But if gcc is called with something equivalent to -march=i386,
30 // it will not assume that there is a lock instruction and instead
31 // calls this pair of functions. We will provide them here in this case.
32 // Note that this assembler code will not work on a i386 chip anymore.
33 // But I firmly believe that we can assume to run at least on a i486 ...
37 unsigned __sync_sub_and_fetch_4(volatile void *ptr, unsigned value)
39 register volatile unsigned* mem = reinterpret_cast<volatile unsigned*>(ptr);
40 register unsigned result;
41 __asm__ __volatile__("lock; xadd{l} {%0,%1|%1,%0}"
42 : "=r" (result), "=m" (*mem)
43 : "0" (-value), "m" (*mem)
45 return result - value;
48 unsigned __sync_add_and_fetch_4(volatile void *ptr, unsigned value)
50 register volatile unsigned* mem = reinterpret_cast<volatile unsigned*>(ptr);
51 register unsigned result;
52 __asm__ __volatile__("lock; xadd{l} {%0,%1|%1,%0}"
53 : "=r" (result), "=m" (*mem)
54 : "0" (value), "m" (*mem)
56 return result + value;
59 unsigned __sync_bool_compare_and_swap_4(volatile void *ptr,
60 unsigned oldValue, unsigned newValue)
62 register volatile unsigned* mem = reinterpret_cast<volatile unsigned*>(ptr);
64 __asm__ __volatile__("lock; cmpxchg{l} {%1,%2|%1,%2}"
66 : "q"(newValue), "m"(*mem), "0"(oldValue)
68 return before == oldValue;
71 void __sync_synchronize()
73 __asm__ __volatile__("": : : "memory");