3 * Copyright (C) 2005-2009 Mathias Froehlich
5 * This program is free software; you can redistribute it and/or
6 * modify it under the terms of the GNU General Public License as
7 * published by the Free Software Foundation; either version 2 of the
8 * License, or (at your option) any later version.
10 * This program is distributed in the hope that it will be useful, but
11 * WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 * General Public License for more details.
15 * You should have received a copy of the GNU General Public License
16 * along with this program; if not, write to the Free Software
17 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
21 #include "SGAtomic.hxx"
23 #if defined(SGATOMIC_USE_GCC4_BUILTINS) && defined (__i386__)
25 // Usually the appropriate functions are inlined by gcc.
26 // But if gcc is called with something equivalent to -march=i386,
27 // it will not assume that there is a lock instruction and instead
28 // calls this pair of functions. We will provide them here in this case.
// Note that this assembler code will not work on an i386 chip anymore.
// But I firmly believe that we can assume to run at least on an i486 ...
// Out-of-line fallback for GCC's __sync_sub_and_fetch builtin on plain
// i386 targets.  Atomically subtracts 'value' from the 32-bit word at
// 'ptr' and returns the NEW (post-subtraction) value, matching the
// __sync_sub_and_fetch contract.
unsigned __sync_sub_and_fetch_4(volatile void *ptr, unsigned value)
  register volatile unsigned* mem = reinterpret_cast<volatile unsigned*>(ptr);
  register unsigned result;
  // LOCK XADD exchanges and adds atomically: 'result' receives the OLD
  // memory value while memory receives old + (-value), i.e. old - value.
  // The {l} / {%0,%1|%1,%0} spellings keep the template valid for both
  // AT&T and Intel assembler dialects.
  __asm__ __volatile__("lock; xadd{l} {%0,%1|%1,%0}"
                       : "=r" (result), "=m" (*mem)
                       : "0" (-value), "m" (*mem)
  // 'result' is the pre-subtraction value, so subtract once more to
  // report the new value as __sync_sub_and_fetch requires.
  return result - value;
// Out-of-line fallback for GCC's __sync_add_and_fetch builtin on plain
// i386 targets.  Atomically adds 'value' to the 32-bit word at 'ptr'
// and returns the NEW (post-addition) value.
unsigned __sync_add_and_fetch_4(volatile void *ptr, unsigned value)
  register volatile unsigned* mem = reinterpret_cast<volatile unsigned*>(ptr);
  register unsigned result;
  // LOCK XADD: 'result' receives the OLD memory value while memory
  // receives old + value.  The dual-dialect template matches the
  // companion __sync_sub_and_fetch_4 above.
  __asm__ __volatile__("lock; xadd{l} {%0,%1|%1,%0}"
                       : "=r" (result), "=m" (*mem)
                       : "0" (value), "m" (*mem)
  // Convert the returned old value into the new value.
  return result + value;
// Out-of-line fallback for GCC's __sync_bool_compare_and_swap builtin on
// plain i386 targets.  Atomically stores 'newValue' into *ptr if and only
// if *ptr currently equals 'oldValue'.  Returns nonzero when the swap
// happened, zero otherwise.
unsigned __sync_bool_compare_and_swap_4(volatile void *ptr,
                                        unsigned oldValue, unsigned newValue)
  register volatile unsigned* mem = reinterpret_cast<volatile unsigned*>(ptr);
  // LOCK CMPXCHG compares EAX ("0"(oldValue)) with *mem; on match it
  // writes newValue to memory, otherwise it loads the current memory
  // value into EAX.  NOTE(review): 'before' captures that EAX result via
  // the asm's output constraint — its declaration/output binding is not
  // shown in this excerpt; confirm it is "=a"(before) in the full file.
  __asm__ __volatile__("lock; cmpxchg{l} {%1,%2|%1,%2}"
                       : "q"(newValue), "m"(*mem), "0"(oldValue)
  // The swap succeeded exactly when the pre-existing value matched.
  return before == oldValue;
// Out-of-line fallback for GCC's __sync_synchronize builtin.
void __sync_synchronize()
  // Empty asm with a "memory" clobber: a compiler-level barrier that
  // forbids the compiler from caching or reordering memory accesses
  // across this point (no fence instruction is emitted).
  __asm__ __volatile__("": : : "memory");