libuv/src/unix/atomic-ops.h
Ben Noordhuis 91d3598475 unix: add atomic-ops.h
Add cmpxchgi(), cmpxchgl() and cpu_relax() functions that we can use
as simple primitives to build spinlocks out of.

PR-URL: https://github.com/libuv/libuv/pull/197
Reviewed-By: Ben Noordhuis <info@bnoordhuis.nl>
Reviewed-By: Saúl Ibarra Corretgé <saghul@gmail.com>

[NB: This is a back-port of commit a3c3b37 from the v1.x branch.]
2015-02-11 22:30:38 +01:00

/* Copyright (c) 2013, Ben Noordhuis <info@bnoordhuis.nl>
 *
 * Permission to use, copy, modify, and/or distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */

#ifndef UV_ATOMIC_OPS_H_
#define UV_ATOMIC_OPS_H_

#include "internal.h"  /* UV_UNUSED */

UV_UNUSED(static int cmpxchgi(int* ptr, int oldval, int newval));
UV_UNUSED(static long cmpxchgl(long* ptr, long oldval, long newval));
UV_UNUSED(static void cpu_relax(void));

/* Prefer hand-rolled assembly over the gcc builtins because the latter also
 * issue full memory barriers.
 */
UV_UNUSED(static int cmpxchgi(int* ptr, int oldval, int newval)) {
#if defined(__i386__) || defined(__x86_64__)
  int out;
  __asm__ __volatile__ ("lock; cmpxchg %2, %1;"
                        : "=a" (out), "+m" (*(volatile int*) ptr)
                        : "r" (newval), "0" (oldval)
                        : "memory");
  return out;
#else
  return __sync_val_compare_and_swap(ptr, oldval, newval);
#endif
}

UV_UNUSED(static long cmpxchgl(long* ptr, long oldval, long newval)) {
#if defined(__i386__) || defined(__x86_64__)
  long out;
  __asm__ __volatile__ ("lock; cmpxchg %2, %1;"
                        : "=a" (out), "+m" (*(volatile long*) ptr)
                        : "r" (newval), "0" (oldval)
                        : "memory");
  return out;
#else
  return __sync_val_compare_and_swap(ptr, oldval, newval);
#endif
}

UV_UNUSED(static void cpu_relax(void)) {
#if defined(__i386__) || defined(__x86_64__)
  __asm__ __volatile__ ("rep; nop");  /* a.k.a. PAUSE */
#endif
}

#endif  /* UV_ATOMIC_OPS_H_ */
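
The commit message above says these functions are primitives to build spinlocks
out of. A minimal sketch of such a spinlock follows; the names spinlock_t,
spin_lock and spin_unlock are hypothetical illustrations, not part of this
header. Acquisition hinges on cmpxchgi() returning the value it found in
memory: a return of 0 means the caller swapped in the 1 and owns the lock.

/* Hypothetical spinlock sketch built on the primitives above. */
#include "atomic-ops.h"

typedef struct {
  int lock;  /* 0 = unlocked, 1 = held */
} spinlock_t;

static void spin_lock(spinlock_t* s) {
  /* cmpxchgi() returns the value that was in *ptr, so it returns 0
   * exactly when we swapped 0 -> 1 and acquired the lock. */
  while (cmpxchgi(&s->lock, 0, 1) != 0)
    cpu_relax();  /* PAUSE hint: eases contention with a sibling hyperthread. */
}

static void spin_unlock(spinlock_t* s) {
  /* Swap 1 -> 0 to release. The LOCK-prefixed cmpxchg (or the gcc builtin
   * on non-x86 architectures) doubles as the required memory barrier. */
  cmpxchgi(&s->lock, 1, 0);
}

A plain store of 0 would also release the lock on x86, but routing the release
through cmpxchgi() keeps the barrier semantics identical on the
__sync_val_compare_and_swap() fallback path.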