author	Paul Mackerras <paulus@samba.org>	2009-06-12 17:10:05 -0400
committer	Benjamin Herrenschmidt <benh@kernel.crashing.org>	2009-06-14 23:27:38 -0400
commit	09d4e0edd4614e787393acc582ac701c6ec3565b (patch)
tree	77f3b85e0f59a168ac78639e510ebcbd3791b3d2 /include/asm-generic
parent	4c75f84f2c781beb230031234ed961d28771a764 (diff)
lib: Provide generic atomic64_t implementation
Many processor architectures have no 64-bit atomic instructions, but we need atomic64_t in order to support the perf_counter subsystem.

This adds an implementation of 64-bit atomic operations using hashed spinlocks to provide atomicity. For each atomic operation, the address of the atomic64_t variable is hashed to an index into an array of 16 spinlocks. That spinlock is taken (with interrupts disabled) around the operation, which can then be coded non-atomically within the lock.

On UP, all the spinlock manipulation goes away and we simply disable interrupts around each operation. In fact gcc eliminates the whole atomic64_lock variable as well.

Signed-off-by: Paul Mackerras <paulus@samba.org>
Signed-off-by: Benjamin Herrenschmidt <benh@kernel.crashing.org>
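The implementation side described above lives in lib/atomic64.c, which falls outside this path-limited diffstat. The following is only a minimal sketch of how the hashed-spinlock scheme in the message could look; the names NR_LOCKS, atomic64_lock and lock_addr, the hash function, and the cacheline padding are assumptions for illustration, not quoted from the patch (lock initialization via an initcall is also omitted for brevity).

/* Sketch only: hashed spinlocks guarding a non-atomic 64-bit update. */
#include <linux/spinlock.h>
#include <linux/cache.h>
#include <asm-generic/atomic64.h>

#define NR_LOCKS 16	/* the commit message describes an array of 16 spinlocks */

/* One spinlock per hash bucket, padded to a cache line to limit false sharing. */
static union {
	spinlock_t lock;
	char pad[L1_CACHE_BYTES];
} atomic64_lock[NR_LOCKS] __cacheline_aligned_in_smp;

/* Hash the variable's address to one of the NR_LOCKS buckets. */
static inline spinlock_t *lock_addr(const atomic64_t *v)
{
	unsigned long addr = (unsigned long) v;

	addr >>= L1_CACHE_SHIFT;
	addr ^= (addr >> 8) ^ (addr >> 16);
	return &atomic64_lock[addr & (NR_LOCKS - 1)];
}

/* Take the bucket's lock with interrupts disabled, then update non-atomically. */
long long atomic64_add_return(long long a, atomic64_t *v)
{
	unsigned long flags;
	spinlock_t *lock = lock_addr(v);
	long long val;

	spin_lock_irqsave(lock, flags);
	val = (v->counter += a);
	spin_unlock_irqrestore(lock, flags);
	return val;
}

Hashing by address spreads contention for unrelated atomic64_t variables across the 16 locks; on UP builds the spin_lock_irqsave/spin_unlock_irqrestore pair reduces to disabling and re-enabling interrupts, which is the behaviour the message notes gcc can optimize down to.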
Diffstat (limited to 'include/asm-generic')
-rw-r--r--	include/asm-generic/atomic64.h	42
1 file changed, 42 insertions(+), 0 deletions(-)
diff --git a/include/asm-generic/atomic64.h b/include/asm-generic/atomic64.h
new file mode 100644
index 000000000000..b18ce4f9ee3d
--- /dev/null
+++ b/include/asm-generic/atomic64.h
@@ -0,0 +1,42 @@
+/*
+ * Generic implementation of 64-bit atomics using spinlocks,
+ * useful on processors that don't have 64-bit atomic instructions.
+ *
+ * Copyright © 2009 Paul Mackerras, IBM Corp. <paulus@au1.ibm.com>
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU General Public License
+ * as published by the Free Software Foundation; either version
+ * 2 of the License, or (at your option) any later version.
+ */
+#ifndef _ASM_GENERIC_ATOMIC64_H
+#define _ASM_GENERIC_ATOMIC64_H
+
+typedef struct {
+	long long counter;
+} atomic64_t;
+
+#define ATOMIC64_INIT(i)	{ (i) }
+
+extern long long atomic64_read(const atomic64_t *v);
+extern void	 atomic64_set(atomic64_t *v, long long i);
+extern void	 atomic64_add(long long a, atomic64_t *v);
+extern long long atomic64_add_return(long long a, atomic64_t *v);
+extern void	 atomic64_sub(long long a, atomic64_t *v);
+extern long long atomic64_sub_return(long long a, atomic64_t *v);
+extern long long atomic64_dec_if_positive(atomic64_t *v);
+extern long long atomic64_cmpxchg(atomic64_t *v, long long o, long long n);
+extern long long atomic64_xchg(atomic64_t *v, long long new);
+extern int	 atomic64_add_unless(atomic64_t *v, long long a, long long u);
+
+#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)
+#define atomic64_inc(v)			atomic64_add(1LL, (v))
+#define atomic64_inc_return(v)		atomic64_add_return(1LL, (v))
+#define atomic64_inc_and_test(v)	(atomic64_inc_return(v) == 0)
+#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
+#define atomic64_dec(v)			atomic64_sub(1LL, (v))
+#define atomic64_dec_return(v)		atomic64_sub_return(1LL, (v))
+#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)
+#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1LL, 0LL)
+
+#endif /* _ASM_GENERIC_ATOMIC64_H */
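As a usage note, code on an architecture without 64-bit atomics would call this interface exactly as it would the native atomic64_t one. The snippet below is a hypothetical example, not taken from the patch; the byte-counter names are invented for illustration.

/* Hypothetical caller of the interface declared in atomic64.h above. */
#include <asm-generic/atomic64.h>

static atomic64_t bytes_transferred = ATOMIC64_INIT(0);

static void account_transfer(long long nbytes)
{
	/* Atomic 64-bit add, safe even on 32-bit CPUs. */
	atomic64_add(nbytes, &bytes_transferred);
}

static long long read_and_reset(void)
{
	/* xchg atomically installs 0 and returns the previous total. */
	return atomic64_xchg(&bytes_transferred, 0LL);
}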