Diffstat (limited to 'arch/x86')
 arch/x86/include/asm/atomic_32.h   | 185
 arch/x86/include/asm/atomic_64.h   |  42
 arch/x86/include/asm/stacktrace.h  |   2
 arch/x86/kernel/cpu/perf_counter.c |   8
 arch/x86/kernel/dumpstack_32.c     |   6
 arch/x86/kernel/dumpstack_64.c     |  22
 arch/x86/lib/Makefile              |   1
 arch/x86/lib/atomic64_32.c         | 229
 8 files changed, 351 insertions(+), 144 deletions(-)
diff --git a/arch/x86/include/asm/atomic_32.h b/arch/x86/include/asm/atomic_32.h
index 2503d4e64c2a..dc5a667ff791 100644
--- a/arch/x86/include/asm/atomic_32.h
+++ b/arch/x86/include/asm/atomic_32.h
@@ -19,7 +19,10 @@
  *
  * Atomically reads the value of @v.
  */
-#define atomic_read(v)		((v)->counter)
+static inline int atomic_read(const atomic_t *v)
+{
+	return v->counter;
+}
 
 /**
  * atomic_set - set atomic variable
@@ -28,7 +31,10 @@
  *
  * Atomically sets the value of @v to @i.
  */
-#define atomic_set(v, i)	(((v)->counter) = (i))
+static inline void atomic_set(atomic_t *v, int i)
+{
+	v->counter = i;
+}
 
 /**
  * atomic_add - add integer to atomic variable
@@ -200,8 +206,15 @@ static inline int atomic_sub_return(int i, atomic_t *v)
 	return atomic_add_return(-i, v);
 }
 
-#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), (old), (new)))
-#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
+static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
+{
+	return cmpxchg(&v->counter, old, new);
+}
+
+static inline int atomic_xchg(atomic_t *v, int new)
+{
+	return xchg(&v->counter, new);
+}
 
 /**
  * atomic_add_unless - add unless the number is already a given value
@@ -250,45 +263,12 @@ static inline int atomic_add_unless(atomic_t *v, int a, int u)
 /* An 64bit atomic type */
 
 typedef struct {
-	unsigned long long counter;
+	u64 __aligned(8) counter;
 } atomic64_t;
 
 #define ATOMIC64_INIT(val)	{ (val) }
 
-/**
- * atomic64_read - read atomic64 variable
- * @ptr: pointer of type atomic64_t
- *
- * Atomically reads the value of @v.
- * Doesn't imply a read memory barrier.
- */
-#define __atomic64_read(ptr)		((ptr)->counter)
-
-static inline unsigned long long
-cmpxchg8b(unsigned long long *ptr, unsigned long long old, unsigned long long new)
-{
-	asm volatile(
-
-		LOCK_PREFIX "cmpxchg8b (%[ptr])\n"
-
-		     : "=A" (old)
-
-		     : [ptr] "D" (ptr),
-		       "A" (old),
-		       "b" (ll_low(new)),
-		       "c" (ll_high(new))
-
-		     : "memory");
-
-	return old;
-}
-
-static inline unsigned long long
-atomic64_cmpxchg(atomic64_t *ptr, unsigned long long old_val,
-		 unsigned long long new_val)
-{
-	return cmpxchg8b(&ptr->counter, old_val, new_val);
-}
+extern u64 atomic64_cmpxchg(atomic64_t *ptr, u64 old_val, u64 new_val);
 
 /**
  * atomic64_xchg - xchg atomic64 variable
@@ -298,18 +278,7 @@ atomic64_cmpxchg(atomic64_t *ptr, unsigned long long old_val,
  * Atomically xchgs the value of @ptr to @new_val and returns
  * the old value.
  */
-
-static inline unsigned long long
-atomic64_xchg(atomic64_t *ptr, unsigned long long new_val)
-{
-	unsigned long long old_val;
-
-	do {
-		old_val = atomic_read(ptr);
-	} while (atomic64_cmpxchg(ptr, old_val, new_val) != old_val);
-
-	return old_val;
-}
+extern u64 atomic64_xchg(atomic64_t *ptr, u64 new_val);
 
 /**
  * atomic64_set - set atomic64 variable
@@ -318,10 +287,7 @@ atomic64_xchg(atomic64_t *ptr, unsigned long long new_val)
  *
  * Atomically sets the value of @ptr to @new_val.
  */
-static inline void atomic64_set(atomic64_t *ptr, unsigned long long new_val)
-{
-	atomic64_xchg(ptr, new_val);
-}
+extern void atomic64_set(atomic64_t *ptr, u64 new_val);
 
 /**
  * atomic64_read - read atomic64 variable
@@ -329,17 +295,30 @@ static inline void atomic64_set(atomic64_t *ptr, unsigned long long new_val)
  *
  * Atomically reads the value of @ptr and returns it.
  */
-static inline unsigned long long atomic64_read(atomic64_t *ptr)
+static inline u64 atomic64_read(atomic64_t *ptr)
 {
-	unsigned long long curr_val;
+	u64 res;
 
-	do {
-		curr_val = __atomic64_read(ptr);
-	} while (atomic64_cmpxchg(ptr, curr_val, curr_val) != curr_val);
-
-	return curr_val;
+	/*
+	 * Note, we inline this atomic64_t primitive because
+	 * it only clobbers EAX/EDX and leaves the others
+	 * untouched. We also (somewhat subtly) rely on the
+	 * fact that cmpxchg8b returns the current 64-bit value
+	 * of the memory location we are touching:
+	 */
+	asm volatile(
+		"mov %%ebx, %%eax\n\t"
+		"mov %%ecx, %%edx\n\t"
+		LOCK_PREFIX "cmpxchg8b %1\n"
+			: "=&A" (res)
+			: "m" (*ptr)
+		);
+
+	return res;
 }
 
+extern u64 atomic64_read(atomic64_t *ptr);
+
 /**
  * atomic64_add_return - add and return
  * @delta: integer value to add
@@ -347,34 +326,14 @@ static inline unsigned long long atomic64_read(atomic64_t *ptr)
  *
  * Atomically adds @delta to @ptr and returns @delta + *@ptr
  */
-static inline unsigned long long
-atomic64_add_return(unsigned long long delta, atomic64_t *ptr)
-{
-	unsigned long long old_val, new_val;
-
-	do {
-		old_val = atomic_read(ptr);
-		new_val = old_val + delta;
-
-	} while (atomic64_cmpxchg(ptr, old_val, new_val) != old_val);
-
-	return new_val;
-}
-
-static inline long atomic64_sub_return(unsigned long long delta, atomic64_t *ptr)
-{
-	return atomic64_add_return(-delta, ptr);
-}
+extern u64 atomic64_add_return(u64 delta, atomic64_t *ptr);
 
-static inline long atomic64_inc_return(atomic64_t *ptr)
-{
-	return atomic64_add_return(1, ptr);
-}
-
-static inline long atomic64_dec_return(atomic64_t *ptr)
-{
-	return atomic64_sub_return(1, ptr);
-}
+/*
+ * Other variants with different arithmetic operators:
+ */
+extern u64 atomic64_sub_return(u64 delta, atomic64_t *ptr);
+extern u64 atomic64_inc_return(atomic64_t *ptr);
+extern u64 atomic64_dec_return(atomic64_t *ptr);
 
 /**
  * atomic64_add - add integer to atomic64 variable
@@ -383,10 +342,7 @@ static inline long atomic64_dec_return(atomic64_t *ptr)
  *
  * Atomically adds @delta to @ptr.
  */
-static inline void atomic64_add(unsigned long long delta, atomic64_t *ptr)
-{
-	atomic64_add_return(delta, ptr);
-}
+extern void atomic64_add(u64 delta, atomic64_t *ptr);
 
 /**
  * atomic64_sub - subtract the atomic64 variable
@@ -395,10 +351,7 @@ static inline void atomic64_add(unsigned long long delta, atomic64_t *ptr)
  *
  * Atomically subtracts @delta from @ptr.
  */
-static inline void atomic64_sub(unsigned long long delta, atomic64_t *ptr)
-{
-	atomic64_add(-delta, ptr);
-}
+extern void atomic64_sub(u64 delta, atomic64_t *ptr);
 
 /**
  * atomic64_sub_and_test - subtract value from variable and test result
@@ -409,13 +362,7 @@ static inline void atomic64_sub(unsigned long long delta, atomic64_t *ptr)
  * true if the result is zero, or false for all
  * other cases.
  */
-static inline int
-atomic64_sub_and_test(unsigned long long delta, atomic64_t *ptr)
-{
-	unsigned long long old_val = atomic64_sub_return(delta, ptr);
-
-	return old_val == 0;
-}
+extern int atomic64_sub_and_test(u64 delta, atomic64_t *ptr);
 
 /**
  * atomic64_inc - increment atomic64 variable
@@ -423,10 +370,7 @@ atomic64_sub_and_test(unsigned long long delta, atomic64_t *ptr)
  *
  * Atomically increments @ptr by 1.
  */
-static inline void atomic64_inc(atomic64_t *ptr)
-{
-	atomic64_add(1, ptr);
-}
+extern void atomic64_inc(atomic64_t *ptr);
 
 /**
  * atomic64_dec - decrement atomic64 variable
@@ -434,10 +378,7 @@ static inline void atomic64_inc(atomic64_t *ptr)
  *
  * Atomically decrements @ptr by 1.
  */
-static inline void atomic64_dec(atomic64_t *ptr)
-{
-	atomic64_sub(1, ptr);
-}
+extern void atomic64_dec(atomic64_t *ptr);
 
 /**
  * atomic64_dec_and_test - decrement and test
@@ -447,10 +388,7 @@ static inline void atomic64_dec(atomic64_t *ptr)
  * returns true if the result is 0, or false for all other
  * cases.
  */
-static inline int atomic64_dec_and_test(atomic64_t *ptr)
-{
-	return atomic64_sub_and_test(1, ptr);
-}
+extern int atomic64_dec_and_test(atomic64_t *ptr);
 
 /**
  * atomic64_inc_and_test - increment and test
@@ -460,10 +398,7 @@ static inline int atomic64_dec_and_test(atomic64_t *ptr)
  * and returns true if the result is zero, or false for all
  * other cases.
  */
-static inline int atomic64_inc_and_test(atomic64_t *ptr)
-{
-	return atomic64_sub_and_test(-1, ptr);
-}
+extern int atomic64_inc_and_test(atomic64_t *ptr);
 
 /**
  * atomic64_add_negative - add and test if negative
@@ -474,13 +409,7 @@ static inline int atomic64_inc_and_test(atomic64_t *ptr)
  * if the result is negative, or false when
  * result is greater than or equal to zero.
  */
-static inline int
-atomic64_add_negative(unsigned long long delta, atomic64_t *ptr)
-{
-	long long old_val = atomic64_add_return(delta, ptr);
-
-	return old_val < 0;
-}
+extern int atomic64_add_negative(u64 delta, atomic64_t *ptr);
 
 #include <asm-generic/atomic-long.h>
 #endif /* _ASM_X86_ATOMIC_32_H */
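
The out-of-lined atomic64_* routines referenced above all reduce to a compare-and-swap retry loop around cmpxchg8b. A minimal userspace sketch of that pattern, with GCC's __sync_val_compare_and_swap builtin standing in for LOCK CMPXCHG8B (an illustration of the technique, not the kernel implementation):

/*
 * Sketch of the CAS retry loop behind the atomic64_* routines.
 * __sync_val_compare_and_swap stands in for LOCK CMPXCHG8B.
 */
#include <stdint.h>
#include <stdio.h>

static uint64_t add_return64(uint64_t *ptr, uint64_t delta)
{
	/* seed with a guess; a wrong guess costs one extra loop */
	uint64_t old_val, new_val, real_val = 0;

	do {
		old_val = real_val;
		new_val = old_val + delta;
		/* CAS returns the value actually found in memory */
		real_val = __sync_val_compare_and_swap(ptr, old_val, new_val);
	} while (real_val != old_val);

	return new_val;
}

int main(void)
{
	uint64_t counter = 40;

	printf("%llu\n", (unsigned long long)add_return64(&counter, 2));
	return 0;
}

Seeding real_val with a guess instead of a plain load is the trick the comments in atomic64_32.c below describe: the very first access is already a write-intent cmpxchg, so the cache line is acquired with an ownership MESI transaction rather than a read followed by a flush-for-ownership.
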
diff --git a/arch/x86/include/asm/atomic_64.h b/arch/x86/include/asm/atomic_64.h
index 0d6360220007..d605dc268e79 100644
--- a/arch/x86/include/asm/atomic_64.h
+++ b/arch/x86/include/asm/atomic_64.h
@@ -18,7 +18,10 @@
  *
  * Atomically reads the value of @v.
  */
-#define atomic_read(v)		((v)->counter)
+static inline int atomic_read(const atomic_t *v)
+{
+	return v->counter;
+}
 
 /**
  * atomic_set - set atomic variable
@@ -27,7 +30,10 @@
  *
  * Atomically sets the value of @v to @i.
  */
-#define atomic_set(v, i)	(((v)->counter) = (i))
+static inline void atomic_set(atomic_t *v, int i)
+{
+	v->counter = i;
+}
 
 /**
  * atomic_add - add integer to atomic variable
@@ -192,7 +198,10 @@ static inline int atomic_sub_return(int i, atomic_t *v)
  * Atomically reads the value of @v.
  * Doesn't imply a read memory barrier.
  */
-#define atomic64_read(v)		((v)->counter)
+static inline long atomic64_read(const atomic64_t *v)
+{
+	return v->counter;
+}
 
 /**
  * atomic64_set - set atomic64 variable
@@ -201,7 +210,10 @@ static inline int atomic_sub_return(int i, atomic_t *v)
  *
  * Atomically sets the value of @v to @i.
  */
-#define atomic64_set(v, i)	(((v)->counter) = (i))
+static inline void atomic64_set(atomic64_t *v, long i)
+{
+	v->counter = i;
+}
 
 /**
  * atomic64_add - add integer to atomic64 variable
@@ -355,11 +367,25 @@ static inline long atomic64_sub_return(long i, atomic64_t *v)
 #define atomic64_inc_return(v)	(atomic64_add_return(1, (v)))
 #define atomic64_dec_return(v)	(atomic64_sub_return(1, (v)))
 
-#define atomic64_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), (old), (new)))
-#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
+static inline long atomic64_cmpxchg(atomic64_t *v, long old, long new)
+{
+	return cmpxchg(&v->counter, old, new);
+}
+
+static inline long atomic64_xchg(atomic64_t *v, long new)
+{
+	return xchg(&v->counter, new);
+}
 
-#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), (old), (new)))
-#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
+static inline long atomic_cmpxchg(atomic_t *v, int old, int new)
+{
+	return cmpxchg(&v->counter, old, new);
+}
+
+static inline long atomic_xchg(atomic_t *v, int new)
+{
+	return xchg(&v->counter, new);
+}
 
 /**
  * atomic_add_unless - add unless the number is a given value
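
Both headers make the same macro-to-function conversion. A standalone sketch (mock types, not the kernel headers) of what it buys: the function forms type-check their arguments, accept const pointers, and cannot be misused as lvalues the way the old assignment-expression macros could.

/* Mock of the converted accessors; userspace, for illustration only. */
#include <stdio.h>

typedef struct { int counter; } atomic_t;

static inline int atomic_read(const atomic_t *v)
{
	return v->counter;
}

static inline void atomic_set(atomic_t *v, int i)
{
	v->counter = i;
}

int main(void)
{
	atomic_t v;

	atomic_set(&v, 42);
	/* passing a non-atomic_t pointer here is now a compile error,
	 * where the old macro accepted any struct with a ->counter: */
	printf("%d\n", atomic_read(&v));
	return 0;
}
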
diff --git a/arch/x86/include/asm/stacktrace.h b/arch/x86/include/asm/stacktrace.h
index f517944b2b17..cf86a5e73815 100644
--- a/arch/x86/include/asm/stacktrace.h
+++ b/arch/x86/include/asm/stacktrace.h
@@ -3,6 +3,8 @@
 
 extern int kstack_depth_to_print;
 
+int x86_is_stack_id(int id, char *name);
+
 /* Generic stack tracer with callbacks */
 
 struct stacktrace_ops {
diff --git a/arch/x86/kernel/cpu/perf_counter.c b/arch/x86/kernel/cpu/perf_counter.c
index d4cf4ce19aac..36c3dc7b8991 100644
--- a/arch/x86/kernel/cpu/perf_counter.c
+++ b/arch/x86/kernel/cpu/perf_counter.c
@@ -1561,6 +1561,7 @@ void callchain_store(struct perf_callchain_entry *entry, u64 ip)
 
 static DEFINE_PER_CPU(struct perf_callchain_entry, irq_entry);
 static DEFINE_PER_CPU(struct perf_callchain_entry, nmi_entry);
+static DEFINE_PER_CPU(int, in_nmi_frame);
 
 
 static void
@@ -1576,7 +1577,9 @@ static void backtrace_warning(void *data, char *msg)
 
 static int backtrace_stack(void *data, char *name)
 {
-	/* Process all stacks: */
+	per_cpu(in_nmi_frame, smp_processor_id()) =
+			x86_is_stack_id(NMI_STACK, name);
+
 	return 0;
 }
 
@@ -1584,6 +1587,9 @@ static void backtrace_address(void *data, unsigned long addr, int reliable)
 {
 	struct perf_callchain_entry *entry = data;
 
+	if (per_cpu(in_nmi_frame, smp_processor_id()))
+		return;
+
 	if (reliable)
 		callchain_store(entry, addr);
 }
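
How the two callbacks above cooperate: dump_trace() announces each stack with ->stack(name) before walking that stack's frames with ->address(), so latching the result of x86_is_stack_id(NMI_STACK, name) at the boundary filters every frame of the NMI stack. A self-contained sketch of that flow, where a plain global and strcmp() stand in for the per-CPU variable and the kernel's pointer-identity check, and the addresses are made up:

/* Userspace sketch of the stack/address callback sequencing. */
#include <stdio.h>
#include <string.h>

static int in_nmi_frame;

static int backtrace_stack(void *data, char *name)
{
	in_nmi_frame = (name && strcmp(name, "NMI") == 0);
	return 0;
}

static void backtrace_address(void *data, unsigned long addr, int reliable)
{
	if (in_nmi_frame)
		return;
	if (reliable)
		printf("  %#lx\n", addr);
}

int main(void)
{
	backtrace_stack(NULL, "NMI");
	backtrace_address(NULL, 0xc0100000UL, 1);	/* suppressed */
	backtrace_stack(NULL, "IRQ");
	backtrace_address(NULL, 0xc0200000UL, 1);	/* printed */
	return 0;
}
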
diff --git a/arch/x86/kernel/dumpstack_32.c b/arch/x86/kernel/dumpstack_32.c
index d593cd1f58dc..bca5fba91c9e 100644
--- a/arch/x86/kernel/dumpstack_32.c
+++ b/arch/x86/kernel/dumpstack_32.c
@@ -19,6 +19,12 @@
 
 #include "dumpstack.h"
 
+/* Just a stub for now */
+int x86_is_stack_id(int id, char *name)
+{
+	return 0;
+}
+
 void dump_trace(struct task_struct *task, struct pt_regs *regs,
 		unsigned long *stack, unsigned long bp,
 		const struct stacktrace_ops *ops, void *data)
diff --git a/arch/x86/kernel/dumpstack_64.c b/arch/x86/kernel/dumpstack_64.c
index d35db5993fd6..54b0a3276766 100644
--- a/arch/x86/kernel/dumpstack_64.c
+++ b/arch/x86/kernel/dumpstack_64.c
@@ -19,10 +19,8 @@
 
 #include "dumpstack.h"
 
-static unsigned long *in_exception_stack(unsigned cpu, unsigned long stack,
-					unsigned *usedp, char **idp)
-{
-	static char ids[][8] = {
+
+static char x86_stack_ids[][8] = {
 		[DEBUG_STACK - 1] = "#DB",
 		[NMI_STACK - 1] = "NMI",
 		[DOUBLEFAULT_STACK - 1] = "#DF",
@@ -33,6 +31,15 @@ static unsigned long *in_exception_stack(unsigned cpu, unsigned long stack,
 		  N_EXCEPTION_STACKS + DEBUG_STKSZ / EXCEPTION_STKSZ - 2] = "#DB[?]"
 #endif
 	};
+
+int x86_is_stack_id(int id, char *name)
+{
+	return x86_stack_ids[id - 1] == name;
+}
+
+static unsigned long *in_exception_stack(unsigned cpu, unsigned long stack,
+					 unsigned *usedp, char **idp)
+{
 	unsigned k;
 
 	/*
@@ -61,7 +68,7 @@ static unsigned long *in_exception_stack(unsigned cpu, unsigned long stack,
 		if (*usedp & (1U << k))
 			break;
 		*usedp |= 1U << k;
-		*idp = ids[k];
+		*idp = x86_stack_ids[k];
 		return (unsigned long *)end;
 	}
 	/*
@@ -81,12 +88,13 @@ static unsigned long *in_exception_stack(unsigned cpu, unsigned long stack,
 			do {
 				++j;
 				end -= EXCEPTION_STKSZ;
-				ids[j][4] = '1' + (j - N_EXCEPTION_STACKS);
+				x86_stack_ids[j][4] = '1' +
+						(j - N_EXCEPTION_STACKS);
 			} while (stack < end - EXCEPTION_STKSZ);
 			if (*usedp & (1U << j))
 				break;
 			*usedp |= 1U << j;
-			*idp = ids[j];
+			*idp = x86_stack_ids[j];
 			return (unsigned long *)end;
 		}
 #endif
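
Note that x86_is_stack_id() compares pointers, not string contents. That is sufficient because the name that later reaches the backtrace callbacks is the very pointer that in_exception_stack() stored out of x86_stack_ids[]. A standalone demonstration (the NMI_STACK value here is illustrative, not the kernel's):

/* Pointer identity vs. content equality in x86_is_stack_id(). */
#include <stdio.h>

#define NMI_STACK 2	/* illustrative value, not the kernel header's */

static char x86_stack_ids[][8] = {
	[0] = "#DB",
	[NMI_STACK - 1] = "NMI",
};

static int x86_is_stack_id(int id, char *name)
{
	return x86_stack_ids[id - 1] == name;
}

int main(void)
{
	char *from_walker = x86_stack_ids[NMI_STACK - 1];	/* same pointer */
	char copy[8] = "NMI";					/* same contents */

	printf("%d\n", x86_is_stack_id(NMI_STACK, from_walker));	/* 1 */
	printf("%d\n", x86_is_stack_id(NMI_STACK, copy));		/* 0 */
	return 0;
}
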
diff --git a/arch/x86/lib/Makefile b/arch/x86/lib/Makefile
index f9d35632666b..07c31899c9c2 100644
--- a/arch/x86/lib/Makefile
+++ b/arch/x86/lib/Makefile
@@ -10,6 +10,7 @@ lib-y += usercopy_$(BITS).o getuser.o putuser.o
 lib-y += memcpy_$(BITS).o
 
 ifeq ($(CONFIG_X86_32),y)
+        obj-y += atomic64_32.o
         lib-y += checksum_32.o
         lib-y += strstr_32.o
         lib-y += semaphore_32.o string_32.o
diff --git a/arch/x86/lib/atomic64_32.c b/arch/x86/lib/atomic64_32.c
new file mode 100644
index 000000000000..824fa0be55a3
--- /dev/null
+++ b/arch/x86/lib/atomic64_32.c
@@ -0,0 +1,229 @@
+#include <linux/compiler.h>
+#include <linux/module.h>
+#include <linux/types.h>
+
+#include <asm/processor.h>
+#include <asm/cmpxchg.h>
+#include <asm/atomic.h>
+
+static noinline u64 cmpxchg8b(u64 *ptr, u64 old, u64 new)
+{
+	u32 low = new;
+	u32 high = new >> 32;
+
+	asm volatile(
+		LOCK_PREFIX "cmpxchg8b %1\n"
+		: "+A" (old), "+m" (*ptr)
+		: "b" (low), "c" (high)
+		);
+	return old;
+}
+
+u64 atomic64_cmpxchg(atomic64_t *ptr, u64 old_val, u64 new_val)
+{
+	return cmpxchg8b(&ptr->counter, old_val, new_val);
+}
+EXPORT_SYMBOL(atomic64_cmpxchg);
+
+/**
+ * atomic64_xchg - xchg atomic64 variable
+ * @ptr: pointer to type atomic64_t
+ * @new_val: value to assign
+ *
+ * Atomically xchgs the value of @ptr to @new_val and returns
+ * the old value.
+ */
+u64 atomic64_xchg(atomic64_t *ptr, u64 new_val)
+{
+	/*
+	 * Try first with a (possibly incorrect) assumption about
+	 * what we have there. We'll do two loops most likely,
+	 * but we'll get an ownership MESI transaction straight away
+	 * instead of a read transaction followed by a
+	 * flush-for-ownership transaction:
+	 */
+	u64 old_val, real_val = 0;
+
+	do {
+		old_val = real_val;
+
+		real_val = atomic64_cmpxchg(ptr, old_val, new_val);
+
+	} while (real_val != old_val);
+
+	return old_val;
+}
+EXPORT_SYMBOL(atomic64_xchg);
+
+/**
+ * atomic64_set - set atomic64 variable
+ * @ptr: pointer to type atomic64_t
+ * @new_val: value to assign
+ *
+ * Atomically sets the value of @ptr to @new_val.
+ */
+void atomic64_set(atomic64_t *ptr, u64 new_val)
+{
+	atomic64_xchg(ptr, new_val);
+}
+EXPORT_SYMBOL(atomic64_set);
+
+/**
+ * atomic64_add_return - add and return
+ * @delta: integer value to add
+ * @ptr: pointer to type atomic64_t
+ *
+ * Atomically adds @delta to @ptr and returns @delta + *@ptr
+ */
+noinline u64 atomic64_add_return(u64 delta, atomic64_t *ptr)
+{
+	/*
+	 * Try first with a (possibly incorrect) assumption about
+	 * what we have there. We'll do two loops most likely,
+	 * but we'll get an ownership MESI transaction straight away
+	 * instead of a read transaction followed by a
+	 * flush-for-ownership transaction:
+	 */
+	u64 old_val, new_val, real_val = 0;
+
+	do {
+		old_val = real_val;
+		new_val = old_val + delta;
+
+		real_val = atomic64_cmpxchg(ptr, old_val, new_val);
+
+	} while (real_val != old_val);
+
+	return new_val;
+}
+EXPORT_SYMBOL(atomic64_add_return);
+
+u64 atomic64_sub_return(u64 delta, atomic64_t *ptr)
+{
+	return atomic64_add_return(-delta, ptr);
+}
+EXPORT_SYMBOL(atomic64_sub_return);
+
+u64 atomic64_inc_return(atomic64_t *ptr)
+{
+	return atomic64_add_return(1, ptr);
+}
+EXPORT_SYMBOL(atomic64_inc_return);
+
+u64 atomic64_dec_return(atomic64_t *ptr)
+{
+	return atomic64_sub_return(1, ptr);
+}
+EXPORT_SYMBOL(atomic64_dec_return);
+
+/**
+ * atomic64_add - add integer to atomic64 variable
+ * @delta: integer value to add
+ * @ptr: pointer to type atomic64_t
+ *
+ * Atomically adds @delta to @ptr.
+ */
+void atomic64_add(u64 delta, atomic64_t *ptr)
+{
+	atomic64_add_return(delta, ptr);
+}
+EXPORT_SYMBOL(atomic64_add);
+
+/**
+ * atomic64_sub - subtract the atomic64 variable
+ * @delta: integer value to subtract
+ * @ptr: pointer to type atomic64_t
+ *
+ * Atomically subtracts @delta from @ptr.
+ */
+void atomic64_sub(u64 delta, atomic64_t *ptr)
+{
+	atomic64_add(-delta, ptr);
+}
+EXPORT_SYMBOL(atomic64_sub);
+
+/**
+ * atomic64_sub_and_test - subtract value from variable and test result
+ * @delta: integer value to subtract
+ * @ptr: pointer to type atomic64_t
+ *
+ * Atomically subtracts @delta from @ptr and returns
+ * true if the result is zero, or false for all
+ * other cases.
+ */
+int atomic64_sub_and_test(u64 delta, atomic64_t *ptr)
+{
+	u64 new_val = atomic64_sub_return(delta, ptr);
+
+	return new_val == 0;
+}
+EXPORT_SYMBOL(atomic64_sub_and_test);
+
+/**
+ * atomic64_inc - increment atomic64 variable
+ * @ptr: pointer to type atomic64_t
+ *
+ * Atomically increments @ptr by 1.
+ */
+void atomic64_inc(atomic64_t *ptr)
+{
+	atomic64_add(1, ptr);
+}
+EXPORT_SYMBOL(atomic64_inc);
+
+/**
+ * atomic64_dec - decrement atomic64 variable
+ * @ptr: pointer to type atomic64_t
+ *
+ * Atomically decrements @ptr by 1.
+ */
+void atomic64_dec(atomic64_t *ptr)
+{
+	atomic64_sub(1, ptr);
+}
+EXPORT_SYMBOL(atomic64_dec);
+
+/**
+ * atomic64_dec_and_test - decrement and test
+ * @ptr: pointer to type atomic64_t
+ *
+ * Atomically decrements @ptr by 1 and
+ * returns true if the result is 0, or false for all other
+ * cases.
+ */
+int atomic64_dec_and_test(atomic64_t *ptr)
+{
+	return atomic64_sub_and_test(1, ptr);
+}
+EXPORT_SYMBOL(atomic64_dec_and_test);
+
+/**
+ * atomic64_inc_and_test - increment and test
+ * @ptr: pointer to type atomic64_t
+ *
+ * Atomically increments @ptr by 1
+ * and returns true if the result is zero, or false for all
+ * other cases.
+ */
+int atomic64_inc_and_test(atomic64_t *ptr)
+{
+	return atomic64_sub_and_test(-1, ptr);
+}
+EXPORT_SYMBOL(atomic64_inc_and_test);
+
+/**
+ * atomic64_add_negative - add and test if negative
+ * @delta: integer value to add
+ * @ptr: pointer to type atomic64_t
+ *
+ * Atomically adds @delta to @ptr and returns true
+ * if the result is negative, or false when
+ * result is greater than or equal to zero.
+ */
+int atomic64_add_negative(u64 delta, atomic64_t *ptr)
+{
+	s64 new_val = atomic64_add_return(delta, ptr);
+
+	return new_val < 0;
+}
+EXPORT_SYMBOL(atomic64_add_negative);
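
For context on why the 32-bit side goes to the trouble of cmpxchg8b even for plain reads and writes: on i386 a 64-bit load or store compiles to two 32-bit memory operations, so a concurrent reader can observe a torn, half-updated value. A userspace sketch that usually demonstrates the tear (compile with gcc -m32 -pthread; the result is timing-dependent, so the observed count varies, and is zero on a 64-bit build where plain 8-byte accesses are atomic):

/* Demonstrates torn 64-bit reads on a 32-bit build. */
#include <pthread.h>
#include <stdint.h>
#include <stdio.h>

static volatile uint64_t plain;	/* updated non-atomically */

static void *writer(void *arg)
{
	long i;

	(void)arg;
	for (i = 0; i < 50000000L; i++)
		plain = (i & 1) ? 0xffffffffffffffffULL : 0;
	return NULL;
}

int main(void)
{
	pthread_t t;
	long i, tears = 0;

	pthread_create(&t, NULL, writer, NULL);
	for (i = 0; i < 50000000L; i++) {
		uint64_t v = plain;

		if (v != 0 && v != 0xffffffffffffffffULL)
			tears++;	/* caught a half-written value */
	}
	pthread_join(t, NULL);
	printf("torn reads observed: %ld\n", tears);
	return 0;
}
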