author    Peter Zijlstra <peterz@infradead.org>    2016-04-17 18:52:13 -0400
committer Ingo Molnar <mingo@kernel.org>           2016-06-16 04:48:31 -0400
commit    e12133324b7daaa176bb687c1eb59e1a6b203da4
tree      a828a085d42435e26af1faf579dcc369659392e0   /include/linux/atomic.h
parent    6dc25876cdb17fd3906504dcabb9e537f8433000
locking/atomic: Fix atomic64_relaxed() bits
We should only expand the atomic64 relaxed bits once we've included
all relevant headers. So move it down until after we potentially
include asm-generic/atomic64.h.
In practice this has made no difference so far, since the generic
bits do not define _relaxed versions.
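
To make the ordering concern concrete, here is a minimal sketch of the fallback
pattern involved (hypothetical op name my_op, not kernel code). The block keys
off which macros are already defined, so expanding it before the header that
might provide the _relaxed variant silently takes the wrong branch:

    /* Fallback block, analogous to the atomic64 block moved by this patch. */
    #ifndef my_op_relaxed
    /* No relaxed variant seen yet: alias every variant to the ordered op. */
    #define my_op_relaxed  my_op
    #define my_op_acquire  my_op
    #define my_op_release  my_op
    #else
    /* A relaxed variant exists: derive the ordered variants from it. */
    #define my_op_acquire(...)  __atomic_op_acquire(my_op, __VA_ARGS__)
    #define my_op_release(...)  __atomic_op_release(my_op, __VA_ARGS__)
    #define my_op(...)          __atomic_op_fence(my_op, __VA_ARGS__)
    #endif

    /*
     * If this block is expanded before the header that might #define
     * my_op_relaxed, the #ifndef branch is taken unconditionally and the
     * acquire/release variants end up aliasing the fully ordered op
     * instead of being built from the relaxed one.
     */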
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Cc: Andrew Morton <akpm@linux-foundation.org>
Cc: Davidlohr Bueso <dave@stgolabs.net>
Cc: Frederic Weisbecker <fweisbec@gmail.com>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Paul E. McKenney <paulmck@linux.vnet.ibm.com>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Cc: Will Deacon <will.deacon@arm.com>
Cc: linux-arch@vger.kernel.org
Cc: linux-kernel@vger.kernel.org
Signed-off-by: Ingo Molnar <mingo@kernel.org>
Diffstat (limited to 'include/linux/atomic.h')
-rw-r--r--  include/linux/atomic.h  306
1 file changed, 153 insertions(+), 153 deletions(-)
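
For context, the acquire/release/fence variants in the block being moved are
derived from the relaxed op via the __atomic_op_acquire()/__atomic_op_release()/
__atomic_op_fence() helpers defined earlier in include/linux/atomic.h; at the
time of this commit they looked roughly like the following sketch (simplified
whitespace, not a verbatim copy):

    /* Acquire variant: relaxed op, then a barrier after the access. */
    #define __atomic_op_acquire(op, args...)                            \
    ({                                                                  \
            typeof(op##_relaxed(args)) __ret = op##_relaxed(args);      \
            smp_mb__after_atomic();                                     \
            __ret;                                                      \
    })

    /* Release variant: a barrier before the access, then the relaxed op. */
    #define __atomic_op_release(op, args...)                            \
    ({                                                                  \
            smp_mb__before_atomic();                                    \
            op##_relaxed(args);                                         \
    })

    /* Fully ordered variant: barriers on both sides of the relaxed op. */
    #define __atomic_op_fence(op, args...)                              \
    ({                                                                  \
            typeof(op##_relaxed(args)) __ret;                           \
            smp_mb__before_atomic();                                    \
            __ret = op##_relaxed(args);                                 \
            smp_mb__after_atomic();                                     \
            __ret;                                                      \
    })

Since these wrappers token-paste _relaxed onto the op name, the #ifndef logic
below only gives correct results once every header that could supply an
atomic64_*_relaxed implementation has been included.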
diff --git a/include/linux/atomic.h b/include/linux/atomic.h
index e451534fe54d..351f89e1d15c 100644
--- a/include/linux/atomic.h
+++ b/include/linux/atomic.h
@@ -211,159 +211,6 @@
 #endif
 #endif /* atomic_cmpxchg_relaxed */
 
-#ifndef atomic64_read_acquire
-#define atomic64_read_acquire(v)	smp_load_acquire(&(v)->counter)
-#endif
-
-#ifndef atomic64_set_release
-#define atomic64_set_release(v, i)	smp_store_release(&(v)->counter, (i))
-#endif
-
-/* atomic64_add_return_relaxed */
-#ifndef atomic64_add_return_relaxed
-#define atomic64_add_return_relaxed	atomic64_add_return
-#define atomic64_add_return_acquire	atomic64_add_return
-#define atomic64_add_return_release	atomic64_add_return
-
-#else /* atomic64_add_return_relaxed */
-
-#ifndef atomic64_add_return_acquire
-#define atomic64_add_return_acquire(...) \
-	__atomic_op_acquire(atomic64_add_return, __VA_ARGS__)
-#endif
-
-#ifndef atomic64_add_return_release
-#define atomic64_add_return_release(...) \
-	__atomic_op_release(atomic64_add_return, __VA_ARGS__)
-#endif
-
-#ifndef atomic64_add_return
-#define atomic64_add_return(...) \
-	__atomic_op_fence(atomic64_add_return, __VA_ARGS__)
-#endif
-#endif /* atomic64_add_return_relaxed */
-
-/* atomic64_inc_return_relaxed */
-#ifndef atomic64_inc_return_relaxed
-#define atomic64_inc_return_relaxed	atomic64_inc_return
-#define atomic64_inc_return_acquire	atomic64_inc_return
-#define atomic64_inc_return_release	atomic64_inc_return
-
-#else /* atomic64_inc_return_relaxed */
-
-#ifndef atomic64_inc_return_acquire
-#define atomic64_inc_return_acquire(...) \
-	__atomic_op_acquire(atomic64_inc_return, __VA_ARGS__)
-#endif
-
-#ifndef atomic64_inc_return_release
-#define atomic64_inc_return_release(...) \
-	__atomic_op_release(atomic64_inc_return, __VA_ARGS__)
-#endif
-
-#ifndef atomic64_inc_return
-#define atomic64_inc_return(...) \
-	__atomic_op_fence(atomic64_inc_return, __VA_ARGS__)
-#endif
-#endif /* atomic64_inc_return_relaxed */
-
-
-/* atomic64_sub_return_relaxed */
-#ifndef atomic64_sub_return_relaxed
-#define atomic64_sub_return_relaxed	atomic64_sub_return
-#define atomic64_sub_return_acquire	atomic64_sub_return
-#define atomic64_sub_return_release	atomic64_sub_return
-
-#else /* atomic64_sub_return_relaxed */
-
-#ifndef atomic64_sub_return_acquire
-#define atomic64_sub_return_acquire(...) \
-	__atomic_op_acquire(atomic64_sub_return, __VA_ARGS__)
-#endif
-
-#ifndef atomic64_sub_return_release
-#define atomic64_sub_return_release(...) \
-	__atomic_op_release(atomic64_sub_return, __VA_ARGS__)
-#endif
-
-#ifndef atomic64_sub_return
-#define atomic64_sub_return(...) \
-	__atomic_op_fence(atomic64_sub_return, __VA_ARGS__)
-#endif
-#endif /* atomic64_sub_return_relaxed */
-
-/* atomic64_dec_return_relaxed */
-#ifndef atomic64_dec_return_relaxed
-#define atomic64_dec_return_relaxed	atomic64_dec_return
-#define atomic64_dec_return_acquire	atomic64_dec_return
-#define atomic64_dec_return_release	atomic64_dec_return
-
-#else /* atomic64_dec_return_relaxed */
-
-#ifndef atomic64_dec_return_acquire
-#define atomic64_dec_return_acquire(...) \
-	__atomic_op_acquire(atomic64_dec_return, __VA_ARGS__)
-#endif
-
-#ifndef atomic64_dec_return_release
-#define atomic64_dec_return_release(...) \
-	__atomic_op_release(atomic64_dec_return, __VA_ARGS__)
-#endif
-
-#ifndef atomic64_dec_return
-#define atomic64_dec_return(...) \
-	__atomic_op_fence(atomic64_dec_return, __VA_ARGS__)
-#endif
-#endif /* atomic64_dec_return_relaxed */
-
-/* atomic64_xchg_relaxed */
-#ifndef atomic64_xchg_relaxed
-#define atomic64_xchg_relaxed	atomic64_xchg
-#define atomic64_xchg_acquire	atomic64_xchg
-#define atomic64_xchg_release	atomic64_xchg
-
-#else /* atomic64_xchg_relaxed */
-
-#ifndef atomic64_xchg_acquire
-#define atomic64_xchg_acquire(...) \
-	__atomic_op_acquire(atomic64_xchg, __VA_ARGS__)
-#endif
-
-#ifndef atomic64_xchg_release
-#define atomic64_xchg_release(...) \
-	__atomic_op_release(atomic64_xchg, __VA_ARGS__)
-#endif
-
-#ifndef atomic64_xchg
-#define atomic64_xchg(...) \
-	__atomic_op_fence(atomic64_xchg, __VA_ARGS__)
-#endif
-#endif /* atomic64_xchg_relaxed */
-
-/* atomic64_cmpxchg_relaxed */
-#ifndef atomic64_cmpxchg_relaxed
-#define atomic64_cmpxchg_relaxed	atomic64_cmpxchg
-#define atomic64_cmpxchg_acquire	atomic64_cmpxchg
-#define atomic64_cmpxchg_release	atomic64_cmpxchg
-
-#else /* atomic64_cmpxchg_relaxed */
-
-#ifndef atomic64_cmpxchg_acquire
-#define atomic64_cmpxchg_acquire(...) \
-	__atomic_op_acquire(atomic64_cmpxchg, __VA_ARGS__)
-#endif
-
-#ifndef atomic64_cmpxchg_release
-#define atomic64_cmpxchg_release(...) \
-	__atomic_op_release(atomic64_cmpxchg, __VA_ARGS__)
-#endif
-
-#ifndef atomic64_cmpxchg
-#define atomic64_cmpxchg(...) \
-	__atomic_op_fence(atomic64_cmpxchg, __VA_ARGS__)
-#endif
-#endif /* atomic64_cmpxchg_relaxed */
-
 /* cmpxchg_relaxed */
 #ifndef cmpxchg_relaxed
 #define cmpxchg_relaxed	cmpxchg
@@ -583,6 +430,159 @@ static inline int atomic_fetch_or(int mask, atomic_t *p)
 #include <asm-generic/atomic64.h>
 #endif
 
+#ifndef atomic64_read_acquire
+#define atomic64_read_acquire(v)	smp_load_acquire(&(v)->counter)
+#endif
+
+#ifndef atomic64_set_release
+#define atomic64_set_release(v, i)	smp_store_release(&(v)->counter, (i))
+#endif
+
+/* atomic64_add_return_relaxed */
+#ifndef atomic64_add_return_relaxed
+#define atomic64_add_return_relaxed	atomic64_add_return
+#define atomic64_add_return_acquire	atomic64_add_return
+#define atomic64_add_return_release	atomic64_add_return
+
+#else /* atomic64_add_return_relaxed */
+
+#ifndef atomic64_add_return_acquire
+#define atomic64_add_return_acquire(...) \
+	__atomic_op_acquire(atomic64_add_return, __VA_ARGS__)
+#endif
+
+#ifndef atomic64_add_return_release
+#define atomic64_add_return_release(...) \
+	__atomic_op_release(atomic64_add_return, __VA_ARGS__)
+#endif
+
+#ifndef atomic64_add_return
+#define atomic64_add_return(...) \
+	__atomic_op_fence(atomic64_add_return, __VA_ARGS__)
+#endif
+#endif /* atomic64_add_return_relaxed */
+
+/* atomic64_inc_return_relaxed */
+#ifndef atomic64_inc_return_relaxed
+#define atomic64_inc_return_relaxed	atomic64_inc_return
+#define atomic64_inc_return_acquire	atomic64_inc_return
+#define atomic64_inc_return_release	atomic64_inc_return
+
+#else /* atomic64_inc_return_relaxed */
+
+#ifndef atomic64_inc_return_acquire
+#define atomic64_inc_return_acquire(...) \
+	__atomic_op_acquire(atomic64_inc_return, __VA_ARGS__)
+#endif
+
+#ifndef atomic64_inc_return_release
+#define atomic64_inc_return_release(...) \
+	__atomic_op_release(atomic64_inc_return, __VA_ARGS__)
+#endif
+
+#ifndef atomic64_inc_return
+#define atomic64_inc_return(...) \
+	__atomic_op_fence(atomic64_inc_return, __VA_ARGS__)
+#endif
+#endif /* atomic64_inc_return_relaxed */
+
+
+/* atomic64_sub_return_relaxed */
+#ifndef atomic64_sub_return_relaxed
+#define atomic64_sub_return_relaxed	atomic64_sub_return
+#define atomic64_sub_return_acquire	atomic64_sub_return
+#define atomic64_sub_return_release	atomic64_sub_return
+
+#else /* atomic64_sub_return_relaxed */
+
+#ifndef atomic64_sub_return_acquire
+#define atomic64_sub_return_acquire(...) \
+	__atomic_op_acquire(atomic64_sub_return, __VA_ARGS__)
+#endif
+
+#ifndef atomic64_sub_return_release
+#define atomic64_sub_return_release(...) \
+	__atomic_op_release(atomic64_sub_return, __VA_ARGS__)
+#endif
+
+#ifndef atomic64_sub_return
+#define atomic64_sub_return(...) \
+	__atomic_op_fence(atomic64_sub_return, __VA_ARGS__)
+#endif
+#endif /* atomic64_sub_return_relaxed */
+
+/* atomic64_dec_return_relaxed */
+#ifndef atomic64_dec_return_relaxed
+#define atomic64_dec_return_relaxed	atomic64_dec_return
+#define atomic64_dec_return_acquire	atomic64_dec_return
+#define atomic64_dec_return_release	atomic64_dec_return
+
+#else /* atomic64_dec_return_relaxed */
+
+#ifndef atomic64_dec_return_acquire
+#define atomic64_dec_return_acquire(...) \
+	__atomic_op_acquire(atomic64_dec_return, __VA_ARGS__)
+#endif
+
+#ifndef atomic64_dec_return_release
+#define atomic64_dec_return_release(...) \
+	__atomic_op_release(atomic64_dec_return, __VA_ARGS__)
+#endif
+
+#ifndef atomic64_dec_return
+#define atomic64_dec_return(...) \
+	__atomic_op_fence(atomic64_dec_return, __VA_ARGS__)
+#endif
+#endif /* atomic64_dec_return_relaxed */
+
+/* atomic64_xchg_relaxed */
+#ifndef atomic64_xchg_relaxed
+#define atomic64_xchg_relaxed	atomic64_xchg
+#define atomic64_xchg_acquire	atomic64_xchg
+#define atomic64_xchg_release	atomic64_xchg
+
+#else /* atomic64_xchg_relaxed */
+
+#ifndef atomic64_xchg_acquire
+#define atomic64_xchg_acquire(...) \
+	__atomic_op_acquire(atomic64_xchg, __VA_ARGS__)
+#endif
+
+#ifndef atomic64_xchg_release
+#define atomic64_xchg_release(...) \
+	__atomic_op_release(atomic64_xchg, __VA_ARGS__)
+#endif
+
+#ifndef atomic64_xchg
+#define atomic64_xchg(...) \
+	__atomic_op_fence(atomic64_xchg, __VA_ARGS__)
+#endif
+#endif /* atomic64_xchg_relaxed */
+
+/* atomic64_cmpxchg_relaxed */
+#ifndef atomic64_cmpxchg_relaxed
+#define atomic64_cmpxchg_relaxed	atomic64_cmpxchg
+#define atomic64_cmpxchg_acquire	atomic64_cmpxchg
+#define atomic64_cmpxchg_release	atomic64_cmpxchg
+
+#else /* atomic64_cmpxchg_relaxed */
+
+#ifndef atomic64_cmpxchg_acquire
+#define atomic64_cmpxchg_acquire(...) \
+	__atomic_op_acquire(atomic64_cmpxchg, __VA_ARGS__)
+#endif
+
+#ifndef atomic64_cmpxchg_release
+#define atomic64_cmpxchg_release(...) \
+	__atomic_op_release(atomic64_cmpxchg, __VA_ARGS__)
+#endif
+
+#ifndef atomic64_cmpxchg
+#define atomic64_cmpxchg(...) \
+	__atomic_op_fence(atomic64_cmpxchg, __VA_ARGS__)
+#endif
+#endif /* atomic64_cmpxchg_relaxed */
+
 #ifndef atomic64_andnot
 static inline void atomic64_andnot(long long i, atomic64_t *v)
 {
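
As a closing illustration, a hypothetical usage sketch (not from the kernel
tree) of the variants the moved block provides; each call performs the same
update and differs only in its memory-ordering guarantee. It assumes
<linux/atomic.h> and <linux/types.h> are available:

    static atomic64_t nr_events = ATOMIC64_INIT(0);
    static atomic64_t owner     = ATOMIC64_INIT(0);

    /* Pure statistics counter: no ordering required, relaxed is enough. */
    static inline long long count_event(void)
    {
            return atomic64_add_return_relaxed(1, &nr_events);
    }

    /* Claim an object: acquire ordering so later reads see it consistently. */
    static inline bool try_claim(long long me)
    {
            return atomic64_cmpxchg_acquire(&owner, 0, me) == 0;
    }

    /* Drop the claim: release ordering so prior writes are visible first. */
    static inline void release_claim(void)
    {
            atomic64_set_release(&owner, 0);
    }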
