author      Petr Tesarik <ptesarik@suse.cz>      2010-09-15 18:35:48 -0400
committer   Tony Luck <tony.luck@intel.com>      2010-09-15 18:35:48 -0400
commit      2d2b6901649a62977452be85df53eda2412def24 (patch)
tree        622553b837759352a334a350d52f1f2d7652c9c1 /arch/ia64
parent      bfa88ea7ee9e6b4fd673e45a8cc0a8e0b7ef4761 (diff)
[IA64] Optimize ticket spinlocks in fsys_rt_sigprocmask
Tony's fix (f574c843191728d9407b766a027f779dcd27b272) has a small bug,
it incorrectly uses "r3" as a scratch register in the first of the two
unlock paths ... it is also inefficient. Optimize the fast path again.
Signed-off-by: Petr Tesarik <ptesarik@suse.cz>
Signed-off-by: Tony Luck <tony.luck@intel.com>
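For readers who do not follow ia64 assembly, here is a minimal, stand-alone C sketch of the ticket-lock scheme this fsyscall code open-codes: the 32-bit lock word keeps the "next" ticket in bits 0-14 and the "serving" ticket in bits 17-31 (the 17 and 15 are visible in the extr.u operands below), the lock is free when the two agree, trylock bumps "next" with a cmpxchg, and unlock advances "serving" with a single 2-byte release store. The type and function names, the C11 atomics, and the little-endian union layout are all illustrative assumptions, not the kernel's actual arch/ia64 code.

#include <stdatomic.h>
#include <stdbool.h>
#include <stdint.h>

#define TICKET_SHIFT 17                 /* "serving" field sits in bits 17..31 */
#define TICKET_MASK  ((1u << 15) - 1)   /* tickets are 15 bits wide            */

/*
 * Illustrative lock layout; assumes a little-endian 32-bit word and
 * lock-free 16/32-bit atomics, mirroring how the assembly treats [r31].
 */
typedef union {
        _Atomic uint32_t word;                /* whole word: ld4 / cmpxchg4.acq      */
        struct {
                _Atomic uint16_t next_half;   /* low half: 15-bit "next" ticket      */
                _Atomic uint16_t serve_half;  /* high half: "serving" shifted by one */
        };
} ticket_lock_t;

/*
 * Fast-path trylock as in the patched code: one load, one compare, one
 * cmpxchg.  On success it also returns the halfword the unlock will store
 * (the value the patch keeps in r20).
 */
static bool ticket_trylock(ticket_lock_t *lock, uint16_t *unlock_val)
{
        uint32_t old = atomic_load_explicit(&lock->word, memory_order_relaxed);
        uint32_t next = old & TICKET_MASK;                      /* extr.u r18=r17,0,15 */
        uint32_t serving = (old >> TICKET_SHIFT) & TICKET_MASK; /* extr.u r9=r17,17,15 */

        if (next != serving)            /* cmp.eq p6,p7=r9,r18 -> .lock_contention */
                return false;

        /* Bump "next": adds r19=1,r17, installed by cmpxchg4.acq. */
        if (!atomic_compare_exchange_strong_explicit(&lock->word, &old, old + 1,
                                                     memory_order_acquire,
                                                     memory_order_relaxed))
                return false;           /* raced with another CPU */

        /* dep.z r20=r19,1,15: next "serving" value, pre-shifted for the store. */
        *unlock_val = (uint16_t)(((old + 1) & TICKET_MASK) << 1);
        return true;
}

/* Unlock is a single 2-byte release store of the precomputed value (st2.rel). */
static void ticket_unlock(ticket_lock_t *lock, uint16_t unlock_val)
{
        atomic_store_explicit(&lock->serve_half, unlock_val, memory_order_release);
}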
Diffstat (limited to 'arch/ia64')
-rw-r--r--   arch/ia64/kernel/fsys.S   42
1 files changed, 11 insertions, 31 deletions
diff --git a/arch/ia64/kernel/fsys.S b/arch/ia64/kernel/fsys.S
index 471a1e783aca..331d42bda77a 100644
--- a/arch/ia64/kernel/fsys.S
+++ b/arch/ia64/kernel/fsys.S
@@ -420,34 +420,31 @@ EX(.fail_efault, ld8 r14=[r33])         // r14 <- *set
        ;;

        RSM_PSR_I(p0, r18, r19)                 // mask interrupt delivery
-       mov ar.ccv=0
        andcm r14=r14,r17                       // filter out SIGKILL & SIGSTOP
+       mov r8=EINVAL                           // default to EINVAL

 #ifdef CONFIG_SMP
        // __ticket_spin_trylock(r31)
        ld4 r17=[r31]
-       mov r8=EINVAL                           // default to EINVAL
-       ;;
-       extr r9=r17,17,15
        ;;
-       xor r18=r17,r9
+       mov.m ar.ccv=r17
+       extr.u r9=r17,17,15
        adds r19=1,r17
+       extr.u r18=r17,0,15
        ;;
-       extr.u r18=r18,0,15
+       cmp.eq p6,p7=r9,r18
        ;;
-       cmp.eq p0,p7=0,r18
+(p6)   cmpxchg4.acq r9=[r31],r19,ar.ccv
+(p6)   dep.z r20=r19,1,15                      // next serving ticket for unlock
(p7)    br.cond.spnt.many .lock_contention
-       mov.m ar.ccv=r17
-       ;;
-       cmpxchg4.acq r9=[r31],r19,ar.ccv
        ;;
        cmp4.eq p0,p7=r9,r17
+       adds r31=2,r31
(p7)    br.cond.spnt.many .lock_contention
        ld8 r3=[r2]                             // re-read current->blocked now that we hold the lock
        ;;
 #else
        ld8 r3=[r2]                             // re-read current->blocked now that we hold the lock
-       mov r8=EINVAL                           // default to EINVAL
 #endif
        add r18=IA64_TASK_PENDING_OFFSET+IA64_SIGPENDING_SIGNAL_OFFSET,r16
        add r19=IA64_TASK_SIGNAL_OFFSET,r16
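To follow the rewritten trylock: r17 is the lock word loaded by ld4; extr.u pulls the "serving" ticket (bits 17-31) into r9 and the "next" ticket (bits 0-14) into r18; cmp.eq sends contended callers to .lock_contention through p7, while on the free path the p6-predicated cmpxchg4.acq installs r19 = r17+1 (taking the next ticket) and dep.z precomputes in r20 the halfword that the unlock will store. The cmp4.eq afterwards compares the value returned by the cmpxchg with r17 to catch anyone who changed the word between the ld4 and the cmpxchg, and adds r31=2,r31 advances the lock pointer to the halfword holding the "serving" field so the unlock paths can store to it directly.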
@@ -503,16 +500,8 @@ EX(.fail_efault, ld8 r14=[r33])          // r14 <- *set

 #ifdef CONFIG_SMP
        // __ticket_spin_unlock(r31)
-       adds r31=2,r31
-       ;;
-       ld2.bias r2=[r31]
-       mov r3=65534
-       ;;
-       adds r2=2,r2
-       ;;
-       and r3=r3,r2
-       ;;
-       st2.rel [r31]=r3
+       st2.rel [r31]=r20
+       mov r20=0                               // i must not leak kernel bits...
 #endif
        SSM_PSR_I(p0, p9, r31)
        ;;
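The unlock is where the savings show up: the old sequence re-read the serving halfword with ld2.bias, bumped it by 2, masked it with 65534 to keep the low bit clear, and only then stored it, with stop bits between each step; the new one is a single st2.rel of the value already prepared in r20 at lock time. The mov r20=0 that follows scrubs the scratch register before the fsyscall path returns to user mode, which is what the "i must not leak kernel bits" comment is about.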
@@ -535,16 +524,7 @@ EX(.fail_efault, (p15) st8 [r34]=r3)
 .sig_pending:
 #ifdef CONFIG_SMP
        // __ticket_spin_unlock(r31)
-       adds r31=2,r31
-       ;;
-       ld2.bias r2=[r31]
-       mov r3=65534
-       ;;
-       adds r2=2,r2
-       ;;
-       and r3=r3,r2
-       ;;
-       st2.rel [r31]=r3
+       st2.rel [r31]=r20                       // release the lock
 #endif
        SSM_PSR_I(p0, p9, r17)
        ;;