Diffstat (limited to 'include/asm-s390')
-rw-r--r--  include/asm-s390/atomic.h    |   8
-rw-r--r--  include/asm-s390/bitops.h    | 440
-rw-r--r--  include/asm-s390/lowcore.h   |   4
-rw-r--r--  include/asm-s390/processor.h |   5
-rw-r--r--  include/asm-s390/spinlock.h  | 252
5 files changed, 227 insertions, 482 deletions
diff --git a/include/asm-s390/atomic.h b/include/asm-s390/atomic.h
index d5a05cf47168..9d86ba6f12d0 100644
--- a/include/asm-s390/atomic.h
+++ b/include/asm-s390/atomic.h
@@ -123,19 +123,19 @@ typedef struct {
 #define atomic64_read(v)	((v)->counter)
 #define atomic64_set(v,i)	(((v)->counter) = (i))
 
-static __inline__ void atomic64_add(int i, atomic64_t * v)
+static __inline__ void atomic64_add(long long i, atomic64_t * v)
 {
 	__CSG_LOOP(v, i, "agr");
 }
-static __inline__ long long atomic64_add_return(int i, atomic64_t * v)
+static __inline__ long long atomic64_add_return(long long i, atomic64_t * v)
 {
 	return __CSG_LOOP(v, i, "agr");
 }
-static __inline__ long long atomic64_add_negative(int i, atomic64_t * v)
+static __inline__ long long atomic64_add_negative(long long i, atomic64_t * v)
 {
 	return __CSG_LOOP(v, i, "agr") < 0;
 }
-static __inline__ void atomic64_sub(int i, atomic64_t * v)
+static __inline__ void atomic64_sub(long long i, atomic64_t * v)
 {
 	__CSG_LOOP(v, i, "sgr");
 }
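
Note: the hunk above only widens the argument type; the actual update is hidden in __CSG_LOOP, whose body is not part of this diff. As a rough, non-authoritative sketch of the general compare-and-swap retry pattern such a macro implements (plain C, using a GCC builtin in place of the s390 csg instruction; all names here are illustrative):

typedef struct { long long counter; } atomic64_sketch_t;

/* Retry until the compare-and-swap sees an unchanged old value; at that
 * point the addition has been applied atomically. */
static inline long long atomic64_add_sketch(long long i, atomic64_sketch_t *v)
{
	long long old, new;

	do {
		old = v->counter;		/* snapshot current value    */
		new = old + i;			/* compute the updated value */
	} while (!__sync_bool_compare_and_swap(&v->counter, old, new));
	return new;
}

The subtraction variants follow the same shape with "old - i"; only the operation fed to the retry loop changes.
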
diff --git a/include/asm-s390/bitops.h b/include/asm-s390/bitops.h
index 16bb08499c7f..8651524217fd 100644
--- a/include/asm-s390/bitops.h
+++ b/include/asm-s390/bitops.h
@@ -527,13 +527,64 @@ __constant_test_bit(unsigned long nr, const volatile unsigned long *addr) {
 	__constant_test_bit((nr),(addr)) : \
 	__test_bit((nr),(addr)) )
 
-#ifndef __s390x__
+/*
+ * ffz = Find First Zero in word. Undefined if no zero exists,
+ * so code should check against ~0UL first..
+ */
+static inline unsigned long ffz(unsigned long word)
+{
+	unsigned long bit = 0;
+
+#ifdef __s390x__
+	if (likely((word & 0xffffffff) == 0xffffffff)) {
+		word >>= 32;
+		bit += 32;
+	}
+#endif
+	if (likely((word & 0xffff) == 0xffff)) {
+		word >>= 16;
+		bit += 16;
+	}
+	if (likely((word & 0xff) == 0xff)) {
+		word >>= 8;
+		bit += 8;
+	}
+	return bit + _zb_findmap[word & 0xff];
+}
+
+/*
+ * __ffs = find first bit in word. Undefined if no bit exists,
+ * so code should check against 0UL first..
+ */
+static inline unsigned long __ffs (unsigned long word)
+{
+	unsigned long bit = 0;
+
+#ifdef __s390x__
+	if (likely((word & 0xffffffff) == 0)) {
+		word >>= 32;
+		bit += 32;
+	}
+#endif
+	if (likely((word & 0xffff) == 0)) {
+		word >>= 16;
+		bit += 16;
+	}
+	if (likely((word & 0xff) == 0)) {
+		word >>= 8;
+		bit += 8;
+	}
+	return bit + _sb_findmap[word & 0xff];
+}
 
 /*
  * Find-bit routines..
  */
+
+#ifndef __s390x__
+
 static inline int
-find_first_zero_bit(const unsigned long * addr, unsigned int size)
+find_first_zero_bit(const unsigned long * addr, unsigned long size)
 {
 	typedef struct { long _[__BITOPS_WORDS(size)]; } addrtype;
 	unsigned long cmp, count;
@@ -548,7 +599,7 @@ find_first_zero_bit(const unsigned long * addr, unsigned int size)
548 " srl %2,5\n" 599 " srl %2,5\n"
549 "0: c %1,0(%0,%4)\n" 600 "0: c %1,0(%0,%4)\n"
550 " jne 1f\n" 601 " jne 1f\n"
551 " ahi %0,4\n" 602 " la %0,4(%0)\n"
552 " brct %2,0b\n" 603 " brct %2,0b\n"
553 " lr %0,%3\n" 604 " lr %0,%3\n"
554 " j 4f\n" 605 " j 4f\n"
@@ -574,7 +625,7 @@ find_first_zero_bit(const unsigned long * addr, unsigned int size)
 }
 
 static inline int
-find_first_bit(const unsigned long * addr, unsigned int size)
+find_first_bit(const unsigned long * addr, unsigned long size)
 {
 	typedef struct { long _[__BITOPS_WORDS(size)]; } addrtype;
 	unsigned long cmp, count;
@@ -589,7 +640,7 @@ find_first_bit(const unsigned long * addr, unsigned int size)
589 " srl %2,5\n" 640 " srl %2,5\n"
590 "0: c %1,0(%0,%4)\n" 641 "0: c %1,0(%0,%4)\n"
591 " jne 1f\n" 642 " jne 1f\n"
592 " ahi %0,4\n" 643 " la %0,4(%0)\n"
593 " brct %2,0b\n" 644 " brct %2,0b\n"
594 " lr %0,%3\n" 645 " lr %0,%3\n"
595 " j 4f\n" 646 " j 4f\n"
@@ -614,89 +665,8 @@ find_first_bit(const unsigned long * addr, unsigned int size)
 	return (res < size) ? res : size;
 }
 
-static inline int
-find_next_zero_bit (const unsigned long * addr, int size, int offset)
-{
-	unsigned long * p = ((unsigned long *) addr) + (offset >> 5);
-	unsigned long bitvec, reg;
-	int set, bit = offset & 31, res;
-
-	if (bit) {
-		/*
-		 * Look for zero in first word
-		 */
-		bitvec = (*p) >> bit;
-		__asm__("   slr  %0,%0\n"
-			"   lhi  %2,0xff\n"
-			"   tml  %1,0xffff\n"
-			"   jno  0f\n"
-			"   ahi  %0,16\n"
-			"   srl  %1,16\n"
-			"0: tml  %1,0x00ff\n"
-			"   jno  1f\n"
-			"   ahi  %0,8\n"
-			"   srl  %1,8\n"
-			"1: nr   %1,%2\n"
-			"   ic   %1,0(%1,%3)\n"
-			"   alr  %0,%1"
-			: "=&d" (set), "+a" (bitvec), "=&d" (reg)
-			: "a" (&_zb_findmap) : "cc" );
-		if (set < (32 - bit))
-			return set + offset;
-		offset += 32 - bit;
-		p++;
-	}
-	/*
-	 * No zero yet, search remaining full words for a zero
-	 */
-	res = find_first_zero_bit (p, size - 32 * (p - (unsigned long *) addr));
-	return (offset + res);
-}
-
-static inline int
-find_next_bit (const unsigned long * addr, int size, int offset)
-{
-	unsigned long * p = ((unsigned long *) addr) + (offset >> 5);
-	unsigned long bitvec, reg;
-	int set, bit = offset & 31, res;
-
-	if (bit) {
-		/*
-		 * Look for set bit in first word
-		 */
-		bitvec = (*p) >> bit;
-		__asm__("   slr  %0,%0\n"
-			"   lhi  %2,0xff\n"
-			"   tml  %1,0xffff\n"
-			"   jnz  0f\n"
-			"   ahi  %0,16\n"
-			"   srl  %1,16\n"
-			"0: tml  %1,0x00ff\n"
-			"   jnz  1f\n"
-			"   ahi  %0,8\n"
-			"   srl  %1,8\n"
-			"1: nr   %1,%2\n"
-			"   ic   %1,0(%1,%3)\n"
-			"   alr  %0,%1"
-			: "=&d" (set), "+a" (bitvec), "=&d" (reg)
-			: "a" (&_sb_findmap) : "cc" );
-		if (set < (32 - bit))
-			return set + offset;
-		offset += 32 - bit;
-		p++;
-	}
-	/*
-	 * No set bit yet, search remaining full words for a bit
-	 */
-	res = find_first_bit (p, size - 32 * (p - (unsigned long *) addr));
-	return (offset + res);
-}
-
 #else /* __s390x__ */
 
-/*
- * Find-bit routines..
- */
 static inline unsigned long
 find_first_zero_bit(const unsigned long * addr, unsigned long size)
 {
@@ -712,7 +682,7 @@ find_first_zero_bit(const unsigned long * addr, unsigned long size)
712 " srlg %2,%2,6\n" 682 " srlg %2,%2,6\n"
713 "0: cg %1,0(%0,%4)\n" 683 "0: cg %1,0(%0,%4)\n"
714 " jne 1f\n" 684 " jne 1f\n"
715 " aghi %0,8\n" 685 " la %0,8(%0)\n"
716 " brct %2,0b\n" 686 " brct %2,0b\n"
717 " lgr %0,%3\n" 687 " lgr %0,%3\n"
718 " j 5f\n" 688 " j 5f\n"
@@ -785,143 +755,66 @@ find_first_bit(const unsigned long * addr, unsigned long size)
 	return (res < size) ? res : size;
 }
 
-static inline unsigned long
-find_next_zero_bit (const unsigned long * addr, unsigned long size, unsigned long offset)
-{
-	unsigned long * p = ((unsigned long *) addr) + (offset >> 6);
-	unsigned long bitvec, reg;
-	unsigned long set, bit = offset & 63, res;
-
-	if (bit) {
-		/*
-		 * Look for zero in first word
-		 */
-		bitvec = (*p) >> bit;
-		__asm__("   lhi  %2,-1\n"
-			"   slgr %0,%0\n"
-			"   clr  %1,%2\n"
-			"   jne  0f\n"
-			"   aghi %0,32\n"
-			"   srlg %1,%1,32\n"
-			"0: lghi %2,0xff\n"
-			"   tmll %1,0xffff\n"
-			"   jno  1f\n"
-			"   aghi %0,16\n"
-			"   srlg %1,%1,16\n"
-			"1: tmll %1,0x00ff\n"
-			"   jno  2f\n"
-			"   aghi %0,8\n"
-			"   srlg %1,%1,8\n"
-			"2: ngr  %1,%2\n"
-			"   ic   %1,0(%1,%3)\n"
-			"   algr %0,%1"
-			: "=&d" (set), "+a" (bitvec), "=&d" (reg)
-			: "a" (&_zb_findmap) : "cc" );
-		if (set < (64 - bit))
-			return set + offset;
-		offset += 64 - bit;
-		p++;
-	}
-	/*
-	 * No zero yet, search remaining full words for a zero
-	 */
-	res = find_first_zero_bit (p, size - 64 * (p - (unsigned long *) addr));
-	return (offset + res);
-}
-
-static inline unsigned long
-find_next_bit (const unsigned long * addr, unsigned long size, unsigned long offset)
-{
-	unsigned long * p = ((unsigned long *) addr) + (offset >> 6);
-	unsigned long bitvec, reg;
-	unsigned long set, bit = offset & 63, res;
-
-	if (bit) {
-		/*
-		 * Look for zero in first word
-		 */
-		bitvec = (*p) >> bit;
-		__asm__("   slgr %0,%0\n"
-			"   ltr  %1,%1\n"
-			"   jnz  0f\n"
-			"   aghi %0,32\n"
-			"   srlg %1,%1,32\n"
-			"0: lghi %2,0xff\n"
-			"   tmll %1,0xffff\n"
-			"   jnz  1f\n"
-			"   aghi %0,16\n"
-			"   srlg %1,%1,16\n"
-			"1: tmll %1,0x00ff\n"
-			"   jnz  2f\n"
-			"   aghi %0,8\n"
-			"   srlg %1,%1,8\n"
-			"2: ngr  %1,%2\n"
-			"   ic   %1,0(%1,%3)\n"
-			"   algr %0,%1"
-			: "=&d" (set), "+a" (bitvec), "=&d" (reg)
-			: "a" (&_sb_findmap) : "cc" );
-		if (set < (64 - bit))
-			return set + offset;
-		offset += 64 - bit;
-		p++;
-	}
-	/*
-	 * No set bit yet, search remaining full words for a bit
-	 */
-	res = find_first_bit (p, size - 64 * (p - (unsigned long *) addr));
-	return (offset + res);
-}
-
 #endif /* __s390x__ */
 
-/*
- * ffz = Find First Zero in word. Undefined if no zero exists,
- * so code should check against ~0UL first..
- */
-static inline unsigned long ffz(unsigned long word)
+static inline int
+find_next_zero_bit (const unsigned long * addr, unsigned long size,
+		    unsigned long offset)
 {
-	unsigned long bit = 0;
-
-#ifdef __s390x__
-	if (likely((word & 0xffffffff) == 0xffffffff)) {
-		word >>= 32;
-		bit += 32;
-	}
-#endif
-	if (likely((word & 0xffff) == 0xffff)) {
-		word >>= 16;
-		bit += 16;
+	const unsigned long *p;
+	unsigned long bit, set;
+
+	if (offset >= size)
+		return size;
+	bit = offset & (__BITOPS_WORDSIZE - 1);
+	offset -= bit;
+	size -= offset;
+	p = addr + offset / __BITOPS_WORDSIZE;
+	if (bit) {
+		/*
+		 * s390 version of ffz returns __BITOPS_WORDSIZE
+		 * if no zero bit is present in the word.
+		 */
+		set = ffz(*p >> bit) + bit;
+		if (set >= size)
+			return size + offset;
+		if (set < __BITOPS_WORDSIZE)
+			return set + offset;
+		offset += __BITOPS_WORDSIZE;
+		size -= __BITOPS_WORDSIZE;
+		p++;
 	}
-	if (likely((word & 0xff) == 0xff)) {
-		word >>= 8;
-		bit += 8;
-	}
-	return bit + _zb_findmap[word & 0xff];
+	return offset + find_first_zero_bit(p, size);
 }
 
-/*
- * __ffs = find first bit in word. Undefined if no bit exists,
- * so code should check against 0UL first..
- */
-static inline unsigned long __ffs (unsigned long word)
+static inline int
+find_next_bit (const unsigned long * addr, unsigned long size,
+	       unsigned long offset)
 {
-	unsigned long bit = 0;
-
-#ifdef __s390x__
-	if (likely((word & 0xffffffff) == 0)) {
-		word >>= 32;
-		bit += 32;
+	const unsigned long *p;
+	unsigned long bit, set;
+
+	if (offset >= size)
+		return size;
+	bit = offset & (__BITOPS_WORDSIZE - 1);
+	offset -= bit;
+	size -= offset;
+	p = addr + offset / __BITOPS_WORDSIZE;
+	if (bit) {
+		/*
+		 * s390 version of __ffs returns __BITOPS_WORDSIZE
+		 * if no one bit is present in the word.
+		 */
+		set = __ffs(*p & (~0UL << bit));
+		if (set >= size)
+			return size + offset;
+		if (set < __BITOPS_WORDSIZE)
+			return set + offset;
+		offset += __BITOPS_WORDSIZE;
+		size -= __BITOPS_WORDSIZE;
+		p++;
 	}
-#endif
-	if (likely((word & 0xffff) == 0)) {
-		word >>= 16;
-		bit += 16;
-	}
-	if (likely((word & 0xff) == 0)) {
-		word >>= 8;
-		bit += 8;
-	}
-	return bit + _sb_findmap[word & 0xff];
+	return offset + find_first_bit(p, size);
 }
 
 /*
@@ -1031,49 +924,6 @@ ext2_find_first_zero_bit(void *vaddr, unsigned int size)
 	return (res < size) ? res : size;
 }
 
-static inline int
-ext2_find_next_zero_bit(void *vaddr, unsigned int size, unsigned offset)
-{
-	unsigned long *addr = vaddr;
-	unsigned long *p = addr + (offset >> 5);
-	unsigned long word, reg;
-	unsigned int bit = offset & 31UL, res;
-
-	if (offset >= size)
-		return size;
-
-	if (bit) {
-		__asm__("   ic   %0,0(%1)\n"
-			"   icm  %0,2,1(%1)\n"
-			"   icm  %0,4,2(%1)\n"
-			"   icm  %0,8,3(%1)"
-			: "=&a" (word) : "a" (p) : "cc" );
-		word >>= bit;
-		res = bit;
-		/* Look for zero in first longword */
-		__asm__("   lhi  %2,0xff\n"
-			"   tml  %1,0xffff\n"
-			"   jno  0f\n"
-			"   ahi  %0,16\n"
-			"   srl  %1,16\n"
-			"0: tml  %1,0x00ff\n"
-			"   jno  1f\n"
-			"   ahi  %0,8\n"
-			"   srl  %1,8\n"
-			"1: nr   %1,%2\n"
-			"   ic   %1,0(%1,%3)\n"
-			"   alr  %0,%1"
-			: "+&d" (res), "+&a" (word), "=&d" (reg)
-			: "a" (&_zb_findmap) : "cc" );
-		if (res < 32)
-			return (p - addr)*32 + res;
-		p++;
-	}
-	/* No zero yet, search remaining full bytes for a zero */
-	res = ext2_find_first_zero_bit (p, size - 32 * (p - addr));
-	return (p - addr) * 32 + res;
-}
-
 #else /* __s390x__ */
 
 static inline unsigned long
@@ -1120,56 +970,46 @@ ext2_find_first_zero_bit(void *vaddr, unsigned long size)
 	return (res < size) ? res : size;
 }
 
-static inline unsigned long
+#endif /* __s390x__ */
+
+static inline int
 ext2_find_next_zero_bit(void *vaddr, unsigned long size, unsigned long offset)
 {
-	unsigned long *addr = vaddr;
-	unsigned long *p = addr + (offset >> 6);
-	unsigned long word, reg;
-	unsigned long bit = offset & 63UL, res;
+	unsigned long *addr = vaddr, *p;
+	unsigned long word, bit, set;
 
 	if (offset >= size)
 		return size;
-
+	bit = offset & (__BITOPS_WORDSIZE - 1);
+	offset -= bit;
+	size -= offset;
+	p = addr + offset / __BITOPS_WORDSIZE;
 	if (bit) {
-		__asm__("   lrvg %0,%1" /* load reversed, neat instruction */
-			: "=a" (word) : "m" (*p) );
-		word >>= bit;
-		res = bit;
-		/* Look for zero in first 8 byte word */
-		__asm__("   lghi %2,0xff\n"
-			"   tmll %1,0xffff\n"
-			"   jno  2f\n"
-			"   ahi  %0,16\n"
-			"   srlg %1,%1,16\n"
-			"0: tmll %1,0xffff\n"
-			"   jno  2f\n"
-			"   ahi  %0,16\n"
-			"   srlg %1,%1,16\n"
-			"1: tmll %1,0xffff\n"
-			"   jno  2f\n"
-			"   ahi  %0,16\n"
-			"   srl  %1,16\n"
-			"2: tmll %1,0x00ff\n"
-			"   jno  3f\n"
-			"   ahi  %0,8\n"
-			"   srl  %1,8\n"
-			"3: ngr  %1,%2\n"
-			"   ic   %1,0(%1,%3)\n"
-			"   alr  %0,%1"
-			: "+&d" (res), "+a" (word), "=&d" (reg)
-			: "a" (&_zb_findmap) : "cc" );
-		if (res < 64)
-			return (p - addr)*64 + res;
-		p++;
+#ifndef __s390x__
+		asm("   ic   %0,0(%1)\n"
+		    "   icm  %0,2,1(%1)\n"
+		    "   icm  %0,4,2(%1)\n"
+		    "   icm  %0,8,3(%1)"
+		    : "=&a" (word) : "a" (p), "m" (*p) : "cc" );
+#else
+		asm("   lrvg %0,%1" : "=a" (word) : "m" (*p) );
+#endif
+		/*
+		 * s390 version of ffz returns __BITOPS_WORDSIZE
+		 * if no zero bit is present in the word.
+		 */
+		set = ffz(word >> bit) + bit;
+		if (set >= size)
+			return size + offset;
+		if (set < __BITOPS_WORDSIZE)
+			return set + offset;
+		offset += __BITOPS_WORDSIZE;
+		size -= __BITOPS_WORDSIZE;
+		p++;
 	}
-	/* No zero yet, search remaining full bytes for a zero */
-	res = ext2_find_first_zero_bit (p, size - 64 * (p - addr));
-	return (p - addr) * 64 + res;
+	return offset + ext2_find_first_zero_bit(p, size);
 }
 
-#endif /* __s390x__ */
-
 /* Bitmap functions for the minix filesystem. */
 /* FIXME !!! */
 #define minix_test_and_set_bit(nr,addr) \
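
Note: the reworked find_next routines in this file all share one shape: handle a partial first word with ffz()/__ffs(), then fall back to the corresponding find_first routine for the remaining full words. A minimal, generic C sketch of that structure (assuming a native word size and portable stand-ins for the s390 table-driven helpers; none of these names exist in the patch) looks like:

#define WORDSIZE (8 * sizeof(unsigned long))

/* Portable stand-in for ffz(): index of the first zero bit,
 * WORDSIZE if the word is all ones. */
static unsigned long ffz_sketch(unsigned long word)
{
	unsigned long bit;

	for (bit = 0; bit < WORDSIZE; bit++)
		if (!(word & (1UL << bit)))
			return bit;
	return WORDSIZE;
}

/* Full-word scan, stand-in for find_first_zero_bit(). */
static unsigned long find_first_zero_bit_sketch(const unsigned long *addr,
						unsigned long size)
{
	unsigned long i, bit;

	for (i = 0; i * WORDSIZE < size; i++) {
		bit = ffz_sketch(addr[i]);
		if (bit < WORDSIZE && i * WORDSIZE + bit < size)
			return i * WORDSIZE + bit;
	}
	return size;
}

/* Same two-phase structure as the patched find_next_zero_bit():
 * scan the partial first word, then delegate to the full-word search. */
static unsigned long find_next_zero_bit_sketch(const unsigned long *addr,
					       unsigned long size,
					       unsigned long offset)
{
	const unsigned long *p;
	unsigned long bit, set;

	if (offset >= size)
		return size;
	bit = offset & (WORDSIZE - 1);
	offset -= bit;
	size -= offset;
	p = addr + offset / WORDSIZE;
	if (bit) {
		set = ffz_sketch(*p >> bit) + bit;
		if (set >= size)
			return size + offset;
		if (set < WORDSIZE)
			return set + offset;
		offset += WORDSIZE;
		size -= WORDSIZE;
		p++;
	}
	return offset + find_first_zero_bit_sketch(p, size);
}

The patch keeps only this generic skeleton in find_next_zero_bit/find_next_bit and leaves the per-byte table lookups to ffz()/__ffs(), which removes the duplicated inline assembly that the old 31-bit and 64-bit variants carried.
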
diff --git a/include/asm-s390/lowcore.h b/include/asm-s390/lowcore.h
index 76b5b19c0ae2..afe6a9f9b0ae 100644
--- a/include/asm-s390/lowcore.h
+++ b/include/asm-s390/lowcore.h
@@ -90,7 +90,6 @@
 #define __LC_SYSTEM_TIMER		0x278
 #define __LC_LAST_UPDATE_CLOCK		0x280
 #define __LC_STEAL_CLOCK		0x288
-#define __LC_DIAG44_OPCODE		0x290
 #define __LC_KERNEL_STACK		0xD40
 #define __LC_THREAD_INFO		0xD48
 #define __LC_ASYNC_STACK		0xD50
@@ -286,8 +285,7 @@ struct _lowcore
 	__u64	system_timer;		/* 0x278 */
 	__u64	last_update_clock;	/* 0x280 */
 	__u64	steal_clock;		/* 0x288 */
-	__u32	diag44_opcode;		/* 0x290 */
-	__u8	pad8[0xc00-0x294];	/* 0x294 */
+	__u8	pad8[0xc00-0x290];	/* 0x290 */
 	/* System info area */
 	__u64	save_area[16];		/* 0xc00 */
 	__u8	pad9[0xd40-0xc80];	/* 0xc80 */
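
Note: removing diag44_opcode and widening pad8 keeps every later lowcore field at its fixed absolute offset (save_area must stay at 0xc00). A hypothetical compile-time check of that invariant, with a reduced struct that only mirrors the layout idea (names and field subset are illustrative, not from the patch):

#include <stddef.h>

struct lowcore_sketch {
	unsigned char	pad_to_290[0x290];	/* everything up to 0x290 */
	unsigned char	pad8[0xc00 - 0x290];	/* was: diag44_opcode + pad8 */
	unsigned long	save_area[16];		/* must stay at 0xc00 */
};

/* Fails to compile if the padding change moved save_area. */
_Static_assert(offsetof(struct lowcore_sketch, save_area) == 0xc00,
	       "save_area must remain at lowcore offset 0xc00");
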
diff --git a/include/asm-s390/processor.h b/include/asm-s390/processor.h
index 8bd14de69e35..4ec652ebb3b1 100644
--- a/include/asm-s390/processor.h
+++ b/include/asm-s390/processor.h
@@ -203,7 +203,10 @@ unsigned long get_wchan(struct task_struct *p);
 # define cpu_relax()	asm volatile ("diag 0,0,68" : : : "memory")
 #else /* __s390x__ */
 # define cpu_relax() \
-	asm volatile ("ex 0,%0" : : "i" (__LC_DIAG44_OPCODE) : "memory")
+	do { \
+		if (MACHINE_HAS_DIAG44) \
+			asm volatile ("diag 0,0,68" : : : "memory"); \
+	} while (0)
 #endif /* __s390x__ */
 
 /*
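
Note: with the __LC_DIAG44_OPCODE lowcore slot gone, the 64-bit cpu_relax() above issues diag 0,0,68 directly, but only after testing MACHINE_HAS_DIAG44 at run time. A small, hedged sketch of how such a macro is typically consumed in a busy-wait loop (the flag, the function, and the non-s390 stand-in definition are all illustrative):

/* Stand-in: on a non-s390 build, relaxing is just a compiler barrier;
 * the patched header issues diag 0,0,68 when MACHINE_HAS_DIAG44 is set. */
#define cpu_relax_sketch()	asm volatile ("" : : : "memory")

static volatile int resource_ready;	/* hypothetical flag set elsewhere */

/* Typical busy-wait consumer: poll, yielding between probes. */
static void wait_for_resource(void)
{
	while (!resource_ready)
		cpu_relax_sketch();
}
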
diff --git a/include/asm-s390/spinlock.h b/include/asm-s390/spinlock.h
index 53cc736b9820..8ff10300f7ee 100644
--- a/include/asm-s390/spinlock.h
+++ b/include/asm-s390/spinlock.h
@@ -11,21 +11,16 @@
 #ifndef __ASM_SPINLOCK_H
 #define __ASM_SPINLOCK_H
 
-#ifdef __s390x__
-/*
- * Grmph, take care of %&#! user space programs that include
- * asm/spinlock.h. The diagnose is only available in kernel
- * context.
- */
-#ifdef __KERNEL__
-#include <asm/lowcore.h>
-#define __DIAG44_INSN "ex"
-#define __DIAG44_OPERAND __LC_DIAG44_OPCODE
-#else
-#define __DIAG44_INSN "#"
-#define __DIAG44_OPERAND 0
-#endif
-#endif /* __s390x__ */
+static inline int
+_raw_compare_and_swap(volatile unsigned int *lock,
+		      unsigned int old, unsigned int new)
+{
+	asm volatile ("cs %0,%3,0(%4)"
+		      : "=d" (old), "=m" (*lock)
+		      : "0" (old), "d" (new), "a" (lock), "m" (*lock)
+		      : "cc", "memory" );
+	return old;
+}
 
 /*
  * Simple spin lock operations. There are two variants, one clears IRQ's
@@ -41,58 +36,35 @@ typedef struct {
 #endif
 } __attribute__ ((aligned (4))) spinlock_t;
 
 #define SPIN_LOCK_UNLOCKED	(spinlock_t) { 0 }
 #define spin_lock_init(lp)	do { (lp)->lock = 0; } while(0)
 #define spin_unlock_wait(lp)	do { barrier(); } while(((volatile spinlock_t *)(lp))->lock)
 #define spin_is_locked(x)	((x)->lock != 0)
 #define _raw_spin_lock_flags(lock, flags) _raw_spin_lock(lock)
 
-extern inline void _raw_spin_lock(spinlock_t *lp)
+extern void _raw_spin_lock_wait(spinlock_t *lp, unsigned int pc);
+extern int _raw_spin_trylock_retry(spinlock_t *lp, unsigned int pc);
+
+static inline void _raw_spin_lock(spinlock_t *lp)
 {
-#ifndef __s390x__
-	unsigned int reg1, reg2;
-	__asm__ __volatile__("    bras  %0,1f\n"
-			     "0:  diag  0,0,68\n"
-			     "1:  slr   %1,%1\n"
-			     "    cs    %1,%0,0(%3)\n"
-			     "    jl    0b\n"
-			     : "=&d" (reg1), "=&d" (reg2), "=m" (lp->lock)
-			     : "a" (&lp->lock), "m" (lp->lock)
-			     : "cc", "memory" );
-#else /* __s390x__ */
-	unsigned long reg1, reg2;
-	__asm__ __volatile__("    bras  %1,1f\n"
-			     "0: " __DIAG44_INSN " 0,%4\n"
-			     "1:  slr   %0,%0\n"
-			     "    cs    %0,%1,0(%3)\n"
-			     "    jl    0b\n"
-			     : "=&d" (reg1), "=&d" (reg2), "=m" (lp->lock)
-			     : "a" (&lp->lock), "i" (__DIAG44_OPERAND),
-			       "m" (lp->lock) : "cc", "memory" );
-#endif /* __s390x__ */
+	unsigned long pc = (unsigned long) __builtin_return_address(0);
+
+	if (unlikely(_raw_compare_and_swap(&lp->lock, 0, pc) != 0))
+		_raw_spin_lock_wait(lp, pc);
 }
 
-extern inline int _raw_spin_trylock(spinlock_t *lp)
+static inline int _raw_spin_trylock(spinlock_t *lp)
 {
-	unsigned long reg;
-	unsigned int result;
-
-	__asm__ __volatile__("    basr  %1,0\n"
-			     "0:  cs    %0,%1,0(%3)"
-			     : "=d" (result), "=&d" (reg), "=m" (lp->lock)
-			     : "a" (&lp->lock), "m" (lp->lock), "0" (0)
-			     : "cc", "memory" );
-	return !result;
+	unsigned long pc = (unsigned long) __builtin_return_address(0);
+
+	if (likely(_raw_compare_and_swap(&lp->lock, 0, pc) == 0))
+		return 1;
+	return _raw_spin_trylock_retry(lp, pc);
 }
 
-extern inline void _raw_spin_unlock(spinlock_t *lp)
+static inline void _raw_spin_unlock(spinlock_t *lp)
 {
-	unsigned int old;
-
-	__asm__ __volatile__("cs %0,%3,0(%4)"
-			     : "=d" (old), "=m" (lp->lock)
-			     : "0" (lp->lock), "d" (0), "a" (lp)
-			     : "cc", "memory" );
+	_raw_compare_and_swap(&lp->lock, lp->lock, 0);
 }
 
 /*
@@ -106,7 +78,7 @@ extern inline void _raw_spin_unlock(spinlock_t *lp)
  * read-locks.
  */
 typedef struct {
-	volatile unsigned long lock;
+	volatile unsigned int lock;
 	volatile unsigned long owner_pc;
 #ifdef CONFIG_PREEMPT
 	unsigned int break_lock;
@@ -129,123 +101,55 @@ typedef struct {
  */
 #define write_can_lock(x) ((x)->lock == 0)
 
-#ifndef __s390x__
-#define _raw_read_lock(rw)   \
-	asm volatile("   l     2,0(%1)\n" \
-		     "   j     1f\n" \
-		     "0: diag  0,0,68\n" \
-		     "1: la    2,0(2)\n"	/* clear high (=write) bit */ \
-		     "   la    3,1(2)\n"	/* one more reader */ \
-		     "   cs    2,3,0(%1)\n"	/* try to write new value */ \
-		     "   jl    0b" \
-		     : "=m" ((rw)->lock) : "a" (&(rw)->lock), \
-		       "m" ((rw)->lock) : "2", "3", "cc", "memory" )
-#else /* __s390x__ */
-#define _raw_read_lock(rw)   \
-	asm volatile("   lg    2,0(%1)\n" \
-		     "   j     1f\n" \
-		     "0: " __DIAG44_INSN " 0,%2\n" \
-		     "1: nihh  2,0x7fff\n"	/* clear high (=write) bit */ \
-		     "   la    3,1(2)\n"	/* one more reader */ \
-		     "   csg   2,3,0(%1)\n"	/* try to write new value */ \
-		     "   jl    0b" \
-		     : "=m" ((rw)->lock) \
-		     : "a" (&(rw)->lock), "i" (__DIAG44_OPERAND), \
-		       "m" ((rw)->lock) : "2", "3", "cc", "memory" )
-#endif /* __s390x__ */
-
-#ifndef __s390x__
-#define _raw_read_unlock(rw) \
-	asm volatile("   l     2,0(%1)\n" \
-		     "   j     1f\n" \
-		     "0: diag  0,0,68\n" \
-		     "1: lr    3,2\n" \
-		     "   ahi   3,-1\n"	/* one less reader */ \
-		     "   cs    2,3,0(%1)\n" \
-		     "   jl    0b" \
-		     : "=m" ((rw)->lock) : "a" (&(rw)->lock), \
-		       "m" ((rw)->lock) : "2", "3", "cc", "memory" )
-#else /* __s390x__ */
-#define _raw_read_unlock(rw) \
-	asm volatile("   lg    2,0(%1)\n" \
-		     "   j     1f\n" \
-		     "0: " __DIAG44_INSN " 0,%2\n" \
-		     "1: lgr   3,2\n" \
-		     "   bctgr 3,0\n"	/* one less reader */ \
-		     "   csg   2,3,0(%1)\n" \
-		     "   jl    0b" \
-		     : "=m" ((rw)->lock) \
-		     : "a" (&(rw)->lock), "i" (__DIAG44_OPERAND), \
-		       "m" ((rw)->lock) : "2", "3", "cc", "memory" )
-#endif /* __s390x__ */
-
-#ifndef __s390x__
-#define _raw_write_lock(rw) \
-	asm volatile("   lhi   3,1\n" \
-		     "   sll   3,31\n"	/* new lock value = 0x80000000 */ \
-		     "   j     1f\n" \
-		     "0: diag  0,0,68\n" \
-		     "1: slr   2,2\n"	/* old lock value must be 0 */ \
-		     "   cs    2,3,0(%1)\n" \
-		     "   jl    0b" \
-		     : "=m" ((rw)->lock) : "a" (&(rw)->lock), \
-		       "m" ((rw)->lock) : "2", "3", "cc", "memory" )
-#else /* __s390x__ */
-#define _raw_write_lock(rw) \
-	asm volatile("   llihh 3,0x8000\n" /* new lock value = 0x80...0 */ \
-		     "   j     1f\n" \
-		     "0: " __DIAG44_INSN " 0,%2\n" \
-		     "1: slgr  2,2\n"	/* old lock value must be 0 */ \
-		     "   csg   2,3,0(%1)\n" \
-		     "   jl    0b" \
-		     : "=m" ((rw)->lock) \
-		     : "a" (&(rw)->lock), "i" (__DIAG44_OPERAND), \
-		       "m" ((rw)->lock) : "2", "3", "cc", "memory" )
-#endif /* __s390x__ */
-
-#ifndef __s390x__
-#define _raw_write_unlock(rw) \
-	asm volatile("   slr   3,3\n"	/* new lock value = 0 */ \
-		     "   j     1f\n" \
-		     "0: diag  0,0,68\n" \
-		     "1: lhi   2,1\n" \
-		     "   sll   2,31\n"	/* old lock value must be 0x80000000 */ \
-		     "   cs    2,3,0(%1)\n" \
-		     "   jl    0b" \
-		     : "=m" ((rw)->lock) : "a" (&(rw)->lock), \
-		       "m" ((rw)->lock) : "2", "3", "cc", "memory" )
-#else /* __s390x__ */
-#define _raw_write_unlock(rw) \
-	asm volatile("   slgr  3,3\n"	/* new lock value = 0 */ \
-		     "   j     1f\n" \
-		     "0: " __DIAG44_INSN " 0,%2\n" \
-		     "1: llihh 2,0x8000\n"	/* old lock value must be 0x8..0 */ \
-		     "   csg   2,3,0(%1)\n" \
-		     "   jl    0b" \
-		     : "=m" ((rw)->lock) \
-		     : "a" (&(rw)->lock), "i" (__DIAG44_OPERAND), \
-		       "m" ((rw)->lock) : "2", "3", "cc", "memory" )
-#endif /* __s390x__ */
-
-#define _raw_read_trylock(lock) generic_raw_read_trylock(lock)
-
-extern inline int _raw_write_trylock(rwlock_t *rw)
+extern void _raw_read_lock_wait(rwlock_t *lp);
+extern int _raw_read_trylock_retry(rwlock_t *lp);
+extern void _raw_write_lock_wait(rwlock_t *lp);
+extern int _raw_write_trylock_retry(rwlock_t *lp);
+
+static inline void _raw_read_lock(rwlock_t *rw)
+{
+	unsigned int old;
+	old = rw->lock & 0x7fffffffU;
+	if (_raw_compare_and_swap(&rw->lock, old, old + 1) != old)
+		_raw_read_lock_wait(rw);
+}
+
+static inline void _raw_read_unlock(rwlock_t *rw)
+{
+	unsigned int old, cmp;
+
+	old = rw->lock;
+	do {
+		cmp = old;
+		old = _raw_compare_and_swap(&rw->lock, old, old - 1);
+	} while (cmp != old);
+}
+
+static inline void _raw_write_lock(rwlock_t *rw)
+{
+	if (unlikely(_raw_compare_and_swap(&rw->lock, 0, 0x80000000) != 0))
+		_raw_write_lock_wait(rw);
+}
+
+static inline void _raw_write_unlock(rwlock_t *rw)
+{
+	_raw_compare_and_swap(&rw->lock, 0x80000000, 0);
+}
+
+static inline int _raw_read_trylock(rwlock_t *rw)
+{
+	unsigned int old;
+	old = rw->lock & 0x7fffffffU;
+	if (likely(_raw_compare_and_swap(&rw->lock, old, old + 1) == old))
+		return 1;
+	return _raw_read_trylock_retry(rw);
+}
+
+static inline int _raw_write_trylock(rwlock_t *rw)
 {
-	unsigned long result, reg;
-
-	__asm__ __volatile__(
-#ifndef __s390x__
-			     "	lhi  %1,1\n"
-			     "	sll  %1,31\n"
-			     "	cs   %0,%1,0(%3)"
-#else /* __s390x__ */
-			     "	llihh %1,0x8000\n"
-			     "0: csg %0,%1,0(%3)\n"
-#endif /* __s390x__ */
-			     : "=d" (result), "=&d" (reg), "=m" (rw->lock)
-			     : "a" (&rw->lock), "m" (rw->lock), "0" (0UL)
-			     : "cc", "memory" );
-	return result == 0;
+	if (likely(_raw_compare_and_swap(&rw->lock, 0, 0x80000000) == 0))
+		return 1;
+	return _raw_write_trylock_retry(rw);
 }
 
 #endif /* __ASM_SPINLOCK_H */
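
Note: taken together, the spinlock rework replaces the per-lock diag/inline-asm retry loops with a single _raw_compare_and_swap primitive plus out-of-line *_wait/*_retry helpers, which are only declared in this header. A minimal user-space sketch of the same fast-path idea, using a GCC builtin in place of the cs instruction and a trivial spin in place of the out-of-line wait routine (all names here are illustrative, not the kernel's API):

/* Sketch only: the lock word is 0 when free, otherwise the caller's PC,
 * mirroring how the patched _raw_spin_lock() stores
 * __builtin_return_address(0) for debugging. */
typedef struct { volatile unsigned long lock; } spinlock_sketch_t;

static inline unsigned long cas_sketch(volatile unsigned long *lock,
				       unsigned long old, unsigned long new)
{
	/* Returns the previous value, like the cs-based primitive. */
	return __sync_val_compare_and_swap(lock, old, new);
}

static inline void spin_lock_sketch(spinlock_sketch_t *lp)
{
	unsigned long pc = (unsigned long) __builtin_return_address(0);

	while (cas_sketch(&lp->lock, 0, pc) != 0)
		;	/* the real code calls an out-of-line wait routine */
}

static inline int spin_trylock_sketch(spinlock_sketch_t *lp)
{
	unsigned long pc = (unsigned long) __builtin_return_address(0);

	return cas_sketch(&lp->lock, 0, pc) == 0;
}

static inline void spin_unlock_sketch(spinlock_sketch_t *lp)
{
	cas_sketch(&lp->lock, lp->lock, 0);	/* unconditional release */
}

Keeping only the compare-and-swap fast path inline and pushing the contended path out of line is what lets the header drop the __DIAG44_* machinery entirely.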