diff options
| author | Linus Torvalds <torvalds@linux-foundation.org> | 2018-10-23 08:08:53 -0400 |
|---|---|---|
| committer | Linus Torvalds <torvalds@linux-foundation.org> | 2018-10-23 08:08:53 -0400 |
| commit | 0200fbdd431519d730b5d399a12840ec832b27cc (patch) | |
| tree | 2b58f9e24b61b00e0550f106c95bfabc3b52cfdd /arch/x86/lib | |
| parent | de3fbb2aa802a267dee2213ae7d5a1e19eb4294a (diff) | |
| parent | 01a14bda11add9dcd4a59200f13834d634559935 (diff) | |
Merge branch 'locking-core-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip
Pull locking and misc x86 updates from Ingo Molnar:
"Lots of changes in this cycle - in part because locking/core attracted
a number of related x86 low level work which was easier to handle in a
single tree:
- Linux Kernel Memory Consistency Model updates (Alan Stern, Paul E.
McKenney, Andrea Parri)
- lockdep scalability improvements and micro-optimizations (Waiman
Long)
- rwsem improvements (Waiman Long)
- spinlock micro-optimization (Matthew Wilcox)
- qspinlocks: Provide a liveness guarantee (more fairness) on x86.
(Peter Zijlstra)
- Add support for relative references in jump tables on arm64, x86
and s390 to optimize jump labels (Ard Biesheuvel, Heiko Carstens)
- Be a lot less permissive on weird (kernel address) uaccess faults
on x86: BUG() when uaccess helpers fault on kernel addresses (Jann
Horn)
- macrofy x86 asm statements to un-confuse the GCC inliner. (Nadav
Amit)
- ... and a handful of other smaller changes as well"
* 'locking-core-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip: (57 commits)
locking/lockdep: Make global debug_locks* variables read-mostly
locking/lockdep: Fix debug_locks off performance problem
locking/pvqspinlock: Extend node size when pvqspinlock is configured
locking/qspinlock_stat: Count instances of nested lock slowpaths
locking/qspinlock, x86: Provide liveness guarantee
x86/asm: 'Simplify' GEN_*_RMWcc() macros
locking/qspinlock: Rework some comments
locking/qspinlock: Re-order code
locking/lockdep: Remove duplicated 'lock_class_ops' percpu array
x86/defconfig: Enable CONFIG_USB_XHCI_HCD=y
futex: Replace spin_is_locked() with lockdep
locking/lockdep: Make class->ops a percpu counter and move it under CONFIG_DEBUG_LOCKDEP=y
x86/jump-labels: Macrofy inline assembly code to work around GCC inlining bugs
x86/cpufeature: Macrofy inline assembly code to work around GCC inlining bugs
x86/extable: Macrofy inline assembly code to work around GCC inlining bugs
x86/paravirt: Work around GCC inlining bugs when compiling paravirt ops
x86/bug: Macrofy the BUG table section handling, to work around GCC inlining bugs
x86/alternatives: Macrofy lock prefixes to work around GCC inlining bugs
x86/refcount: Work around GCC inlining bug
x86/objtool: Use asm macros to work around GCC inlining bugs
...
Diffstat (limited to 'arch/x86/lib')
| -rw-r--r-- | arch/x86/lib/checksum_32.S | 4 | ||||
| -rw-r--r-- | arch/x86/lib/copy_user_64.S | 90 | ||||
| -rw-r--r-- | arch/x86/lib/csum-copy_64.S | 8 | ||||
| -rw-r--r-- | arch/x86/lib/getuser.S | 12 | ||||
| -rw-r--r-- | arch/x86/lib/putuser.S | 10 | ||||
| -rw-r--r-- | arch/x86/lib/usercopy_32.c | 126 | ||||
| -rw-r--r-- | arch/x86/lib/usercopy_64.c | 4 |
7 files changed, 129 insertions(+), 125 deletions(-)
diff --git a/arch/x86/lib/checksum_32.S b/arch/x86/lib/checksum_32.S index 46e71a74e612..ad8e0906d1ea 100644 --- a/arch/x86/lib/checksum_32.S +++ b/arch/x86/lib/checksum_32.S | |||
| @@ -273,11 +273,11 @@ unsigned int csum_partial_copy_generic (const char *src, char *dst, | |||
| 273 | 273 | ||
| 274 | #define SRC(y...) \ | 274 | #define SRC(y...) \ |
| 275 | 9999: y; \ | 275 | 9999: y; \ |
| 276 | _ASM_EXTABLE(9999b, 6001f) | 276 | _ASM_EXTABLE_UA(9999b, 6001f) |
| 277 | 277 | ||
| 278 | #define DST(y...) \ | 278 | #define DST(y...) \ |
| 279 | 9999: y; \ | 279 | 9999: y; \ |
| 280 | _ASM_EXTABLE(9999b, 6002f) | 280 | _ASM_EXTABLE_UA(9999b, 6002f) |
| 281 | 281 | ||
| 282 | #ifndef CONFIG_X86_USE_PPRO_CHECKSUM | 282 | #ifndef CONFIG_X86_USE_PPRO_CHECKSUM |
| 283 | 283 | ||
diff --git a/arch/x86/lib/copy_user_64.S b/arch/x86/lib/copy_user_64.S index 020f75cc8cf6..db4e5aa0858b 100644 --- a/arch/x86/lib/copy_user_64.S +++ b/arch/x86/lib/copy_user_64.S | |||
| @@ -92,26 +92,26 @@ ENTRY(copy_user_generic_unrolled) | |||
| 92 | 60: jmp copy_user_handle_tail /* ecx is zerorest also */ | 92 | 60: jmp copy_user_handle_tail /* ecx is zerorest also */ |
| 93 | .previous | 93 | .previous |
| 94 | 94 | ||
| 95 | _ASM_EXTABLE(1b,30b) | 95 | _ASM_EXTABLE_UA(1b, 30b) |
| 96 | _ASM_EXTABLE(2b,30b) | 96 | _ASM_EXTABLE_UA(2b, 30b) |
| 97 | _ASM_EXTABLE(3b,30b) | 97 | _ASM_EXTABLE_UA(3b, 30b) |
| 98 | _ASM_EXTABLE(4b,30b) | 98 | _ASM_EXTABLE_UA(4b, 30b) |
| 99 | _ASM_EXTABLE(5b,30b) | 99 | _ASM_EXTABLE_UA(5b, 30b) |
| 100 | _ASM_EXTABLE(6b,30b) | 100 | _ASM_EXTABLE_UA(6b, 30b) |
| 101 | _ASM_EXTABLE(7b,30b) | 101 | _ASM_EXTABLE_UA(7b, 30b) |
| 102 | _ASM_EXTABLE(8b,30b) | 102 | _ASM_EXTABLE_UA(8b, 30b) |
| 103 | _ASM_EXTABLE(9b,30b) | 103 | _ASM_EXTABLE_UA(9b, 30b) |
| 104 | _ASM_EXTABLE(10b,30b) | 104 | _ASM_EXTABLE_UA(10b, 30b) |
| 105 | _ASM_EXTABLE(11b,30b) | 105 | _ASM_EXTABLE_UA(11b, 30b) |
| 106 | _ASM_EXTABLE(12b,30b) | 106 | _ASM_EXTABLE_UA(12b, 30b) |
| 107 | _ASM_EXTABLE(13b,30b) | 107 | _ASM_EXTABLE_UA(13b, 30b) |
| 108 | _ASM_EXTABLE(14b,30b) | 108 | _ASM_EXTABLE_UA(14b, 30b) |
| 109 | _ASM_EXTABLE(15b,30b) | 109 | _ASM_EXTABLE_UA(15b, 30b) |
| 110 | _ASM_EXTABLE(16b,30b) | 110 | _ASM_EXTABLE_UA(16b, 30b) |
| 111 | _ASM_EXTABLE(18b,40b) | 111 | _ASM_EXTABLE_UA(18b, 40b) |
| 112 | _ASM_EXTABLE(19b,40b) | 112 | _ASM_EXTABLE_UA(19b, 40b) |
| 113 | _ASM_EXTABLE(21b,50b) | 113 | _ASM_EXTABLE_UA(21b, 50b) |
| 114 | _ASM_EXTABLE(22b,50b) | 114 | _ASM_EXTABLE_UA(22b, 50b) |
| 115 | ENDPROC(copy_user_generic_unrolled) | 115 | ENDPROC(copy_user_generic_unrolled) |
| 116 | EXPORT_SYMBOL(copy_user_generic_unrolled) | 116 | EXPORT_SYMBOL(copy_user_generic_unrolled) |
| 117 | 117 | ||
| @@ -156,8 +156,8 @@ ENTRY(copy_user_generic_string) | |||
| 156 | jmp copy_user_handle_tail | 156 | jmp copy_user_handle_tail |
| 157 | .previous | 157 | .previous |
| 158 | 158 | ||
| 159 | _ASM_EXTABLE(1b,11b) | 159 | _ASM_EXTABLE_UA(1b, 11b) |
| 160 | _ASM_EXTABLE(3b,12b) | 160 | _ASM_EXTABLE_UA(3b, 12b) |
| 161 | ENDPROC(copy_user_generic_string) | 161 | ENDPROC(copy_user_generic_string) |
| 162 | EXPORT_SYMBOL(copy_user_generic_string) | 162 | EXPORT_SYMBOL(copy_user_generic_string) |
| 163 | 163 | ||
| @@ -189,7 +189,7 @@ ENTRY(copy_user_enhanced_fast_string) | |||
| 189 | jmp copy_user_handle_tail | 189 | jmp copy_user_handle_tail |
| 190 | .previous | 190 | .previous |
| 191 | 191 | ||
| 192 | _ASM_EXTABLE(1b,12b) | 192 | _ASM_EXTABLE_UA(1b, 12b) |
| 193 | ENDPROC(copy_user_enhanced_fast_string) | 193 | ENDPROC(copy_user_enhanced_fast_string) |
| 194 | EXPORT_SYMBOL(copy_user_enhanced_fast_string) | 194 | EXPORT_SYMBOL(copy_user_enhanced_fast_string) |
| 195 | 195 | ||
| @@ -319,27 +319,27 @@ ENTRY(__copy_user_nocache) | |||
| 319 | jmp copy_user_handle_tail | 319 | jmp copy_user_handle_tail |
| 320 | .previous | 320 | .previous |
| 321 | 321 | ||
| 322 | _ASM_EXTABLE(1b,.L_fixup_4x8b_copy) | 322 | _ASM_EXTABLE_UA(1b, .L_fixup_4x8b_copy) |
| 323 | _ASM_EXTABLE(2b,.L_fixup_4x8b_copy) | 323 | _ASM_EXTABLE_UA(2b, .L_fixup_4x8b_copy) |
| 324 | _ASM_EXTABLE(3b,.L_fixup_4x8b_copy) | 324 | _ASM_EXTABLE_UA(3b, .L_fixup_4x8b_copy) |
| 325 | _ASM_EXTABLE(4b,.L_fixup_4x8b_copy) | 325 | _ASM_EXTABLE_UA(4b, .L_fixup_4x8b_copy) |
| 326 | _ASM_EXTABLE(5b,.L_fixup_4x8b_copy) | 326 | _ASM_EXTABLE_UA(5b, .L_fixup_4x8b_copy) |
| 327 | _ASM_EXTABLE(6b,.L_fixup_4x8b_copy) | 327 | _ASM_EXTABLE_UA(6b, .L_fixup_4x8b_copy) |
| 328 | _ASM_EXTABLE(7b,.L_fixup_4x8b_copy) | 328 | _ASM_EXTABLE_UA(7b, .L_fixup_4x8b_copy) |
| 329 | _ASM_EXTABLE(8b,.L_fixup_4x8b_copy) | 329 | _ASM_EXTABLE_UA(8b, .L_fixup_4x8b_copy) |
| 330 | _ASM_EXTABLE(9b,.L_fixup_4x8b_copy) | 330 | _ASM_EXTABLE_UA(9b, .L_fixup_4x8b_copy) |
| 331 | _ASM_EXTABLE(10b,.L_fixup_4x8b_copy) | 331 | _ASM_EXTABLE_UA(10b, .L_fixup_4x8b_copy) |
| 332 | _ASM_EXTABLE(11b,.L_fixup_4x8b_copy) | 332 | _ASM_EXTABLE_UA(11b, .L_fixup_4x8b_copy) |
| 333 | _ASM_EXTABLE(12b,.L_fixup_4x8b_copy) | 333 | _ASM_EXTABLE_UA(12b, .L_fixup_4x8b_copy) |
| 334 | _ASM_EXTABLE(13b,.L_fixup_4x8b_copy) | 334 | _ASM_EXTABLE_UA(13b, .L_fixup_4x8b_copy) |
| 335 | _ASM_EXTABLE(14b,.L_fixup_4x8b_copy) | 335 | _ASM_EXTABLE_UA(14b, .L_fixup_4x8b_copy) |
| 336 | _ASM_EXTABLE(15b,.L_fixup_4x8b_copy) | 336 | _ASM_EXTABLE_UA(15b, .L_fixup_4x8b_copy) |
| 337 | _ASM_EXTABLE(16b,.L_fixup_4x8b_copy) | 337 | _ASM_EXTABLE_UA(16b, .L_fixup_4x8b_copy) |
| 338 | _ASM_EXTABLE(20b,.L_fixup_8b_copy) | 338 | _ASM_EXTABLE_UA(20b, .L_fixup_8b_copy) |
| 339 | _ASM_EXTABLE(21b,.L_fixup_8b_copy) | 339 | _ASM_EXTABLE_UA(21b, .L_fixup_8b_copy) |
| 340 | _ASM_EXTABLE(30b,.L_fixup_4b_copy) | 340 | _ASM_EXTABLE_UA(30b, .L_fixup_4b_copy) |
| 341 | _ASM_EXTABLE(31b,.L_fixup_4b_copy) | 341 | _ASM_EXTABLE_UA(31b, .L_fixup_4b_copy) |
| 342 | _ASM_EXTABLE(40b,.L_fixup_1b_copy) | 342 | _ASM_EXTABLE_UA(40b, .L_fixup_1b_copy) |
| 343 | _ASM_EXTABLE(41b,.L_fixup_1b_copy) | 343 | _ASM_EXTABLE_UA(41b, .L_fixup_1b_copy) |
| 344 | ENDPROC(__copy_user_nocache) | 344 | ENDPROC(__copy_user_nocache) |
| 345 | EXPORT_SYMBOL(__copy_user_nocache) | 345 | EXPORT_SYMBOL(__copy_user_nocache) |
diff --git a/arch/x86/lib/csum-copy_64.S b/arch/x86/lib/csum-copy_64.S index 45a53dfe1859..a4a379e79259 100644 --- a/arch/x86/lib/csum-copy_64.S +++ b/arch/x86/lib/csum-copy_64.S | |||
| @@ -31,14 +31,18 @@ | |||
| 31 | 31 | ||
| 32 | .macro source | 32 | .macro source |
| 33 | 10: | 33 | 10: |
| 34 | _ASM_EXTABLE(10b, .Lbad_source) | 34 | _ASM_EXTABLE_UA(10b, .Lbad_source) |
| 35 | .endm | 35 | .endm |
| 36 | 36 | ||
| 37 | .macro dest | 37 | .macro dest |
| 38 | 20: | 38 | 20: |
| 39 | _ASM_EXTABLE(20b, .Lbad_dest) | 39 | _ASM_EXTABLE_UA(20b, .Lbad_dest) |
| 40 | .endm | 40 | .endm |
| 41 | 41 | ||
| 42 | /* | ||
| 43 | * No _ASM_EXTABLE_UA; this is used for intentional prefetch on a | ||
| 44 | * potentially unmapped kernel address. | ||
| 45 | */ | ||
| 42 | .macro ignore L=.Lignore | 46 | .macro ignore L=.Lignore |
| 43 | 30: | 47 | 30: |
| 44 | _ASM_EXTABLE(30b, \L) | 48 | _ASM_EXTABLE(30b, \L) |
diff --git a/arch/x86/lib/getuser.S b/arch/x86/lib/getuser.S index 49b167f73215..74fdff968ea3 100644 --- a/arch/x86/lib/getuser.S +++ b/arch/x86/lib/getuser.S | |||
| @@ -132,12 +132,12 @@ bad_get_user_8: | |||
| 132 | END(bad_get_user_8) | 132 | END(bad_get_user_8) |
| 133 | #endif | 133 | #endif |
| 134 | 134 | ||
| 135 | _ASM_EXTABLE(1b,bad_get_user) | 135 | _ASM_EXTABLE_UA(1b, bad_get_user) |
| 136 | _ASM_EXTABLE(2b,bad_get_user) | 136 | _ASM_EXTABLE_UA(2b, bad_get_user) |
| 137 | _ASM_EXTABLE(3b,bad_get_user) | 137 | _ASM_EXTABLE_UA(3b, bad_get_user) |
| 138 | #ifdef CONFIG_X86_64 | 138 | #ifdef CONFIG_X86_64 |
| 139 | _ASM_EXTABLE(4b,bad_get_user) | 139 | _ASM_EXTABLE_UA(4b, bad_get_user) |
| 140 | #else | 140 | #else |
| 141 | _ASM_EXTABLE(4b,bad_get_user_8) | 141 | _ASM_EXTABLE_UA(4b, bad_get_user_8) |
| 142 | _ASM_EXTABLE(5b,bad_get_user_8) | 142 | _ASM_EXTABLE_UA(5b, bad_get_user_8) |
| 143 | #endif | 143 | #endif |
diff --git a/arch/x86/lib/putuser.S b/arch/x86/lib/putuser.S index 96dce5fe2a35..d2e5c9c39601 100644 --- a/arch/x86/lib/putuser.S +++ b/arch/x86/lib/putuser.S | |||
| @@ -94,10 +94,10 @@ bad_put_user: | |||
| 94 | EXIT | 94 | EXIT |
| 95 | END(bad_put_user) | 95 | END(bad_put_user) |
| 96 | 96 | ||
| 97 | _ASM_EXTABLE(1b,bad_put_user) | 97 | _ASM_EXTABLE_UA(1b, bad_put_user) |
| 98 | _ASM_EXTABLE(2b,bad_put_user) | 98 | _ASM_EXTABLE_UA(2b, bad_put_user) |
| 99 | _ASM_EXTABLE(3b,bad_put_user) | 99 | _ASM_EXTABLE_UA(3b, bad_put_user) |
| 100 | _ASM_EXTABLE(4b,bad_put_user) | 100 | _ASM_EXTABLE_UA(4b, bad_put_user) |
| 101 | #ifdef CONFIG_X86_32 | 101 | #ifdef CONFIG_X86_32 |
| 102 | _ASM_EXTABLE(5b,bad_put_user) | 102 | _ASM_EXTABLE_UA(5b, bad_put_user) |
| 103 | #endif | 103 | #endif |
diff --git a/arch/x86/lib/usercopy_32.c b/arch/x86/lib/usercopy_32.c index 7add8ba06887..71fb58d44d58 100644 --- a/arch/x86/lib/usercopy_32.c +++ b/arch/x86/lib/usercopy_32.c | |||
| @@ -47,8 +47,8 @@ do { \ | |||
| 47 | "3: lea 0(%2,%0,4),%0\n" \ | 47 | "3: lea 0(%2,%0,4),%0\n" \ |
| 48 | " jmp 2b\n" \ | 48 | " jmp 2b\n" \ |
| 49 | ".previous\n" \ | 49 | ".previous\n" \ |
| 50 | _ASM_EXTABLE(0b,3b) \ | 50 | _ASM_EXTABLE_UA(0b, 3b) \ |
| 51 | _ASM_EXTABLE(1b,2b) \ | 51 | _ASM_EXTABLE_UA(1b, 2b) \ |
| 52 | : "=&c"(size), "=&D" (__d0) \ | 52 | : "=&c"(size), "=&D" (__d0) \ |
| 53 | : "r"(size & 3), "0"(size / 4), "1"(addr), "a"(0)); \ | 53 | : "r"(size & 3), "0"(size / 4), "1"(addr), "a"(0)); \ |
| 54 | } while (0) | 54 | } while (0) |
| @@ -153,44 +153,44 @@ __copy_user_intel(void __user *to, const void *from, unsigned long size) | |||
| 153 | "101: lea 0(%%eax,%0,4),%0\n" | 153 | "101: lea 0(%%eax,%0,4),%0\n" |
| 154 | " jmp 100b\n" | 154 | " jmp 100b\n" |
| 155 | ".previous\n" | 155 | ".previous\n" |
| 156 | _ASM_EXTABLE(1b,100b) | 156 | _ASM_EXTABLE_UA(1b, 100b) |
| 157 | _ASM_EXTABLE(2b,100b) | 157 | _ASM_EXTABLE_UA(2b, 100b) |
| 158 | _ASM_EXTABLE(3b,100b) | 158 | _ASM_EXTABLE_UA(3b, 100b) |
| 159 | _ASM_EXTABLE(4b,100b) | 159 | _ASM_EXTABLE_UA(4b, 100b) |
| 160 | _ASM_EXTABLE(5b,100b) | 160 | _ASM_EXTABLE_UA(5b, 100b) |
| 161 | _ASM_EXTABLE(6b,100b) | 161 | _ASM_EXTABLE_UA(6b, 100b) |
| 162 | _ASM_EXTABLE(7b,100b) | 162 | _ASM_EXTABLE_UA(7b, 100b) |
| 163 | _ASM_EXTABLE(8b,100b) | 163 | _ASM_EXTABLE_UA(8b, 100b) |
| 164 | _ASM_EXTABLE(9b,100b) | 164 | _ASM_EXTABLE_UA(9b, 100b) |
| 165 | _ASM_EXTABLE(10b,100b) | 165 | _ASM_EXTABLE_UA(10b, 100b) |
| 166 | _ASM_EXTABLE(11b,100b) | 166 | _ASM_EXTABLE_UA(11b, 100b) |
| 167 | _ASM_EXTABLE(12b,100b) | 167 | _ASM_EXTABLE_UA(12b, 100b) |
| 168 | _ASM_EXTABLE(13b,100b) | 168 | _ASM_EXTABLE_UA(13b, 100b) |
| 169 | _ASM_EXTABLE(14b,100b) | 169 | _ASM_EXTABLE_UA(14b, 100b) |
| 170 | _ASM_EXTABLE(15b,100b) | 170 | _ASM_EXTABLE_UA(15b, 100b) |
| 171 | _ASM_EXTABLE(16b,100b) | 171 | _ASM_EXTABLE_UA(16b, 100b) |
| 172 | _ASM_EXTABLE(17b,100b) | 172 | _ASM_EXTABLE_UA(17b, 100b) |
| 173 | _ASM_EXTABLE(18b,100b) | 173 | _ASM_EXTABLE_UA(18b, 100b) |
| 174 | _ASM_EXTABLE(19b,100b) | 174 | _ASM_EXTABLE_UA(19b, 100b) |
| 175 | _ASM_EXTABLE(20b,100b) | 175 | _ASM_EXTABLE_UA(20b, 100b) |
| 176 | _ASM_EXTABLE(21b,100b) | 176 | _ASM_EXTABLE_UA(21b, 100b) |
| 177 | _ASM_EXTABLE(22b,100b) | 177 | _ASM_EXTABLE_UA(22b, 100b) |
| 178 | _ASM_EXTABLE(23b,100b) | 178 | _ASM_EXTABLE_UA(23b, 100b) |
| 179 | _ASM_EXTABLE(24b,100b) | 179 | _ASM_EXTABLE_UA(24b, 100b) |
| 180 | _ASM_EXTABLE(25b,100b) | 180 | _ASM_EXTABLE_UA(25b, 100b) |
| 181 | _ASM_EXTABLE(26b,100b) | 181 | _ASM_EXTABLE_UA(26b, 100b) |
| 182 | _ASM_EXTABLE(27b,100b) | 182 | _ASM_EXTABLE_UA(27b, 100b) |
| 183 | _ASM_EXTABLE(28b,100b) | 183 | _ASM_EXTABLE_UA(28b, 100b) |
| 184 | _ASM_EXTABLE(29b,100b) | 184 | _ASM_EXTABLE_UA(29b, 100b) |
| 185 | _ASM_EXTABLE(30b,100b) | 185 | _ASM_EXTABLE_UA(30b, 100b) |
| 186 | _ASM_EXTABLE(31b,100b) | 186 | _ASM_EXTABLE_UA(31b, 100b) |
| 187 | _ASM_EXTABLE(32b,100b) | 187 | _ASM_EXTABLE_UA(32b, 100b) |
| 188 | _ASM_EXTABLE(33b,100b) | 188 | _ASM_EXTABLE_UA(33b, 100b) |
| 189 | _ASM_EXTABLE(34b,100b) | 189 | _ASM_EXTABLE_UA(34b, 100b) |
| 190 | _ASM_EXTABLE(35b,100b) | 190 | _ASM_EXTABLE_UA(35b, 100b) |
| 191 | _ASM_EXTABLE(36b,100b) | 191 | _ASM_EXTABLE_UA(36b, 100b) |
| 192 | _ASM_EXTABLE(37b,100b) | 192 | _ASM_EXTABLE_UA(37b, 100b) |
| 193 | _ASM_EXTABLE(99b,101b) | 193 | _ASM_EXTABLE_UA(99b, 101b) |
| 194 | : "=&c"(size), "=&D" (d0), "=&S" (d1) | 194 | : "=&c"(size), "=&D" (d0), "=&S" (d1) |
| 195 | : "1"(to), "2"(from), "0"(size) | 195 | : "1"(to), "2"(from), "0"(size) |
| 196 | : "eax", "edx", "memory"); | 196 | : "eax", "edx", "memory"); |
| @@ -259,26 +259,26 @@ static unsigned long __copy_user_intel_nocache(void *to, | |||
| 259 | "9: lea 0(%%eax,%0,4),%0\n" | 259 | "9: lea 0(%%eax,%0,4),%0\n" |
| 260 | "16: jmp 8b\n" | 260 | "16: jmp 8b\n" |
| 261 | ".previous\n" | 261 | ".previous\n" |
| 262 | _ASM_EXTABLE(0b,16b) | 262 | _ASM_EXTABLE_UA(0b, 16b) |
| 263 | _ASM_EXTABLE(1b,16b) | 263 | _ASM_EXTABLE_UA(1b, 16b) |
| 264 | _ASM_EXTABLE(2b,16b) | 264 | _ASM_EXTABLE_UA(2b, 16b) |
| 265 | _ASM_EXTABLE(21b,16b) | 265 | _ASM_EXTABLE_UA(21b, 16b) |
| 266 | _ASM_EXTABLE(3b,16b) | 266 | _ASM_EXTABLE_UA(3b, 16b) |
| 267 | _ASM_EXTABLE(31b,16b) | 267 | _ASM_EXTABLE_UA(31b, 16b) |
| 268 | _ASM_EXTABLE(4b,16b) | 268 | _ASM_EXTABLE_UA(4b, 16b) |
| 269 | _ASM_EXTABLE(41b,16b) | 269 | _ASM_EXTABLE_UA(41b, 16b) |
| 270 | _ASM_EXTABLE(10b,16b) | 270 | _ASM_EXTABLE_UA(10b, 16b) |
| 271 | _ASM_EXTABLE(51b,16b) | 271 | _ASM_EXTABLE_UA(51b, 16b) |
| 272 | _ASM_EXTABLE(11b,16b) | 272 | _ASM_EXTABLE_UA(11b, 16b) |
| 273 | _ASM_EXTABLE(61b,16b) | 273 | _ASM_EXTABLE_UA(61b, 16b) |
| 274 | _ASM_EXTABLE(12b,16b) | 274 | _ASM_EXTABLE_UA(12b, 16b) |
| 275 | _ASM_EXTABLE(71b,16b) | 275 | _ASM_EXTABLE_UA(71b, 16b) |
| 276 | _ASM_EXTABLE(13b,16b) | 276 | _ASM_EXTABLE_UA(13b, 16b) |
| 277 | _ASM_EXTABLE(81b,16b) | 277 | _ASM_EXTABLE_UA(81b, 16b) |
| 278 | _ASM_EXTABLE(14b,16b) | 278 | _ASM_EXTABLE_UA(14b, 16b) |
| 279 | _ASM_EXTABLE(91b,16b) | 279 | _ASM_EXTABLE_UA(91b, 16b) |
| 280 | _ASM_EXTABLE(6b,9b) | 280 | _ASM_EXTABLE_UA(6b, 9b) |
| 281 | _ASM_EXTABLE(7b,16b) | 281 | _ASM_EXTABLE_UA(7b, 16b) |
| 282 | : "=&c"(size), "=&D" (d0), "=&S" (d1) | 282 | : "=&c"(size), "=&D" (d0), "=&S" (d1) |
| 283 | : "1"(to), "2"(from), "0"(size) | 283 | : "1"(to), "2"(from), "0"(size) |
| 284 | : "eax", "edx", "memory"); | 284 | : "eax", "edx", "memory"); |
| @@ -321,9 +321,9 @@ do { \ | |||
| 321 | "3: lea 0(%3,%0,4),%0\n" \ | 321 | "3: lea 0(%3,%0,4),%0\n" \ |
| 322 | " jmp 2b\n" \ | 322 | " jmp 2b\n" \ |
| 323 | ".previous\n" \ | 323 | ".previous\n" \ |
| 324 | _ASM_EXTABLE(4b,5b) \ | 324 | _ASM_EXTABLE_UA(4b, 5b) \ |
| 325 | _ASM_EXTABLE(0b,3b) \ | 325 | _ASM_EXTABLE_UA(0b, 3b) \ |
| 326 | _ASM_EXTABLE(1b,2b) \ | 326 | _ASM_EXTABLE_UA(1b, 2b) \ |
| 327 | : "=&c"(size), "=&D" (__d0), "=&S" (__d1), "=r"(__d2) \ | 327 | : "=&c"(size), "=&D" (__d0), "=&S" (__d1), "=r"(__d2) \ |
| 328 | : "3"(size), "0"(size), "1"(to), "2"(from) \ | 328 | : "3"(size), "0"(size), "1"(to), "2"(from) \ |
| 329 | : "memory"); \ | 329 | : "memory"); \ |
diff --git a/arch/x86/lib/usercopy_64.c b/arch/x86/lib/usercopy_64.c index 9c5606d88f61..fefe64436398 100644 --- a/arch/x86/lib/usercopy_64.c +++ b/arch/x86/lib/usercopy_64.c | |||
| @@ -37,8 +37,8 @@ unsigned long __clear_user(void __user *addr, unsigned long size) | |||
| 37 | "3: lea 0(%[size1],%[size8],8),%[size8]\n" | 37 | "3: lea 0(%[size1],%[size8],8),%[size8]\n" |
| 38 | " jmp 2b\n" | 38 | " jmp 2b\n" |
| 39 | ".previous\n" | 39 | ".previous\n" |
| 40 | _ASM_EXTABLE(0b,3b) | 40 | _ASM_EXTABLE_UA(0b, 3b) |
| 41 | _ASM_EXTABLE(1b,2b) | 41 | _ASM_EXTABLE_UA(1b, 2b) |
| 42 | : [size8] "=&c"(size), [dst] "=&D" (__d0) | 42 | : [size8] "=&c"(size), [dst] "=&D" (__d0) |
| 43 | : [size1] "r"(size & 7), "[size8]" (size / 8), "[dst]"(addr)); | 43 | : [size1] "r"(size & 7), "[size8]" (size / 8), "[dst]"(addr)); |
| 44 | clac(); | 44 | clac(); |
