diff options
| author | Christoph Hellwig <hch@lst.de> | 2019-04-23 12:38:09 -0400 |
|---|---|---|
| committer | Arnd Bergmann <arnd@arndb.de> | 2019-04-23 15:51:41 -0400 |
| commit | 6edd1dbace0e8529ed167e8a5f9da63c0cc763cc (patch) | |
| tree | 157e43dccba6b1bd1a4c2bf3277c6b9d7359e850 | |
| parent | bd79f94758c011bdffd8d4afcfb578d169cb5e93 (diff) | |
asm-generic: optimize generic uaccess for 8-byte loads and stores
On 64-bit architectures we can also use the direct load/store trick for
8-byte values.
Signed-off-by: Christoph Hellwig <hch@lst.de>
Signed-off-by: Arnd Bergmann <arnd@arndb.de>
| -rw-r--r-- | include/asm-generic/uaccess.h | 10 |
1 file changed, 10 insertions, 0 deletions
diff --git a/include/asm-generic/uaccess.h b/include/asm-generic/uaccess.h index 3dcabfceb21e..e935318804f8 100644 --- a/include/asm-generic/uaccess.h +++ b/include/asm-generic/uaccess.h | |||
| @@ -24,6 +24,11 @@ raw_copy_from_user(void *to, const void __user * from, unsigned long n) | |||
| 24 | case 4: | 24 | case 4: |
| 25 | *(u32 *)to = *(u32 __force *)from; | 25 | *(u32 *)to = *(u32 __force *)from; |
| 26 | return 0; | 26 | return 0; |
| 27 | #ifdef CONFIG_64BIT | ||
| 28 | case 8: | ||
| 29 | *(u64 *)to = *(u64 __force *)from; | ||
| 30 | return 0; | ||
| 31 | #endif | ||
| 27 | } | 32 | } |
| 28 | } | 33 | } |
| 29 | 34 | ||
| @@ -45,6 +50,11 @@ raw_copy_to_user(void __user *to, const void *from, unsigned long n) | |||
| 45 | case 4: | 50 | case 4: |
| 46 | *(u32 __force *)to = *(u32 *)from; | 51 | *(u32 __force *)to = *(u32 *)from; |
| 47 | return 0; | 52 | return 0; |
| 53 | #ifdef CONFIG_64BIT | ||
| 54 | case 8: | ||
| 55 | *(u64 __force *)to = *(u64 *)from; | ||
| 56 | return 0; | ||
| 57 | #endif | ||
| 48 | default: | 58 | default: |
| 49 | break; | 59 | break; |
| 50 | } | 60 | } |
