author		Glauber Costa <gcosta@redhat.com>	2008-06-25 13:53:41 -0400
committer	Ingo Molnar <mingo@elte.hu>	2008-07-09 03:14:28 -0400
commit		8bc7de0c5dc0a5d3bcdc04bac6de0c799f91c5e4 (patch)
tree		c1f8dfbe35c408f41d89d345a0f8bf03d2af5dd9 /include
parent		8cb834e99f44bd56409b794504ae2b170675fc92 (diff)
x86: put movsl_mask into uaccess.h.
x86_64 does not need it, but it won't have X86_INTEL_USERCOPY defined either.

Signed-off-by: Glauber Costa <gcosta@redhat.com>
Signed-off-by: H. Peter Anvin <hpa@zytor.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Diffstat (limited to 'include')
-rw-r--r--	include/asm-x86/uaccess.h	| 9 +++++++++
-rw-r--r--	include/asm-x86/uaccess_32.h	| 9 ---------
2 files changed, 9 insertions(+), 9 deletions(-)
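For context (not part of the patch): the declaration being moved is consumed by the 32-bit usercopy routines to decide whether a movsl-based copy is worthwhile for a given source/destination pair. Below is a minimal standalone C sketch of that alignment test; the mask value of 7 and the simplified helper name movsl_is_ok are illustrative assumptions, while the kernel's real version lives in the x86 usercopy code and also tags the variable ____cacheline_aligned_in_smp.

#include <stddef.h>

/* Sketch only: mirrors the shape of the declaration in the patch. */
struct movsl_mask {
	int mask;
} movsl_mask = { .mask = 7 };	/* assumed value; the kernel sets it at CPU init */

/* Return nonzero when a movsl-based copy should be used. */
static int movsl_is_ok(const void *dst, const void *src, size_t n)
{
	unsigned long a1 = (unsigned long)dst;
	unsigned long a2 = (unsigned long)src;

	if (n < 64)	/* short copies: any misalignment penalty is negligible */
		return 1;
	/* long copies: require src and dst to agree in the masked low bits */
	return ((a1 ^ a2) & movsl_mask.mask) == 0;
}

Guarding the extern with CONFIG_X86_INTEL_USERCOPY in the shared uaccess.h keeps x86_64 builds clean: that option is never set there, so the declaration simply compiles away.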
diff --git a/include/asm-x86/uaccess.h b/include/asm-x86/uaccess.h
index 7c7b46af6368..0c4ab788d017 100644
--- a/include/asm-x86/uaccess.h
+++ b/include/asm-x86/uaccess.h
@@ -432,6 +432,15 @@ struct __large_struct { unsigned long buf[100]; };
 #define __get_user_unaligned __get_user
 #define __put_user_unaligned __put_user
 
+/*
+ * movsl can be slow when source and dest are not both 8-byte aligned
+ */
+#ifdef CONFIG_X86_INTEL_USERCOPY
+extern struct movsl_mask {
+	int mask;
+} ____cacheline_aligned_in_smp movsl_mask;
+#endif
+
 #ifdef CONFIG_X86_32
 # include "uaccess_32.h"
 #else
diff --git a/include/asm-x86/uaccess_32.h b/include/asm-x86/uaccess_32.h
index d3b5bf88ea86..3467749c6beb 100644
--- a/include/asm-x86/uaccess_32.h
+++ b/include/asm-x86/uaccess_32.h
@@ -11,15 +11,6 @@
 #include <asm/asm.h>
 #include <asm/page.h>
 
-/*
- * movsl can be slow when source and dest are not both 8-byte aligned
- */
-#ifdef CONFIG_X86_INTEL_USERCOPY
-extern struct movsl_mask {
-	int mask;
-} ____cacheline_aligned_in_smp movsl_mask;
-#endif
-
 unsigned long __must_check __copy_to_user_ll
 		(void __user *to, const void *from, unsigned long n);
 unsigned long __must_check __copy_from_user_ll