about summary refs log tree commit diff stats
path: root/include
diff options
context:
space:
mode:
authorAdrian Bunk <bunk@stusta.de>2005-09-10 03:27:16 -0400
committerLinus Torvalds <torvalds@g5.osdl.org>2005-09-10 13:06:34 -0400
commite2afe67453e5b1499459ee3596b1e7924a5208f5 (patch)
treea5de215c0df3ecff145d829cfe350b96eb643720 /include
parentea0e0a4f53a75ed9d0812352c0410f6fc2a0b62a (diff)
[PATCH] include/asm-i386/: "extern inline" -> "static inline"
"extern inline" doesn't make much sense.

Signed-off-by: Adrian Bunk <bunk@stusta.de>
Signed-off-by: Andrew Morton <akpm@osdl.org>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
Diffstat (limited to 'include')
-rw-r--r--include/asm-i386/div64.h2
-rw-r--r--include/asm-i386/processor.h4
2 files changed, 3 insertions, 3 deletions
diff --git a/include/asm-i386/div64.h b/include/asm-i386/div64.h
index 28ed8b296afc..75c67c785bb8 100644
--- a/include/asm-i386/div64.h
+++ b/include/asm-i386/div64.h
@@ -35,7 +35,7 @@
  */
 #define div_long_long_rem(a,b,c) div_ll_X_l_rem(a,b,c)
 
-extern inline long
+static inline long
 div_ll_X_l_rem(long long divs, long div, long *rem)
 {
 	long dum2;
diff --git a/include/asm-i386/processor.h b/include/asm-i386/processor.h
index 37bef8ed7bed..0a4ec764377c 100644
--- a/include/asm-i386/processor.h
+++ b/include/asm-i386/processor.h
@@ -679,7 +679,7 @@ static inline void rep_nop(void)
    However we don't do prefetches for pre XP Athlons currently
    That should be fixed. */
 #define ARCH_HAS_PREFETCH
-extern inline void prefetch(const void *x)
+static inline void prefetch(const void *x)
 {
 	alternative_input(ASM_NOP4,
 			  "prefetchnta (%1)",
@@ -693,7 +693,7 @@ static inline void prefetch(const void *x)
 
 /* 3dnow! prefetch to get an exclusive cache line. Useful for
    spinlocks to avoid one state transition in the cache coherency protocol. */
-extern inline void prefetchw(const void *x)
+static inline void prefetchw(const void *x)
 {
 	alternative_input(ASM_NOP4,
 			  "prefetchw (%1)",