Commit e2afe674 authored by Adrian Bunk, committed by Linus Torvalds

[PATCH] include/asm-i386/: "extern inline" -> "static inline"

"extern inline" doesn't make much sense.
Signed-off-by: Adrian Bunk <bunk@stusta.de>
Signed-off-by: Andrew Morton <akpm@osdl.org>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
parent ea0e0a4f
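
For context on why the conversion matters: in the gnu89 C dialect the kernel is compiled as, gcc treats an "extern inline" body as available only for inlining and emits no out-of-line definition, so any call it chooses not to inline (for example at -O0, or when the function's address is taken) becomes an undefined reference unless an external definition exists elsewhere. With "static inline", gcc emits a file-local copy whenever one is needed, so the code always links. A minimal userspace sketch of the difference, assuming gcc -std=gnu89; the twice() function is illustrative and not part of the patch:

/*
 * Minimal sketch (not from this patch; twice() is a made-up example).
 * Declaring it "extern inline int twice(int x)" in gnu89 mode would emit
 * no body in this translation unit, and a non-inlined call would fail to
 * link.  "static inline" lets gcc emit a local copy on demand.
 */
#include <stdio.h>

static inline int twice(int x)
{
	return x << 1;
}

int main(void)
{
	/* prints 42 whether or not the call is actually inlined */
	printf("%d\n", twice(21));
	return 0;
}
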
@@ -35,7 +35,7 @@
  */
 #define div_long_long_rem(a,b,c) div_ll_X_l_rem(a,b,c)
-extern inline long
+static inline long
 div_ll_X_l_rem(long long divs, long div, long *rem)
 {
 	long dum2;
...
@@ -679,7 +679,7 @@ static inline void rep_nop(void)
    However we don't do prefetches for pre XP Athlons currently
    That should be fixed. */
 #define ARCH_HAS_PREFETCH
-extern inline void prefetch(const void *x)
+static inline void prefetch(const void *x)
 {
 	alternative_input(ASM_NOP4,
 		     "prefetchnta (%1)",
@@ -693,7 +693,7 @@ extern inline void prefetch(const void *x)
 /* 3dnow! prefetch to get an exclusive cache line. Useful for
    spinlocks to avoid one state transition in the cache coherency protocol. */
-extern inline void prefetchw(const void *x)
+static inline void prefetchw(const void *x)
 {
 	alternative_input(ASM_NOP4,
 		     "prefetchw (%1)",
...