Diffstat (limited to 'include/asm-i386')
-rw-r--r--  include/asm-i386/div64.h     | 2 +-
-rw-r--r--  include/asm-i386/processor.h | 4 ++--
2 files changed, 3 insertions(+), 3 deletions(-)
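The change is mechanical: each `extern inline` in these headers becomes `static inline`. Under the gnu89 semantics gcc uses for the kernel, `extern inline` never emits an out-of-line definition, so any call the compiler declines to inline (for example at -O0 or with -fno-inline, or when the function's address is taken) references a symbol that is never defined and the link fails; `static inline` instead falls back to a local copy in each translation unit. A minimal sketch of that failure mode outside the kernel, using a hypothetical header example.h that is not part of this patch:

/* example.h -- hypothetical header, only to illustrate the problem */
extern inline int twice(int x)	/* gnu89: body is for inlining only,    */
{				/* no out-of-line definition is emitted */
	return x * 2;
}

/* example.c */
#include "example.h"

int main(void)
{
	/* Built with "gcc -std=gnu89 -O0 example.c", the call is not
	 * inlined and the link fails: undefined reference to `twice'.
	 * Declaring it "static inline int twice(int x)" instead gives
	 * this translation unit its own fallback copy, which is exactly
	 * what the hunks below do for the kernel helpers. */
	return twice(21);
}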
diff --git a/include/asm-i386/div64.h b/include/asm-i386/div64.h
index 28ed8b296afc..75c67c785bb8 100644
--- a/include/asm-i386/div64.h
+++ b/include/asm-i386/div64.h
@@ -35,7 +35,7 @@
  */
 #define div_long_long_rem(a,b,c) div_ll_X_l_rem(a,b,c)
 
-extern inline long
+static inline long
 div_ll_X_l_rem(long long divs, long div, long *rem)
 {
 	long dum2;
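For reference, div_ll_X_l_rem() is the i386 64-by-32-bit divide helper: it returns the quotient and stores the remainder through the third argument, and callers normally reach it through the div_long_long_rem() macro shown in the hunk. A small, hypothetical usage sketch (the split_ns() name and the values are illustrative, not from the kernel):

#include <asm/div64.h>	/* provides div_long_long_rem() on i386 */

/* Split a nanosecond count into whole seconds plus leftover nanoseconds.
 * The quotient must fit in a long, as with any 64-by-32 divide. */
static void split_ns(long long ns, long *sec, long *frac)
{
	*sec = div_long_long_rem(ns, 1000000000L, frac);
	/* e.g. ns = 3123456789 -> *sec = 3, *frac = 123456789 */
}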
diff --git a/include/asm-i386/processor.h b/include/asm-i386/processor.h
index 37bef8ed7bed..0a4ec764377c 100644
--- a/include/asm-i386/processor.h
+++ b/include/asm-i386/processor.h
@@ -679,7 +679,7 @@ static inline void rep_nop(void)
    However we don't do prefetches for pre XP Athlons currently
    That should be fixed. */
 #define ARCH_HAS_PREFETCH
-extern inline void prefetch(const void *x)
+static inline void prefetch(const void *x)
 {
 	alternative_input(ASM_NOP4,
 			  "prefetchnta (%1)",
@@ -693,7 +693,7 @@ extern inline void prefetch(const void *x)
 
 /* 3dnow! prefetch to get an exclusive cache line. Useful for
    spinlocks to avoid one state transition in the cache coherency protocol. */
-extern inline void prefetchw(const void *x)
+static inline void prefetchw(const void *x)
 {
 	alternative_input(ASM_NOP4,
 			  "prefetchw (%1)",