diff -urpN --exclude-from=/home/davej/.exclude bk-linus/arch/i386/Kconfig linux-2.5/arch/i386/Kconfig --- bk-linus/arch/i386/Kconfig 2002-11-21 02:09:27.000000000 +0000 +++ linux-2.5/arch/i386/Kconfig 2002-11-21 18:23:48.000000000 +0000 @@ -283,6 +283,22 @@ config X86_OOSTORE depends on MWINCHIP3D || MWINCHIP2 || MWINCHIPC6 default y +config X86_SFENCE + bool + depends on MPENTIUMIII || MPENTIUM4 || MK7 + default y + +config X86_LFENCE + bool + depends on MPENTIUM4 + default y + +config X86_MFENCE + bool + depends on MPENTIUM4 + default y + + config HUGETLB_PAGE bool "Huge TLB Page Support" help diff -urpN --exclude-from=/home/davej/.exclude bk-linus/include/asm-i386/system.h linux-2.5/include/asm-i386/system.h --- bk-linus/include/asm-i386/system.h 2002-11-21 02:21:53.000000000 +0000 +++ linux-2.5/include/asm-i386/system.h 2002-11-21 18:03:25.000000000 +0000 @@ -281,10 +286,33 @@ static inline unsigned long __cmpxchg(vo * * Some non intel clones support out of order store. wmb() ceases to be a * nop for these. + * + * Pentium III introduced the SFENCE instruction for serialising all store + * operations, Pentium IV further introduced LFENCE and MFENCE for load and + * memory barriers respectively. 
*/ - + +#ifdef CONFIG_X86_MFENCE +#define mb() __asm__ __volatile__ ("mfence": : :"memory") +#else #define mb() __asm__ __volatile__ ("lock; addl $0,0(%%esp)": : :"memory") +#endif + +#ifdef CONFIG_X86_LFENCE +#define rmb() __asm__ __volatile__ ("lfence": : :"memory") +#else #define rmb() mb() +#endif + +#ifdef CONFIG_X86_SFENCE +#define wmb() __asm__ __volatile__ ("sfence": : :"memory") +#else +#ifdef CONFIG_X86_OOSTORE +#define wmb() __asm__ __volatile__ ("lock; addl $0,0(%%esp)": : :"memory") +#else +#define wmb() __asm__ __volatile__ ("": : :"memory") +#endif +#endif /* CONFIG_X86_SFENCE */ /** * read_barrier_depends - Flush all pending reads that subsequents reads @@ -340,12 +368,6 @@ static inline unsigned long __cmpxchg(vo #define read_barrier_depends() do { } while(0) -#ifdef CONFIG_X86_OOSTORE -#define wmb() __asm__ __volatile__ ("lock; addl $0,0(%%esp)": : :"memory") -#else -#define wmb() __asm__ __volatile__ ("": : :"memory") -#endif - #ifdef CONFIG_SMP #define smp_mb() mb() #define smp_rmb() rmb()