diff -urN 2.4.6pre2/include/asm-alpha/atomic.h softirq-smp-fixes/include/asm-alpha/atomic.h --- 2.4.6pre2/include/asm-alpha/atomic.h Tue Nov 28 18:40:01 2000 +++ softirq-smp-fixes/include/asm-alpha/atomic.h Mon Jun 11 16:32:07 2001 @@ -106,4 +106,9 @@ #define atomic_inc(v) atomic_add(1,(v)) #define atomic_dec(v) atomic_sub(1,(v)) +#define smp_mb__before_atomic_inc() smp_mb() +#define smp_mb__after_atomic_inc() smp_mb() +#define smp_mb__before_atomic_dec() smp_mb() +#define smp_mb__after_atomic_dec() smp_mb() + #endif /* _ALPHA_ATOMIC_H */ diff -urN 2.4.6pre2/include/asm-arm/atomic.h softirq-smp-fixes/include/asm-arm/atomic.h --- 2.4.6pre2/include/asm-arm/atomic.h Thu Nov 16 15:37:32 2000 +++ softirq-smp-fixes/include/asm-arm/atomic.h Mon Jun 11 16:32:07 2001 @@ -36,6 +36,11 @@ #define atomic_read(v) ((v)->counter) #define atomic_set(v,i) (((v)->counter) = (i)) +#define smp_mb__before_atomic_inc() barrier() +#define smp_mb__after_atomic_inc() barrier() +#define smp_mb__before_atomic_dec() barrier() +#define smp_mb__after_atomic_dec() barrier() + static __inline__ void atomic_add(int i, volatile atomic_t *v) { unsigned long flags; diff -urN 2.4.6pre2/include/asm-cris/atomic.h softirq-smp-fixes/include/asm-cris/atomic.h --- 2.4.6pre2/include/asm-cris/atomic.h Thu Feb 22 03:45:11 2001 +++ softirq-smp-fixes/include/asm-cris/atomic.h Mon Jun 11 16:32:07 2001 @@ -25,6 +25,11 @@ #define atomic_read(v) ((v)->counter) #define atomic_set(v,i) (((v)->counter) = (i)) +#define smp_mb__before_atomic_inc() barrier() +#define smp_mb__after_atomic_inc() barrier() +#define smp_mb__before_atomic_dec() barrier() +#define smp_mb__after_atomic_dec() barrier() + /* These should be written in asm but we do it in C for now. 
*/ static __inline__ void atomic_add(int i, volatile atomic_t *v) diff -urN 2.4.6pre2/include/asm-i386/atomic.h softirq-smp-fixes/include/asm-i386/atomic.h --- 2.4.6pre2/include/asm-i386/atomic.h Fri May 25 04:29:47 2001 +++ softirq-smp-fixes/include/asm-i386/atomic.h Mon Jun 11 18:54:41 2001 @@ -42,6 +42,15 @@ */ #define atomic_set(v,i) (((v)->counter) = (i)) +/* + * We may need an explicit serialization across inc/dec on + * recent chips; if not, just replace this with a barrier(). + */ +#define smp_mb__before_atomic_inc() barrier() +#define smp_mb__after_atomic_inc() barrier() +#define smp_mb__before_atomic_dec() barrier() +#define smp_mb__after_atomic_dec() barrier() + /** * atomic_add - add integer to atomic variable * @i: integer value to add diff -urN 2.4.6pre2/include/asm-ia64/atomic.h softirq-smp-fixes/include/asm-ia64/atomic.h --- 2.4.6pre2/include/asm-ia64/atomic.h Thu Nov 16 15:37:42 2000 +++ softirq-smp-fixes/include/asm-ia64/atomic.h Mon Jun 11 16:32:07 2001 @@ -27,6 +27,11 @@ #define atomic_read(v) ((v)->counter) #define atomic_set(v,i) (((v)->counter) = (i)) +#define smp_mb__before_atomic_inc() barrier() +#define smp_mb__after_atomic_inc() barrier() +#define smp_mb__before_atomic_dec() barrier() +#define smp_mb__after_atomic_dec() barrier() + static __inline__ int ia64_atomic_add (int i, atomic_t *v) { diff -urN 2.4.6pre2/include/asm-m68k/atomic.h softirq-smp-fixes/include/asm-m68k/atomic.h --- 2.4.6pre2/include/asm-m68k/atomic.h Fri Nov 20 20:43:55 1998 +++ softirq-smp-fixes/include/asm-m68k/atomic.h Mon Jun 11 16:32:07 2001 @@ -16,6 +16,11 @@ #define atomic_read(v) ((v)->counter) #define atomic_set(v, i) (((v)->counter) = i) +#define smp_mb__before_atomic_inc() barrier() +#define smp_mb__after_atomic_inc() barrier() +#define smp_mb__before_atomic_dec() barrier() +#define smp_mb__after_atomic_dec() barrier() + static __inline__ void atomic_add(int i, atomic_t *v) { __asm__ __volatile__("addl %1,%0" : "=m" (*v) : "id" (i), "0" (*v)); diff 
-urN 2.4.6pre2/include/asm-mips/atomic.h softirq-smp-fixes/include/asm-mips/atomic.h --- 2.4.6pre2/include/asm-mips/atomic.h Sat May 13 17:31:25 2000 +++ softirq-smp-fixes/include/asm-mips/atomic.h Mon Jun 11 16:32:07 2001 @@ -30,6 +30,11 @@ #define atomic_read(v) ((v)->counter) #define atomic_set(v,i) ((v)->counter = (i)) +#define smp_mb__before_atomic_inc() barrier() +#define smp_mb__after_atomic_inc() barrier() +#define smp_mb__before_atomic_dec() barrier() +#define smp_mb__after_atomic_dec() barrier() + #if !defined(CONFIG_CPU_HAS_LLSC) #include diff -urN 2.4.6pre2/include/asm-mips64/atomic.h softirq-smp-fixes/include/asm-mips64/atomic.h --- 2.4.6pre2/include/asm-mips64/atomic.h Thu Dec 14 22:34:13 2000 +++ softirq-smp-fixes/include/asm-mips64/atomic.h Mon Jun 11 16:32:07 2001 @@ -24,6 +24,11 @@ #define atomic_read(v) ((v)->counter) #define atomic_set(v,i) ((v)->counter = (i)) +#define smp_mb__before_atomic_inc() barrier() +#define smp_mb__after_atomic_inc() barrier() +#define smp_mb__before_atomic_dec() barrier() +#define smp_mb__after_atomic_dec() barrier() + extern __inline__ void atomic_add(int i, volatile atomic_t * v) { unsigned long temp; diff -urN 2.4.6pre2/include/asm-parisc/atomic.h softirq-smp-fixes/include/asm-parisc/atomic.h --- 2.4.6pre2/include/asm-parisc/atomic.h Thu Dec 14 22:34:13 2000 +++ softirq-smp-fixes/include/asm-parisc/atomic.h Mon Jun 11 16:32:07 2001 @@ -50,6 +50,10 @@ volatile int counter; } atomic_t; +#define smp_mb__before_atomic_inc() barrier() +#define smp_mb__after_atomic_inc() barrier() +#define smp_mb__before_atomic_dec() barrier() +#define smp_mb__after_atomic_dec() barrier() /* It's possible to reduce all atomic operations to either * __atomic_add_return, __atomic_set and __atomic_ret (the latter * is there only for consistency). 
*/ diff -urN 2.4.6pre2/include/asm-ppc/atomic.h softirq-smp-fixes/include/asm-ppc/atomic.h --- 2.4.6pre2/include/asm-ppc/atomic.h Sat May 26 04:03:48 2001 +++ softirq-smp-fixes/include/asm-ppc/atomic.h Mon Jun 11 16:32:07 2001 @@ -15,6 +15,11 @@ #define atomic_read(v) ((v)->counter) #define atomic_set(v,i) (((v)->counter) = (i)) +#define smp_mb__before_atomic_inc() barrier() +#define smp_mb__after_atomic_inc() barrier() +#define smp_mb__before_atomic_dec() barrier() +#define smp_mb__after_atomic_dec() barrier() + extern void atomic_clear_mask(unsigned long mask, unsigned long *addr); extern void atomic_set_mask(unsigned long mask, unsigned long *addr); diff -urN 2.4.6pre2/include/asm-s390/atomic.h softirq-smp-fixes/include/asm-s390/atomic.h --- 2.4.6pre2/include/asm-s390/atomic.h Tue May 1 19:35:31 2001 +++ softirq-smp-fixes/include/asm-s390/atomic.h Mon Jun 11 16:32:07 2001 @@ -23,6 +23,11 @@ typedef struct { volatile int counter; } atomic_t __attribute__ ((aligned (4))); #define ATOMIC_INIT(i) { (i) } +#define smp_mb__before_atomic_inc() barrier() +#define smp_mb__after_atomic_inc() barrier() +#define smp_mb__before_atomic_dec() barrier() +#define smp_mb__after_atomic_dec() barrier() + #define atomic_eieio() __asm__ __volatile__ ("BCR 15,0") #define __CS_LOOP(old, new, ptr, op_val, op_string) \ diff -urN 2.4.6pre2/include/asm-s390x/atomic.h softirq-smp-fixes/include/asm-s390x/atomic.h --- 2.4.6pre2/include/asm-s390x/atomic.h Tue May 1 19:35:31 2001 +++ softirq-smp-fixes/include/asm-s390x/atomic.h Mon Jun 11 16:32:07 2001 @@ -23,6 +23,11 @@ typedef struct { volatile int counter; } atomic_t __attribute__ ((aligned (4))); #define ATOMIC_INIT(i) { (i) } +#define 
smp_mb__before_atomic_inc() barrier() +#define smp_mb__after_atomic_inc() barrier() +#define smp_mb__before_atomic_dec() barrier() +#define smp_mb__after_atomic_dec() barrier() + #define atomic_eieio() __asm__ __volatile__ ("BCR 15,0") #define __CS_LOOP(old, new, ptr, op_val, op_string) \ diff -urN 2.4.6pre2/include/asm-sh/atomic.h softirq-smp-fixes/include/asm-sh/atomic.h --- 2.4.6pre2/include/asm-sh/atomic.h Thu Nov 16 15:37:33 2000 +++ softirq-smp-fixes/include/asm-sh/atomic.h Mon Jun 11 16:32:07 2001 @@ -14,6 +14,11 @@ #define atomic_read(v) ((v)->counter) #define atomic_set(v,i) ((v)->counter = (i)) +#define smp_mb__before_atomic_inc() barrier() +#define smp_mb__after_atomic_inc() barrier() +#define smp_mb__before_atomic_dec() barrier() +#define smp_mb__after_atomic_dec() barrier() + #include /* diff -urN 2.4.6pre2/include/asm-sparc/atomic.h softirq-smp-fixes/include/asm-sparc/atomic.h --- 2.4.6pre2/include/asm-sparc/atomic.h Sun Apr 1 01:17:32 2001 +++ softirq-smp-fixes/include/asm-sparc/atomic.h Mon Jun 11 16:32:07 2001 @@ -48,6 +48,11 @@ #define atomic_set(v, i) (((v)->counter) = ((i) << 8)) #endif +#define smp_mb__before_atomic_inc() barrier() +#define smp_mb__after_atomic_inc() barrier() +#define smp_mb__before_atomic_dec() barrier() +#define smp_mb__after_atomic_dec() barrier() + static __inline__ int __atomic_add(int i, atomic_t *v) { register volatile int *ptr asm("g1"); diff -urN 2.4.6pre2/include/asm-sparc64/atomic.h softirq-smp-fixes/include/asm-sparc64/atomic.h --- 2.4.6pre2/include/asm-sparc64/atomic.h Thu Nov 16 15:37:42 2000 +++ softirq-smp-fixes/include/asm-sparc64/atomic.h Mon Jun 11 16:32:07 2001 @@ -14,6 +14,11 @@ #define atomic_read(v) ((v)->counter) #define atomic_set(v, i) (((v)->counter) = i) +#define smp_mb__before_atomic_inc() barrier() +#define smp_mb__after_atomic_inc() barrier() +#define smp_mb__before_atomic_dec() barrier() +#define smp_mb__after_atomic_dec() barrier() + extern int __atomic_add(int, atomic_t *); extern int 
__atomic_sub(int, atomic_t *); diff -urN 2.4.6pre2/include/linux/interrupt.h softirq-smp-fixes/include/linux/interrupt.h --- 2.4.6pre2/include/linux/interrupt.h Sat Jun 9 00:04:49 2001 +++ softirq-smp-fixes/include/linux/interrupt.h Mon Jun 11 18:54:42 2001 @@ -138,22 +138,26 @@ static inline void tasklet_disable_nosync(struct tasklet_struct *t) { atomic_inc(&t->count); + smp_mb__after_atomic_inc(); } static inline void tasklet_disable(struct tasklet_struct *t) { tasklet_disable_nosync(t); tasklet_unlock_wait(t); + smp_mb(); } static inline void tasklet_enable(struct tasklet_struct *t) { + smp_mb__before_atomic_dec(); if (atomic_dec_and_test(&t->count)) tasklet_schedule(t); } static inline void tasklet_hi_enable(struct tasklet_struct *t) { + smp_mb__before_atomic_dec(); if (atomic_dec_and_test(&t->count)) tasklet_hi_schedule(t); }