1 #ifndef __ASM_SMPLOCK_H
2 #define __ASM_SMPLOCK_H
/*
 * i386 SMP lock implementation
 */
9 #include <linux/interrupt.h>
10 #include <linux/spinlock.h>
11 #include <linux/sched.h>
12 #include <asm/current.h>
14 extern spinlock_cacheline_t kernel_flag_cacheline;
15 #define kernel_flag kernel_flag_cacheline.lock
17 #define kernel_locked() spin_is_locked(&kernel_flag)
/*
 * Release global kernel lock and global interrupt lock.
 *
 * Used around schedule(): drops the BKL (if @task holds it, indicated by
 * a non-negative lock_depth) and the global irq lock for @cpu, without
 * touching task->lock_depth so reacquire_kernel_lock() can restore it.
 *
 * NOTE(review): upstream versions of this macro also re-enable local
 * interrupts (__sti()) at the end — confirm against the original tree.
 */
#define release_kernel_lock(task, cpu) \
do { \
	if (task->lock_depth >= 0) \
		spin_unlock(&kernel_flag); \
	release_irqlock(cpu); \
} while (0)
/*
 * Re-acquire the kernel lock after schedule(): if @task held the BKL
 * when it was switched out (lock_depth >= 0), take the spinlock again.
 * lock_depth itself is untouched — nesting state is preserved across
 * the release/reacquire pair.
 */
#define reacquire_kernel_lock(task) \
do { \
	if (task->lock_depth >= 0) \
		spin_lock(&kernel_flag); \
} while (0)
41 * Getting the big kernel lock.
43 * This cannot happen asynchronously,
44 * so we only need to worry about other
47 static __inline__ void lock_kernel(void)
50 if (!++current->lock_depth)
51 spin_lock(&kernel_flag);
58 :"=m" (__dummy_lock(&kernel_flag)),
59 "=m" (current->lock_depth));
63 static __inline__ void unlock_kernel(void)
65 if (current->lock_depth < 0)
68 if (--current->lock_depth < 0)
69 spin_unlock(&kernel_flag);
76 :"=m" (__dummy_lock(&kernel_flag)),
77 "=m" (current->lock_depth));
81 #endif /* __ASM_SMPLOCK_H */