1 #ifndef _LINUX_LOCALLOCK_H
2 #define _LINUX_LOCALLOCK_H
4 #include <linux/percpu.h>
5 #include <linux/spinlock.h>
7 #ifdef CONFIG_PREEMPT_RT_BASE
9 #ifdef CONFIG_DEBUG_SPINLOCK
10 # define LL_WARN(cond) WARN_ON(cond)
12 # define LL_WARN(cond) do { } while (0)
16 * per cpu lock based substitute for local_irq_*()
18 struct local_irq_lock {
20 struct task_struct *owner;
/* Define a per-CPU local_irq_lock with its spinlock statically initialized. */
#define DEFINE_LOCAL_IRQ_LOCK(lvar)					\
	DEFINE_PER_CPU(struct local_irq_lock, lvar) = {			\
		.lock = __SPIN_LOCK_UNLOCKED((lvar).lock) }
/* Declare (extern) a per-CPU local_irq_lock defined in another unit. */
#define DECLARE_LOCAL_IRQ_LOCK(lvar)					\
	DECLARE_PER_CPU(struct local_irq_lock, lvar)
/* Runtime (re)initialization of every CPU's instance of @lvar. */
#define local_irq_lock_init(lvar)					\
	do {								\
		int __cpu;						\
		for_each_possible_cpu(__cpu)				\
			spin_lock_init(&per_cpu(lvar, __cpu).lock);	\
	} while (0)
/*
 * spin_lock|trylock|unlock_local flavour that does not migrate disable
 * used for __local_lock|trylock|unlock where get_local_var/put_local_var
 * already takes care of the migrate_disable/enable
 * for CONFIG_PREEMPT_BASE map to the normal spin_* calls.
 */
#ifdef CONFIG_PREEMPT_RT_FULL
# define spin_lock_local(lock)			rt_spin_lock__no_mg(lock)
# define spin_trylock_local(lock)		rt_spin_trylock__no_mg(lock)
# define spin_unlock_local(lock)		rt_spin_unlock__no_mg(lock)
#else
# define spin_lock_local(lock)			spin_lock(lock)
# define spin_trylock_local(lock)		spin_trylock(lock)
# define spin_unlock_local(lock)		spin_unlock(lock)
#endif
55 static inline void __local_lock(struct local_irq_lock *lv)
57 if (lv->owner != current) {
58 spin_lock_local(&lv->lock);
/* get_local_var() disables migration so the per-CPU pointer stays valid. */
#define local_lock(lvar)					\
	do { __local_lock(&get_local_var(lvar)); } while (0)
69 static inline int __local_trylock(struct local_irq_lock *lv)
71 if (lv->owner != current && spin_trylock_local(&lv->lock)) {
/* Trylock wrapper; drops the migrate-disable again on failure. */
#define local_trylock(lvar)						\
	({								\
		int __locked;						\
		__locked = __local_trylock(&get_local_var(lvar));	\
		if (!__locked)						\
			put_local_var(lvar);				\
		__locked;						\
	})
90 static inline void __local_unlock(struct local_irq_lock *lv)
92 LL_WARN(lv->nestcnt == 0);
93 LL_WARN(lv->owner != current);
98 spin_unlock_local(&lv->lock);
/* Counterpart of local_lock(); put_local_var() re-enables migration. */
#define local_unlock(lvar)					\
	do {							\
		__local_unlock(this_cpu_ptr(&lvar));		\
		put_local_var(lvar);				\
	} while (0)
107 static inline void __local_lock_irq(struct local_irq_lock *lv)
109 spin_lock_irqsave(&lv->lock, lv->flags);
111 LL_WARN(lv->nestcnt);
/* irq-disabling lock of this CPU's @lvar (migration disabled as well). */
#define local_lock_irq(lvar)						\
	do { __local_lock_irq(&get_local_var(lvar)); } while (0)
/* Same as local_lock_irq(), but targets a specific remote @cpu instance. */
#define local_lock_irq_on(lvar, cpu)					\
	do { __local_lock_irq(&per_cpu(lvar, cpu)); } while (0)
122 static inline void __local_unlock_irq(struct local_irq_lock *lv)
124 LL_WARN(!lv->nestcnt);
125 LL_WARN(lv->owner != current);
128 spin_unlock_irq(&lv->lock);
/* Counterpart of local_lock_irq() for this CPU's instance. */
#define local_unlock_irq(lvar)						\
	do {								\
		__local_unlock_irq(this_cpu_ptr(&lvar));		\
		put_local_var(lvar);					\
	} while (0)
/* Counterpart of local_lock_irq_on(); no put_local_var, none was taken. */
#define local_unlock_irq_on(lvar, cpu)					\
	do {								\
		__local_unlock_irq(&per_cpu(lvar, cpu));		\
	} while (0)
142 static inline int __local_lock_irqsave(struct local_irq_lock *lv)
144 if (lv->owner != current) {
145 __local_lock_irq(lv);
/*
 * On a nested acquisition drop the extra migrate-disable from
 * get_local_var() so lock/unlock nesting stays balanced.
 */
#define local_lock_irqsave(lvar, _flags)				\
	do {								\
		if (__local_lock_irqsave(&get_local_var(lvar)))		\
			put_local_var(lvar);				\
		_flags = __this_cpu_read(lvar.flags);			\
	} while (0)
/* irqsave lock of a specific remote @cpu instance of @lvar. */
#define local_lock_irqsave_on(lvar, _flags, cpu)			\
	do {								\
		__local_lock_irqsave(&per_cpu(lvar, cpu));		\
		_flags = per_cpu(lvar, cpu).flags;			\
	} while (0)
166 static inline int __local_unlock_irqrestore(struct local_irq_lock *lv,
169 LL_WARN(!lv->nestcnt);
170 LL_WARN(lv->owner != current);
175 spin_unlock_irqrestore(&lv->lock, lv->flags);
/* Re-enable migration only when the lock was really released. */
#define local_unlock_irqrestore(lvar, flags)				\
	do {								\
		if (__local_unlock_irqrestore(this_cpu_ptr(&lvar), flags)) \
			put_local_var(lvar);				\
	} while (0)
/* Counterpart of local_lock_irqsave_on() for a remote @cpu instance. */
#define local_unlock_irqrestore_on(lvar, flags, cpu)			\
	do {								\
		__local_unlock_irqrestore(&per_cpu(lvar, cpu), flags);	\
	} while (0)
/* Take @lvar, then trylock @lock; back out of @lvar if @lock is busy. */
#define local_spin_trylock_irq(lvar, lock)				\
	({								\
		int __locked;						\
		local_lock_irq(lvar);					\
		__locked = spin_trylock(lock);				\
		if (!__locked)						\
			local_unlock_irq(lvar);				\
		__locked;						\
	})
/* Nested acquisition: local @lvar first, then the shared @lock. */
#define local_spin_lock_irq(lvar, lock)					\
	do {								\
		local_lock_irq(lvar);					\
		spin_lock(lock);					\
	} while (0)
/* Release in reverse order of local_spin_lock_irq(). */
#define local_spin_unlock_irq(lvar, lock)				\
	do {								\
		spin_unlock(lock);					\
		local_unlock_irq(lvar);					\
	} while (0)
/* irqsave variant: local @lvar first, then the shared @lock. */
#define local_spin_lock_irqsave(lvar, lock, flags)			\
	do {								\
		local_lock_irqsave(lvar, flags);			\
		spin_lock(lock);					\
	} while (0)
/* Release in reverse order of local_spin_lock_irqsave(). */
#define local_spin_unlock_irqrestore(lvar, lock, flags)			\
	do {								\
		spin_unlock(lock);					\
		local_unlock_irqrestore(lvar, flags);			\
	} while (0)
/* Lock @lvar and yield an lvalue for this CPU's instance of @var. */
#define get_locked_var(lvar, var)					\
	(*({								\
		local_lock(lvar);					\
		this_cpu_ptr(&var);					\
	}))
/*
 * Counterpart of get_locked_var(). No trailing semicolon in the
 * expansion: the caller supplies it, and a doubled semicolon would
 * break unbraced if/else bodies.
 */
#define put_locked_var(lvar, var)	local_unlock(lvar)
/* Lock @lvar and evaluate to the (now stable) current CPU id. */
#define local_lock_cpu(lvar)						\
	({								\
		local_lock(lvar);					\
		smp_processor_id();					\
	})
/* Counterpart of local_lock_cpu(). */
#define local_unlock_cpu(lvar)			local_unlock(lvar)
240 #else /* PREEMPT_RT_BASE */
242 #define DEFINE_LOCAL_IRQ_LOCK(lvar) __typeof__(const int) lvar
243 #define DECLARE_LOCAL_IRQ_LOCK(lvar) extern __typeof__(const int) lvar
245 static inline void local_irq_lock_init(int lvar) { }
/* !RT: local locks map 1:1 onto preemption/interrupt control. */
#define local_lock(lvar)			preempt_disable()
#define local_unlock(lvar)			preempt_enable()
#define local_lock_irq(lvar)			local_irq_disable()
#define local_unlock_irq(lvar)			local_irq_enable()
#define local_lock_irqsave(lvar, flags)		local_irq_save(flags)
#define local_unlock_irqrestore(lvar, flags)	local_irq_restore(flags)
/* !RT: the combined local+spin ops collapse to plain irq spinlock ops. */
#define local_spin_trylock_irq(lvar, lock)	spin_trylock_irq(lock)
#define local_spin_lock_irq(lvar, lock)		spin_lock_irq(lock)
#define local_spin_unlock_irq(lvar, lock)	spin_unlock_irq(lock)
#define local_spin_lock_irqsave(lvar, lock, flags)	\
	spin_lock_irqsave(lock, flags)
#define local_spin_unlock_irqrestore(lvar, lock, flags)	\
	spin_unlock_irqrestore(lock, flags)
/* !RT: per-CPU variable access protected by the classic cpu-var API. */
#define get_locked_var(lvar, var)		get_cpu_var(var)
#define put_locked_var(lvar, var)		put_cpu_var(var)

#define local_lock_cpu(lvar)			get_cpu()
#define local_unlock_cpu(lvar)			put_cpu()