summaryrefslogtreecommitdiff
path: root/kernel
diff options
context:
space:
mode:
Diffstat (limited to 'kernel')
-rw-r--r--kernel/locking/spinlock.c12
1 file changed, 8 insertions(+), 4 deletions(-)
diff --git a/kernel/locking/spinlock.c b/kernel/locking/spinlock.c
index 7685defd7c52..b42d293da38b 100644
--- a/kernel/locking/spinlock.c
+++ b/kernel/locking/spinlock.c
@@ -64,8 +64,9 @@ EXPORT_PER_CPU_SYMBOL(__mmiowb_state);
* time (making _this_ CPU preemptible if possible), and we also signal
* towards that other CPU that it should break the lock ASAP.
*/
-#define BUILD_LOCK_OPS(op, locktype) \
+#define BUILD_LOCK_OPS(op, locktype, lock_ctx_op) \
static void __lockfunc __raw_##op##_lock(locktype##_t *lock) \
+ lock_ctx_op(lock) \
{ \
for (;;) { \
preempt_disable(); \
@@ -78,6 +79,7 @@ static void __lockfunc __raw_##op##_lock(locktype##_t *lock) \
} \
\
static unsigned long __lockfunc __raw_##op##_lock_irqsave(locktype##_t *lock) \
+ lock_ctx_op(lock) \
{ \
unsigned long flags; \
\
@@ -96,11 +98,13 @@ static unsigned long __lockfunc __raw_##op##_lock_irqsave(locktype##_t *lock) \
} \
\
static void __lockfunc __raw_##op##_lock_irq(locktype##_t *lock) \
+ lock_ctx_op(lock) \
{ \
_raw_##op##_lock_irqsave(lock); \
} \
\
static void __lockfunc __raw_##op##_lock_bh(locktype##_t *lock) \
+ lock_ctx_op(lock) \
{ \
unsigned long flags; \
\
@@ -123,11 +127,11 @@ static void __lockfunc __raw_##op##_lock_bh(locktype##_t *lock) \
* __[spin|read|write]_lock_irqsave()
* __[spin|read|write]_lock_bh()
*/
-BUILD_LOCK_OPS(spin, raw_spinlock);
+BUILD_LOCK_OPS(spin, raw_spinlock, __acquires);
#ifndef CONFIG_PREEMPT_RT
-BUILD_LOCK_OPS(read, rwlock);
-BUILD_LOCK_OPS(write, rwlock);
+BUILD_LOCK_OPS(read, rwlock, __acquires_shared);
+BUILD_LOCK_OPS(write, rwlock, __acquires);
#endif
#endif