@@ -119,7 +119,7 @@
 /*
  * Despite its name it doesn't necessarily has to be a full barrier.
  * It should only guarantee that a STORE before the critical section
- * can not be reordered with a LOAD inside this section.
+ * can not be reordered with LOADs and STOREs inside this section.
  * spin_lock() is the one-way barrier, this LOAD can not escape out
  * of the region. So the default implementation simply ensures that
  * a STORE can not move into the critical section, smp_wmb() should
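The reworded line broadens the documented guarantee from loads only to all accesses inside the critical section. A minimal sketch of the pattern the barrier exists for; the names flag, shared and lock are illustrative, not part of this patch:

	WRITE_ONCE(flag, 1);		/* STORE before the critical section */
	smp_mb__before_spinlock();	/* order it against the section below */
	spin_lock(&lock);
	v = READ_ONCE(shared);		/* must not be reordered before the STORE */
	/* ... */
	spin_unlock(&lock);

Without the barrier, spin_lock() is only a one-way barrier: accesses after it cannot leak out of the critical section, but the earlier STORE may still move in past the LOAD.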
@@ -129,16 +129,6 @@
 #define smp_mb__before_spinlock()	smp_wmb()
 #endif
 
-/*
- * Place this after a lock-acquisition primitive to guarantee that
- * an UNLOCK+LOCK pair act as a full barrier.  This guarantee applies
- * if the UNLOCK and LOCK are executed by the same CPU or if the
- * UNLOCK and LOCK operate on the same lock variable.
- */
-#ifndef smp_mb__after_unlock_lock
-#define smp_mb__after_unlock_lock()	do { } while (0)
-#endif
-
 /**
  * raw_spin_unlock_wait - wait until the spinlock gets unlocked
  * @lock: the spinlock in question.
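The removed block documented smp_mb__after_unlock_lock(), which upgrades an UNLOCK followed by a LOCK into a full barrier. This patch drops it from the generic header, so any remaining user (RCU appears to be the only one at this point) has to carry a private definition. For reference, a sketch of the pattern it supported, with illustrative lock names:

	spin_lock(&lock_a);
	/* ... */
	spin_unlock(&lock_a);

	spin_lock(&lock_b);
	smp_mb__after_unlock_lock();	/* UNLOCK+LOCK now acts as a full barrier */
	/* accesses here are fully ordered against everything before the unlock */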
@@ -189,6 +179,8 @@
 #ifdef CONFIG_DEBUG_LOCK_ALLOC
 # define raw_spin_lock_nested(lock, subclass) \
 	_raw_spin_lock_nested(lock, subclass)
+# define raw_spin_lock_bh_nested(lock, subclass) \
+	_raw_spin_lock_bh_nested(lock, subclass)
 
 # define raw_spin_lock_nest_lock(lock, nest_lock)		\
 	 do {							\
@@ -204,6 +196,7 @@
 # define raw_spin_lock_nested(lock, subclass)		\
 	_raw_spin_lock(((void)(subclass), (lock)))
 # define raw_spin_lock_nest_lock(lock, nest_lock)	_raw_spin_lock(lock)
+# define raw_spin_lock_bh_nested(lock, subclass)	_raw_spin_lock_bh(lock)
 #endif
 
 #if defined(CONFIG_SMP) || defined(CONFIG_DEBUG_SPINLOCK)
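With lockdep enabled, the new raw_spin_lock_bh_nested() forwards the subclass so lockdep can validate holding two locks of the same class. Without lockdep it discards the subclass entirely; note that, unlike raw_spin_lock_nested() just above it, it does not evaluate the subclass via the ((void)(subclass), (lock)) idiom, so a subclass expression with side effects would behave differently between the two configs. A hedged sketch of the kind of caller this serves; struct bucket and move_entry() are hypothetical:

	/* Take two same-class locks with BHs disabled; the caller must
	 * impose a stable order (e.g. by address) to avoid ABBA deadlock. */
	static void move_entry(struct bucket *src, struct bucket *dst)
	{
		raw_spin_lock_bh(&src->lock);
		raw_spin_lock_bh_nested(&dst->lock, SINGLE_DEPTH_NESTING);
		/* ... relink the entry from src to dst ... */
		raw_spin_unlock_bh(&dst->lock);
		raw_spin_unlock_bh(&src->lock);
	}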
@@ -292,7 +285,7 @@
  * Map the spin_lock functions to the raw variants for PREEMPT_RT=n
  */
 
-static inline raw_spinlock_t *spinlock_check(spinlock_t *lock)
+static __always_inline raw_spinlock_t *spinlock_check(spinlock_t *lock)
 {
 	return &lock->rlock;
 }
@@ -303,17 +296,17 @@
 	raw_spin_lock_init(&(_lock)->rlock);		\
 } while (0)
 
-static inline void spin_lock(spinlock_t *lock)
+static __always_inline void spin_lock(spinlock_t *lock)
 {
 	raw_spin_lock(&lock->rlock);
 }
 
-static inline void spin_lock_bh(spinlock_t *lock)
+static __always_inline void spin_lock_bh(spinlock_t *lock)
 {
 	raw_spin_lock_bh(&lock->rlock);
 }
 
-static inline int spin_trylock(spinlock_t *lock)
+static __always_inline int spin_trylock(spinlock_t *lock)
 {
 	return raw_spin_trylock(&lock->rlock);
 }
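These static inline wrappers are pure trampolines onto the raw_ ops. The patch switches them to __always_inline so that compilers which treat plain inline as a hint (for instance with CONFIG_OPTIMIZE_INLINING=y) cannot emit them out of line and add a function call to every lock operation. The attribute is roughly this, per the usual compiler-gcc.h definition:

	#define __always_inline	inline __attribute__((always_inline))

Behaviour is unchanged: each wrapper still compiles down to the corresponding raw_spin_*() operation on &lock->rlock.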
@@ -323,12 +316,17 @@
 	raw_spin_lock_nested(spinlock_check(lock), subclass);	\
 } while (0)
 
+#define spin_lock_bh_nested(lock, subclass)			\
+do {								\
+	raw_spin_lock_bh_nested(spinlock_check(lock), subclass);\
+} while (0)
+
 #define spin_lock_nest_lock(lock, nest_lock)				\
 do {									\
 	raw_spin_lock_nest_lock(spinlock_check(lock), nest_lock);	\
 } while (0)
 
-static inline void spin_lock_irq(spinlock_t *lock)
+static __always_inline void spin_lock_irq(spinlock_t *lock)
 {
 	raw_spin_lock_irq(&lock->rlock);
 }
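The new spin_lock_bh_nested() is simply the spinlock_t wrapper for the raw operation introduced earlier. The spin_lock_nest_lock() macro visible in the context covers the other lockdep annotation: acquiring an arbitrary number of same-class locks while holding an outer lock that serializes all lockers. A hedged sketch; the parent/child structures are hypothetical:

	/* parent->lock excludes all other lockers of the children, so
	 * taking many same-class child locks cannot deadlock; the
	 * nest_lock annotation tells lockdep exactly that. */
	spin_lock(&parent->lock);
	list_for_each_entry(child, &parent->children, node)
		spin_lock_nest_lock(&child->lock, &parent->lock);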
@@ -343,32 +341,32 @@
 	raw_spin_lock_irqsave_nested(spinlock_check(lock), flags, subclass); \
 } while (0)
 
-static inline void spin_unlock(spinlock_t *lock)
+static __always_inline void spin_unlock(spinlock_t *lock)
 {
 	raw_spin_unlock(&lock->rlock);
 }
 
-static inline void spin_unlock_bh(spinlock_t *lock)
+static __always_inline void spin_unlock_bh(spinlock_t *lock)
 {
 	raw_spin_unlock_bh(&lock->rlock);
 }
 
-static inline void spin_unlock_irq(spinlock_t *lock)
+static __always_inline void spin_unlock_irq(spinlock_t *lock)
 {
 	raw_spin_unlock_irq(&lock->rlock);
 }
 
-static inline void spin_unlock_irqrestore(spinlock_t *lock, unsigned long flags)
+static __always_inline void spin_unlock_irqrestore(spinlock_t *lock, unsigned long flags)
 {
 	raw_spin_unlock_irqrestore(&lock->rlock, flags);
 }
 
-static inline int spin_trylock_bh(spinlock_t *lock)
+static __always_inline int spin_trylock_bh(spinlock_t *lock)
 {
 	return raw_spin_trylock_bh(&lock->rlock);
 }
 
-static inline int spin_trylock_irq(spinlock_t *lock)
+static __always_inline int spin_trylock_irq(spinlock_t *lock)
 {
 	return raw_spin_trylock_irq(&lock->rlock);
 }
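The _irqsave/_irqrestore pair remains the usual choice for data shared with an interrupt handler, since it nests safely whether or not interrupts were already disabled. A typical pairing, with a hypothetical dev_lock:

	static DEFINE_SPINLOCK(dev_lock);

	unsigned long flags;

	spin_lock_irqsave(&dev_lock, flags);
	/* ... touch state that the IRQ handler also modifies ... */
	spin_unlock_irqrestore(&dev_lock, flags);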
@@ -378,22 +376,22 @@
 	raw_spin_trylock_irqsave(spinlock_check(lock), flags); \
 })
 
-static inline void spin_unlock_wait(spinlock_t *lock)
+static __always_inline void spin_unlock_wait(spinlock_t *lock)
 {
 	raw_spin_unlock_wait(&lock->rlock);
 }
 
-static inline int spin_is_locked(spinlock_t *lock)
+static __always_inline int spin_is_locked(spinlock_t *lock)
 {
 	return raw_spin_is_locked(&lock->rlock);
 }
 
-static inline int spin_is_contended(spinlock_t *lock)
+static __always_inline int spin_is_contended(spinlock_t *lock)
 {
 	return raw_spin_is_contended(&lock->rlock);
 }
 
-static inline int spin_can_lock(spinlock_t *lock)
+static __always_inline int spin_can_lock(spinlock_t *lock)
 {
 	return raw_spin_can_lock(&lock->rlock);
 }
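Of the query helpers, spin_is_contended() is the one with a well-established idiom: voluntary lock-breaking in long loops, in the style of cond_resched_lock(). A sketch with an illustrative lock:

	/* Briefly drop a hot lock if someone is spinning on it. */
	if (spin_is_contended(&lock) || need_resched()) {
		spin_unlock(&lock);
		cond_resched();
		spin_lock(&lock);
	}

spin_unlock_wait(), by contrast, only waits for the current holder to release; it does not acquire the lock and is not a substitute for taking it.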