Subversion Repositories Kolibri OS


Diff between Rev 5272 and Rev 6082 (unified form: '-' lines are Rev 5272, '+' lines are Rev 6082)
@@ -117,13 +117,13 @@
 #endif
 
 /*
  * Despite its name it doesn't necessarily has to be a full barrier.
  * It should only guarantee that a STORE before the critical section
- * can not be reordered with a LOAD inside this section.
+ * can not be reordered with LOADs and STOREs inside this section.
  * spin_lock() is the one-way barrier, this LOAD can not escape out
  * of the region. So the default implementation simply ensures that
  * a STORE can not move into the critical section, smp_wmb() should
  * serialize it with another STORE done by spin_lock().
  */
 #ifndef smp_mb__before_spinlock
 #define smp_mb__before_spinlock()	smp_wmb()
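As a reading aid, here is a minimal sketch of the pattern the comment above describes, with hypothetical names (data_ready, demo_lock); it mirrors how the kernel used this hook, e.g. ahead of the pi_lock acquisition in try_to_wake_up():

#include <linux/spinlock.h>

static int data_ready;                 /* hypothetical shared flag */
static DEFINE_SPINLOCK(demo_lock);     /* hypothetical lock */

static void publish_then_lock(void)
{
	data_ready = 1;                /* STORE before the critical section */
	smp_mb__before_spinlock();     /* keep that STORE from moving past... */
	spin_lock(&demo_lock);
	/* ...the LOADs and STOREs inside this critical section */
	spin_unlock(&demo_lock);
}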
@@ -130,15 +130,5 @@
 #endif
-
-/*
- * Place this after a lock-acquisition primitive to guarantee that
- * an UNLOCK+LOCK pair act as a full barrier.  This guarantee applies
- * if the UNLOCK and LOCK are executed by the same CPU or if the
- * UNLOCK and LOCK operate on the same lock variable.
- */
-#ifndef smp_mb__after_unlock_lock
-#define smp_mb__after_unlock_lock()	do { } while (0)
-#endif
 
 /**
  * raw_spin_unlock_wait - wait until the spinlock gets unlocked
  * @lock: the spinlock in question.
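The block removed above let callers promote an UNLOCK followed by a LOCK to a full memory barrier; in mainline Linux this helper was later made private to the RCU implementation, which is why the newer revision drops it from this header. A sketch of the documented guarantee, valid against the older revision, with hypothetical locks a and b:

#include <linux/spinlock.h>

static DEFINE_SPINLOCK(a);             /* hypothetical locks */
static DEFINE_SPINLOCK(b);

static void unlock_then_lock(void)
{
	spin_lock(&a);
	/* ... accesses under a ... */
	spin_unlock(&a);               /* UNLOCK ... */
	spin_lock(&b);                 /* ... followed by LOCK */
	smp_mb__after_unlock_lock();   /* the pair now acts as a full barrier */
	/* accesses under b are ordered against everything done under a */
	spin_unlock(&b);
}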
@@ -187,8 +177,10 @@
 #define raw_spin_lock(lock)	_raw_spin_lock(lock)
 
 #ifdef CONFIG_DEBUG_LOCK_ALLOC
 # define raw_spin_lock_nested(lock, subclass) \
 	_raw_spin_lock_nested(lock, subclass)
+# define raw_spin_lock_bh_nested(lock, subclass) \
+	_raw_spin_lock_bh_nested(lock, subclass)
 
 # define raw_spin_lock_nest_lock(lock, nest_lock)			\
 	 do {								\
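The _nested variants exist for lockdep: taking two locks of the same lock class normally looks like a self-deadlock to the validator, and the subclass annotation marks the nesting as intentional (the bh variant added here does the same for the bottom-half-disabling calls). A sketch with a hypothetical struct demo; note the caller must still impose a consistent lock order itself:

#include <linux/spinlock.h>

struct demo {
	spinlock_t lock;
	int items;
};

static void transfer(struct demo *src, struct demo *dst)
{
	spin_lock(&src->lock);
	/* Second lock of the same class: tell lockdep this is a
	 * deliberate, bounded nesting rather than a deadlock. */
	spin_lock_nested(&dst->lock, SINGLE_DEPTH_NESTING);
	dst->items += src->items;
	src->items = 0;
	spin_unlock(&dst->lock);
	spin_unlock(&src->lock);
}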
@@ -202,8 +194,9 @@
  * CONFIG_DEBUG_LOCK_ALLOC=n and with W=1.
  */
 # define raw_spin_lock_nested(lock, subclass)		\
 	_raw_spin_lock(((void)(subclass), (lock)))
 # define raw_spin_lock_nest_lock(lock, nest_lock)	_raw_spin_lock(lock)
+# define raw_spin_lock_bh_nested(lock, subclass)	_raw_spin_lock_bh(lock)
 #endif
 
 #if defined(CONFIG_SMP) || defined(CONFIG_DEBUG_SPINLOCK)
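In the !CONFIG_DEBUG_LOCK_ALLOC fallbacks the subclass argument is deliberately kept in a comma expression, ((void)(subclass), (lock)), so a variable passed only as a subclass still counts as used and does not warn under W=1. The same trick in isolation, as a small standalone C sketch with hypothetical names:

#include <stdio.h>

/* Evaluate 'x' (so the compiler sees it used), discard the result,
 * and yield 'p' as the value of the whole expression. */
#define PASS_THROUGH(p, x) (((void)(x), (p)))

int main(void)
{
	int value = 42;
	int subclass = 1;      /* would otherwise be "unused" here */

	printf("%d\n", *PASS_THROUGH(&value, subclass));
	return 0;
}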
@@ -290,37 +283,42 @@
 
 /*
  * Map the spin_lock functions to the raw variants for PREEMPT_RT=n
  */
 
-static inline raw_spinlock_t *spinlock_check(spinlock_t *lock)
+static __always_inline raw_spinlock_t *spinlock_check(spinlock_t *lock)
 {
 	return &lock->rlock;
 }
 
 #define spin_lock_init(_lock)				\
 do {							\
 	spinlock_check(_lock);				\
 	raw_spin_lock_init(&(_lock)->rlock);		\
 } while (0)
 
-static inline void spin_lock(spinlock_t *lock)
+static __always_inline void spin_lock(spinlock_t *lock)
 {
 	raw_spin_lock(&lock->rlock);
 }
 
-static inline void spin_lock_bh(spinlock_t *lock)
+static __always_inline void spin_lock_bh(spinlock_t *lock)
 {
 	raw_spin_lock_bh(&lock->rlock);
 }
 
-static inline int spin_trylock(spinlock_t *lock)
+static __always_inline int spin_trylock(spinlock_t *lock)
 {
 	return raw_spin_trylock(&lock->rlock);
 }
 
 #define spin_lock_nested(lock, subclass)			\
 do {								\
 	raw_spin_lock_nested(spinlock_check(lock), subclass);	\
 } while (0)
 
+#define spin_lock_bh_nested(lock, subclass)			\
+do {								\
+	raw_spin_lock_bh_nested(spinlock_check(lock), subclass);\
+} while (0)
+
 #define spin_lock_nest_lock(lock, nest_lock)				\
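For orientation: spinlock_t is a wrapper whose rlock member is the underlying raw_spinlock_t, so on PREEMPT_RT=n every spin_* call above boils down to the matching raw_spin_* call. The switch from inline to __always_inline presumably guarantees these trivial wrappers are inlined even when the compiler's inlining heuristics (e.g. with CONFIG_OPTIMIZE_INLINING) would decline. Usage is unchanged either way; a minimal sketch with hypothetical names:

#include <linux/spinlock.h>

static spinlock_t counter_lock;        /* hypothetical lock and counter */
static unsigned long counter;

static void counter_setup(void)
{
	spin_lock_init(&counter_lock); /* type-checks, then inits rlock */
}

static void counter_inc(void)
{
	spin_lock(&counter_lock);      /* == raw_spin_lock(&counter_lock.rlock) */
	counter++;
	spin_unlock(&counter_lock);
}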
@@ -341,49 +339,49 @@
 #define spin_lock_irqsave_nested(lock, flags, subclass)			\
 do {									\
 	raw_spin_lock_irqsave_nested(spinlock_check(lock), flags, subclass); \
 } while (0)
 
-static inline void spin_unlock(spinlock_t *lock)
+static __always_inline void spin_unlock(spinlock_t *lock)
 {
 	raw_spin_unlock(&lock->rlock);
 }
 
-static inline void spin_unlock_bh(spinlock_t *lock)
+static __always_inline void spin_unlock_bh(spinlock_t *lock)
 {
 	raw_spin_unlock_bh(&lock->rlock);
 }
 
-static inline void spin_unlock_irq(spinlock_t *lock)
+static __always_inline void spin_unlock_irq(spinlock_t *lock)
 {
 	raw_spin_unlock_irq(&lock->rlock);
 }
 
-static inline void spin_unlock_irqrestore(spinlock_t *lock, unsigned long flags)
+static __always_inline void spin_unlock_irqrestore(spinlock_t *lock, unsigned long flags)
 {
 	raw_spin_unlock_irqrestore(&lock->rlock, flags);
 }
 
-static inline int spin_trylock_bh(spinlock_t *lock)
+static __always_inline int spin_trylock_bh(spinlock_t *lock)
 {
 	return raw_spin_trylock_bh(&lock->rlock);
 }
 
-static inline int spin_trylock_irq(spinlock_t *lock)
+static __always_inline int spin_trylock_irq(spinlock_t *lock)
 {
 	return raw_spin_trylock_irq(&lock->rlock);
 }
 
 #define spin_trylock_irqsave(lock, flags)			\
-({ \
+({								\
 	raw_spin_trylock_irqsave(spinlock_check(lock), flags); \
 })
 
-static inline void spin_unlock_wait(spinlock_t *lock)
+static __always_inline void spin_unlock_wait(spinlock_t *lock)
 {
 	raw_spin_unlock_wait(&lock->rlock);
 }
 
-static inline int spin_is_locked(spinlock_t *lock)
+static __always_inline int spin_is_locked(spinlock_t *lock)
 {
 	return raw_spin_is_locked(&lock->rlock);
 }
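Finally, a usage sketch for the trylock family shown above (hypothetical names): spin_trylock_irqsave() saves and disables local interrupts and attempts the lock; if the lock is contended, interrupts are restored and it returns 0, so the caller can simply bail out.

#include <linux/spinlock.h>

static DEFINE_SPINLOCK(demo_lock);     /* hypothetical lock */
static int shared_value;

static int try_update(int value)
{
	unsigned long flags;

	if (!spin_trylock_irqsave(&demo_lock, flags))
		return 0;              /* contended; caller retries later */
	shared_value = value;
	spin_unlock_irqrestore(&demo_lock, flags);
	return 1;
}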