Subversion Repositories Kolibri OS

Compare Revisions

Rev 6081 → Rev 6082

/drivers/include/asm/atomic.h
22,9 → 22,9
*
* Atomically reads the value of @v.
*/
-static inline int atomic_read(const atomic_t *v)
+static __always_inline int atomic_read(const atomic_t *v)
{
-return ACCESS_ONCE((v)->counter);
+return READ_ONCE((v)->counter);
}
 
/**
34,9 → 34,9
*
* Atomically sets the value of @v to @i.
*/
-static inline void atomic_set(atomic_t *v, int i)
+static __always_inline void atomic_set(atomic_t *v, int i)
{
-v->counter = i;
+WRITE_ONCE(v->counter, i);
}
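
The switch from ACCESS_ONCE() to READ_ONCE()/WRITE_ONCE() follows upstream Linux; the newer helpers force a single untorn access and keep the compiler from caching the value or tearing the store. A minimal, illustrative polling sketch (not from this header; cpu_relax() as in the kernel):

static atomic_t ready = ATOMIC_INIT(0);

void wait_for_ready(void)
{
        /* atomic_read() is READ_ONCE() underneath, so the compiler
         * cannot hoist the load out of the loop. */
        while (!atomic_read(&ready))
                cpu_relax();
}

void signal_ready(void)
{
        atomic_set(&ready, 1);  /* WRITE_ONCE(): one untorn store */
}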
 
/**
46,7 → 46,7
*
* Atomically adds @i to @v.
*/
-static inline void atomic_add(int i, atomic_t *v)
+static __always_inline void atomic_add(int i, atomic_t *v)
{
asm volatile(LOCK_PREFIX "addl %1,%0"
: "+m" (v->counter)
60,7 → 60,7
*
* Atomically subtracts @i from @v.
*/
-static inline void atomic_sub(int i, atomic_t *v)
+static __always_inline void atomic_sub(int i, atomic_t *v)
{
asm volatile(LOCK_PREFIX "subl %1,%0"
: "+m" (v->counter)
76,7 → 76,7
* true if the result is zero, or false for all
* other cases.
*/
-static inline int atomic_sub_and_test(int i, atomic_t *v)
+static __always_inline int atomic_sub_and_test(int i, atomic_t *v)
{
GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, "er", i, "%0", "e");
}
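
GEN_BINARY_RMWcc() tests the CPU flag left behind by the locked instruction instead of issuing a separate compare; its non-asm-goto fallback expands roughly as below (a sketch, details vary by kernel version; "e" names the zero flag):

/* Rough expansion of atomic_sub_and_test(i, v) */
char c;
asm volatile(LOCK_PREFIX "subl %2,%0; sete %1"
             : "+m" (v->counter), "=qm" (c)
             : "er" (i) : "memory");
return c != 0;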
87,7 → 87,7
*
* Atomically increments @v by 1.
*/
-static inline void atomic_inc(atomic_t *v)
+static __always_inline void atomic_inc(atomic_t *v)
{
asm volatile(LOCK_PREFIX "incl %0"
: "+m" (v->counter));
99,7 → 99,7
*
* Atomically decrements @v by 1.
*/
-static inline void atomic_dec(atomic_t *v)
+static __always_inline void atomic_dec(atomic_t *v)
{
asm volatile(LOCK_PREFIX "decl %0"
: "+m" (v->counter));
113,7 → 113,7
* returns true if the result is 0, or false for all other
* cases.
*/
-static inline int atomic_dec_and_test(atomic_t *v)
+static __always_inline int atomic_dec_and_test(atomic_t *v)
{
GEN_UNARY_RMWcc(LOCK_PREFIX "decl", v->counter, "%0", "e");
}
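
atomic_dec_and_test() is the canonical last-reference test. A hedged sketch of a put-style helper (struct obj and free_obj() are hypothetical):

void obj_put(struct obj *o)
{
        /* Only the caller that drops the count to zero frees. */
        if (atomic_dec_and_test(&o->refcnt))
                free_obj(o);
}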
126,7 → 126,7
* and returns true if the result is zero, or false for all
* other cases.
*/
-static inline int atomic_inc_and_test(atomic_t *v)
+static __always_inline int atomic_inc_and_test(atomic_t *v)
{
GEN_UNARY_RMWcc(LOCK_PREFIX "incl", v->counter, "%0", "e");
}
140,7 → 140,7
* if the result is negative, or false when
* result is greater than or equal to zero.
*/
-static inline int atomic_add_negative(int i, atomic_t *v)
+static __always_inline int atomic_add_negative(int i, atomic_t *v)
{
GEN_BINARY_RMWcc(LOCK_PREFIX "addl", v->counter, "er", i, "%0", "s");
}
152,7 → 152,7
*
* Atomically adds @i to @v and returns @i + @v
*/
-static inline int atomic_add_return(int i, atomic_t *v)
+static __always_inline int atomic_add_return(int i, atomic_t *v)
{
return i + xadd(&v->counter, i);
}
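
xadd hands back the value @v held before the addition, so adding @i once more yields the post-add result. An illustrative ticket allocator (names hypothetical):

static atomic_t next_ticket = ATOMIC_INIT(0);

int take_ticket(void)
{
        /* post-increment value: the first caller gets 1 */
        return atomic_add_return(1, &next_ticket);
}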
164,7 → 164,7
*
* Atomically subtracts @i from @v and returns @v - @i
*/
-static inline int atomic_sub_return(int i, atomic_t *v)
+static __always_inline int atomic_sub_return(int i, atomic_t *v)
{
return atomic_add_return(-i, v);
}
172,7 → 172,7
#define atomic_inc_return(v) (atomic_add_return(1, v))
#define atomic_dec_return(v) (atomic_sub_return(1, v))
 
-static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
+static __always_inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
return cmpxchg(&v->counter, old, new);
}
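
atomic_cmpxchg() returns the value actually found in @v; the swap took effect only if that equals @old, which yields the usual retry loop. An illustrative saturating increment (hypothetical helper; INT_MAX from the usual headers):

int atomic_inc_saturated(atomic_t *v)
{
        int old, c = atomic_read(v);

        while (c != INT_MAX) {
                old = atomic_cmpxchg(v, c, c + 1);
                if (old == c)           /* our swap won */
                        return c + 1;
                c = old;                /* lost the race: retry */
        }
        return INT_MAX;                 /* already saturated */
}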
182,6 → 182,21
return xchg(&v->counter, new);
}
 
+#define ATOMIC_OP(op) \
+static inline void atomic_##op(int i, atomic_t *v) \
+{ \
+asm volatile(LOCK_PREFIX #op"l %1,%0" \
+: "+m" (v->counter) \
+: "ir" (i) \
+: "memory"); \
+}
+
+ATOMIC_OP(and)
+ATOMIC_OP(or)
+ATOMIC_OP(xor)
+
+#undef ATOMIC_OP
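
ATOMIC_OP() stamps out atomic_and(), atomic_or() and atomic_xor(). These portable helpers replace the x86-only atomic_clear_mask()/atomic_set_mask() macros deleted further down; a migration sketch (flag value hypothetical):

#define OBJ_DIRTY 0x1

/* was: atomic_set_mask(OBJ_DIRTY, &v->counter);   */
atomic_or(OBJ_DIRTY, v);

/* was: atomic_clear_mask(OBJ_DIRTY, &v->counter); */
/* the complement now happens at the call site     */
atomic_and(~OBJ_DIRTY, v);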
 
/**
* __atomic_add_unless - add unless the number is already a given value
* @v: pointer of type atomic_t
191,7 → 206,7
* Atomically adds @a to @v, so long as @v was not already @u.
* Returns the old value of @v.
*/
-static inline int __atomic_add_unless(atomic_t *v, int a, int u)
+static __always_inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
int c, old;
c = atomic_read(v);
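
The hunk truncates the loop here; upstream it continues as below (reconstructed, treat as a sketch). atomic_inc_not_zero() builds directly on this, passing a = 1 and u = 0:

for (;;) {
        if (unlikely(c == (u)))
                break;                  /* already @u: do not add */
        old = atomic_cmpxchg((v), c, c + (a));
        if (likely(old == c))
                break;                  /* swap succeeded */
        c = old;                        /* raced: retry with fresh value */
}
return c;                               /* old value of @v */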
213,22 → 228,12
* Atomically adds 1 to @v
* Returns the new value of @v
*/
-static inline short int atomic_inc_short(short int *v)
+static __always_inline short int atomic_inc_short(short int *v)
{
asm(LOCK_PREFIX "addw $1, %0" : "+m" (*v));
return *v;
}
 
-/* These are x86-specific, used by some header files */
-#define atomic_clear_mask(mask, addr) \
-asm volatile(LOCK_PREFIX "andl %0,%1" \
-: : "r" (~(mask)), "m" (*(addr)) : "memory")
-
-#define atomic_set_mask(mask, addr) \
-asm volatile(LOCK_PREFIX "orl %0,%1" \
-: : "r" ((unsigned)(mask)), "m" (*(addr)) \
-: "memory")
 
#ifdef CONFIG_X86_32
# include <asm/atomic64_32.h>
#else