Subversion Repositories Kolibri OS

Rev

Go to most recent revision | Details | Last modification | View Log | RSS feed

Rev Author Line No. Line
1321 serge 1
#ifndef _ASM_GENERIC_ATOMIC_LONG_H
2
#define _ASM_GENERIC_ATOMIC_LONG_H
3
/*
4
 * Copyright (C) 2005 Silicon Graphics, Inc.
5
 *	Christoph Lameter
6
 *
7
 * Allows to provide arch independent atomic definitions without the need to
8
 * edit all arch specific atomic.h files.
9
 */
10
 
11
#include <asm/types.h>
12
 
13
/*
14
 * Support for atomic_long_t
15
 *
16
 * Casts for parameters are avoided for existing atomic functions in order to
17
 * avoid issues with cast-as-lval under gcc 4.x and other limitations that the
18
 * macros of a platform may have.
19
 */
20
 
21
#if BITS_PER_LONG == 64
22
 
23
/* On 64-bit architectures a long is 64 bits wide, so atomic_long_t
 * can simply alias the existing 64-bit atomic type. */
typedef atomic64_t atomic_long_t;
24
 
25
/* Static initializer, forwarded to the atomic64_t initializer. */
#define ATOMIC_LONG_INIT(i)	ATOMIC64_INIT(i)
26
 
27
/* Read the counter; atomic_long_t is atomic64_t here, so delegate. */
static inline long atomic_long_read(atomic_long_t *l)
{
	return (long)atomic64_read((atomic64_t *)l);
}
33
 
34
/* Store i into the counter via the 64-bit primitive. */
static inline void atomic_long_set(atomic_long_t *l, long i)
{
	atomic64_set((atomic64_t *)l, i);
}
40
 
41
/* Atomically increment the counter. */
static inline void atomic_long_inc(atomic_long_t *l)
{
	atomic64_inc((atomic64_t *)l);
}
47
 
48
/* Atomically decrement the counter. */
static inline void atomic_long_dec(atomic_long_t *l)
{
	atomic64_dec((atomic64_t *)l);
}
54
 
55
/* Atomically add i to the counter. */
static inline void atomic_long_add(long i, atomic_long_t *l)
{
	atomic64_add(i, (atomic64_t *)l);
}
61
 
62
/* Atomically subtract i from the counter. */
static inline void atomic_long_sub(long i, atomic_long_t *l)
{
	atomic64_sub(i, (atomic64_t *)l);
}
68
 
69
/* Subtract i; nonzero return iff the result is zero (per atomic64_sub_and_test). */
static inline int atomic_long_sub_and_test(long i, atomic_long_t *l)
{
	return atomic64_sub_and_test(i, (atomic64_t *)l);
}
75
 
76
/* Decrement; forwards atomic64_dec_and_test's result unchanged. */
static inline int atomic_long_dec_and_test(atomic_long_t *l)
{
	return atomic64_dec_and_test((atomic64_t *)l);
}
82
 
83
/* Increment; forwards atomic64_inc_and_test's result unchanged. */
static inline int atomic_long_inc_and_test(atomic_long_t *l)
{
	return atomic64_inc_and_test((atomic64_t *)l);
}
89
 
90
/* Add i; forwards atomic64_add_negative's result unchanged. */
static inline int atomic_long_add_negative(long i, atomic_long_t *l)
{
	return atomic64_add_negative(i, (atomic64_t *)l);
}
96
 
97
/* Add i and return the new value. */
static inline long atomic_long_add_return(long i, atomic_long_t *l)
{
	return (long)atomic64_add_return(i, (atomic64_t *)l);
}
103
 
104
/* Subtract i and return the new value. */
static inline long atomic_long_sub_return(long i, atomic_long_t *l)
{
	return (long)atomic64_sub_return(i, (atomic64_t *)l);
}
110
 
111
/* Increment and return the new value. */
static inline long atomic_long_inc_return(atomic_long_t *l)
{
	return (long)atomic64_inc_return((atomic64_t *)l);
}
117
 
118
/* Decrement and return the new value. */
static inline long atomic_long_dec_return(atomic_long_t *l)
{
	return (long)atomic64_dec_return((atomic64_t *)l);
}
124
 
125
/* Add a unless the counter equals u; forwards atomic64_add_unless's result. */
static inline long atomic_long_add_unless(atomic_long_t *l, long a, long u)
{
	return (long)atomic64_add_unless((atomic64_t *)l, a, u);
}
131
 
132
/*
 * These need no wrapper body: cast and forward straight to the
 * atomic64_t primitives.
 */
#define atomic_long_inc_not_zero(l) atomic64_inc_not_zero((atomic64_t *)(l))

#define atomic_long_cmpxchg(l, old, new) \
	(atomic64_cmpxchg((atomic64_t *)(l), (old), (new)))
#define atomic_long_xchg(v, new) \
	(atomic64_xchg((atomic64_t *)(v), (new)))
138
 
139
#else  /*  BITS_PER_LONG == 64  */
140
 
141
/* On 32-bit architectures a long is the same width as int, so
 * atomic_long_t maps onto the plain atomic_t type. */
typedef atomic_t atomic_long_t;
142
 
143
/* Static initializer, forwarded to the atomic_t initializer. */
#define ATOMIC_LONG_INIT(i)	ATOMIC_INIT(i)
144
/* Read the counter; atomic_long_t is atomic_t here, so delegate. */
static inline long atomic_long_read(atomic_long_t *l)
{
	return (long)atomic_read((atomic_t *)l);
}
150
 
151
/* Store i into the counter via the int-sized primitive. */
static inline void atomic_long_set(atomic_long_t *l, long i)
{
	atomic_set((atomic_t *)l, i);
}
157
 
158
/* Atomically increment the counter. */
static inline void atomic_long_inc(atomic_long_t *l)
{
	atomic_inc((atomic_t *)l);
}
164
 
165
/* Atomically decrement the counter. */
static inline void atomic_long_dec(atomic_long_t *l)
{
	atomic_dec((atomic_t *)l);
}
171
 
172
/* Atomically add i to the counter. */
static inline void atomic_long_add(long i, atomic_long_t *l)
{
	atomic_add(i, (atomic_t *)l);
}
178
 
179
/* Atomically subtract i from the counter. */
static inline void atomic_long_sub(long i, atomic_long_t *l)
{
	atomic_sub(i, (atomic_t *)l);
}
185
 
186
/* Subtract i; forwards atomic_sub_and_test's result unchanged. */
static inline int atomic_long_sub_and_test(long i, atomic_long_t *l)
{
	return atomic_sub_and_test(i, (atomic_t *)l);
}
192
 
193
/* Decrement; forwards atomic_dec_and_test's result unchanged. */
static inline int atomic_long_dec_and_test(atomic_long_t *l)
{
	return atomic_dec_and_test((atomic_t *)l);
}
199
 
200
/* Increment; forwards atomic_inc_and_test's result unchanged. */
static inline int atomic_long_inc_and_test(atomic_long_t *l)
{
	return atomic_inc_and_test((atomic_t *)l);
}
206
 
207
/* Add i; forwards atomic_add_negative's result unchanged. */
static inline int atomic_long_add_negative(long i, atomic_long_t *l)
{
	return atomic_add_negative(i, (atomic_t *)l);
}
213
 
214
/* Add i and return the new value. */
static inline long atomic_long_add_return(long i, atomic_long_t *l)
{
	return (long)atomic_add_return(i, (atomic_t *)l);
}
220
 
221
/* Subtract i and return the new value. */
static inline long atomic_long_sub_return(long i, atomic_long_t *l)
{
	return (long)atomic_sub_return(i, (atomic_t *)l);
}
227
 
228
/* Increment and return the new value. */
static inline long atomic_long_inc_return(atomic_long_t *l)
{
	return (long)atomic_inc_return((atomic_t *)l);
}
234
 
235
/* Decrement and return the new value. */
static inline long atomic_long_dec_return(atomic_long_t *l)
{
	return (long)atomic_dec_return((atomic_t *)l);
}
241
 
242
/* Add a unless the counter equals u; forwards atomic_add_unless's result. */
static inline long atomic_long_add_unless(atomic_long_t *l, long a, long u)
{
	return (long)atomic_add_unless((atomic_t *)l, a, u);
}
248
 
249
/*
 * These need no wrapper body: cast and forward straight to the
 * atomic_t primitives.
 */
#define atomic_long_inc_not_zero(l) atomic_inc_not_zero((atomic_t *)(l))

#define atomic_long_cmpxchg(l, old, new) \
	(atomic_cmpxchg((atomic_t *)(l), (old), (new)))
#define atomic_long_xchg(v, new) \
	(atomic_xchg((atomic_t *)(v), (new)))
255
 
256
#endif  /*  BITS_PER_LONG == 64  */
257
 
258
#endif  /*  _ASM_GENERIC_ATOMIC_LONG_H  */