Subversion Repositories Kolibri OS

Rev

Rev 6588 | Go to most recent revision | Blame | Compare with Previous | Last modification | View Log | Download | RSS feed

  1. #ifndef _ASM_X86_ATOMIC64_32_H
  2. #define _ASM_X86_ATOMIC64_32_H
  3.  
  4. #include <linux/compiler.h>
  5. #include <linux/types.h>
  6. //#include <asm/cmpxchg.h>
  7.  
/* A 64-bit atomic type for 32-bit x86. */

typedef struct {
        /* 8-byte alignment so the counter can be operated on as a single
         * naturally-aligned 64-bit quantity (e.g. by cmpxchg8b). */
        u64 __aligned(8) counter;
} atomic64_t;
  13.  
/* Static initializer: ATOMIC64_INIT(5) gives a counter starting at 5. */
#define ATOMIC64_INIT(val)      { (val) }

/*
 * The out-of-line helpers use a non-standard (register-based) calling
 * convention, hence the varargs prototype.
 */
#define __ATOMIC64_DECL(sym) void atomic64_##sym(atomic64_t *, ...)
#ifndef ATOMIC64_EXPORT
#define ATOMIC64_DECL_ONE __ATOMIC64_DECL
#else
/* When ATOMIC64_EXPORT is defined, also export the symbol. */
#define ATOMIC64_DECL_ONE(sym) __ATOMIC64_DECL(sym); \
        ATOMIC64_EXPORT(atomic64_##sym)
#endif
  23.  
#ifdef CONFIG_X86_CMPXCHG64
/*
 * CMPXCHG8B is guaranteed present: always call the _cx8 variant of op
 * 'g' directly.  The 'f' argument is unused in this branch; it exists
 * only so both configurations share one macro signature.
 */
#define __alternative_atomic64(f, g, out, in...) \
        asm volatile("call %P[func]" \
                     : out : [func] "i" (atomic64_##g##_cx8), ## in)

#define ATOMIC64_DECL(sym) ATOMIC64_DECL_ONE(sym##_cx8)
#else
/*
 * The CPU may lack CMPXCHG8B: patch the call site at runtime between
 * the _386 fallback 'f' and the _cx8 implementation 'g', keyed on
 * X86_FEATURE_CX8.
 */
#define __alternative_atomic64(f, g, out, in...) \
        alternative_call(atomic64_##f##_386, atomic64_##g##_cx8, \
                         X86_FEATURE_CX8, ASM_OUTPUT2(out), ## in)

#define ATOMIC64_DECL(sym) ATOMIC64_DECL_ONE(sym##_cx8); \
        ATOMIC64_DECL_ONE(sym##_386)

/* Ops that exist only in the _386 fallback family (the cx8 path
 * implements add/sub/inc/dec via their *_return variants below). */
ATOMIC64_DECL_ONE(add_386);
ATOMIC64_DECL_ONE(sub_386);
ATOMIC64_DECL_ONE(inc_386);
ATOMIC64_DECL_ONE(dec_386);
#endif

/* Shorthand for the common case where fallback and cx8 op share a name. */
#define alternative_atomic64(f, out, in...) \
        __alternative_atomic64(f, f, ASM_OUTPUT2(out), ## in)
  46.  
/* Declare (and possibly export) the out-of-line implementations. */
ATOMIC64_DECL(read);
ATOMIC64_DECL(set);
ATOMIC64_DECL(xchg);
ATOMIC64_DECL(add_return);
ATOMIC64_DECL(sub_return);
ATOMIC64_DECL(inc_return);
ATOMIC64_DECL(dec_return);
ATOMIC64_DECL(dec_if_positive);
ATOMIC64_DECL(inc_not_zero);
ATOMIC64_DECL(add_unless);

/* Declaration machinery is no longer needed past this point. */
#undef ATOMIC64_DECL
#undef ATOMIC64_DECL_ONE
#undef __ATOMIC64_DECL
#undef ATOMIC64_EXPORT
  62.  
/**
 * atomic64_cmpxchg - cmpxchg atomic64 variable
 * @v: pointer to type atomic64_t
 * @o: expected value
 * @n: new value
 *
 * Atomically sets @v to @n if it was equal to @o and returns
 * the old value.
 *
 * NOTE(review): cmpxchg64 normally comes from <asm/cmpxchg.h>, but that
 * include is commented out at the top of this file — confirm the symbol
 * is provided elsewhere in this port.
 */

static inline long long atomic64_cmpxchg(atomic64_t *v, long long o, long long n)
{
        return cmpxchg64(&v->counter, o, n);
}
  77.  
/**
 * atomic64_xchg - xchg atomic64 variable
 * @v: pointer to type atomic64_t
 * @n: value to assign
 *
 * Atomically xchgs the value of @v to @n and returns
 * the old value.
 *
 * Implemented as a cmpxchg8b retry loop: ebx:ecx holds the new value,
 * edx:eax the expected one.  edx:eax starts undefined ("=&A" is
 * write-only), so the first compare will normally fail; cmpxchg8b then
 * loads the current value into edx:eax and the loop retries until the
 * swap succeeds, leaving the old value in @o.
 */
static inline long long atomic64_xchg(atomic64_t *v, long long n)
{
    long long o;
    /* split the new value across the ebx (low) / ecx (high) pair */
    unsigned high = (unsigned)(n >> 32);
    unsigned low = (unsigned)n;

    asm volatile(
    "1:                 \n\t"
    "cmpxchg8b (%%esi)  \n\t"
    "jnz 1b             \n\t"
    :"=&A" (o)
    :"S" (v), "b" (low), "c" (high)
    : "memory", "cc");
    return o;
}
  101.  
/**
 * atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: value to assign
 *
 * Atomically sets the value of @v to @i.
 *
 * NOTE(review): __sync_lock_test_and_set is an atomic exchange whose
 * returned old value is discarded here; per GCC docs it is only an
 * acquire barrier, not a full one — confirm that suffices for callers.
 */
static inline void atomic64_set(atomic64_t *v, long long i)
{
    __sync_lock_test_and_set((long long *)&v->counter, i);
}
  113.  
/**
 * atomic64_read - read atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically reads the value of @v and returns it.
 *
 * Implemented as an atomic fetch-and-add of 0; the cast drops the
 * const qualifier because the builtin takes a non-const pointer
 * (the counter value itself is not modified).
 */
static inline long long atomic64_read(const atomic64_t *v)
{
    return __sync_fetch_and_add( (long long *)&v->counter, 0);
}
  124.  
/**
 * atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + *@v
 *
 * Register contract of the out-of-line helper: edx:eax ("A") carries
 * the addend in and the result out; ecx carries @v.
 */
static inline long long atomic64_add_return(long long i, atomic64_t *v)
{
        alternative_atomic64(add_return,
                             ASM_OUTPUT2("+A" (i), "+c" (v)),
                             ASM_NO_INPUT_CLOBBER("memory"));
        return i;
}
  139.  
/*
 * Other variants with different arithmetic operators:
 */

/* Atomically subtracts @i from @v and returns the new value.
 * Same edx:eax / ecx register contract as atomic64_add_return(). */
static inline long long atomic64_sub_return(long long i, atomic64_t *v)
{
        alternative_atomic64(sub_return,
                             ASM_OUTPUT2("+A" (i), "+c" (v)),
                             ASM_NO_INPUT_CLOBBER("memory"));
        return i;
}
  150.  
/* Atomically increments @v by 1 and returns the new value.
 * Helper takes @v in esi, returns the result in edx:eax; ecx is
 * clobbered by the out-of-line code. */
static inline long long atomic64_inc_return(atomic64_t *v)
{
        long long a;
        alternative_atomic64(inc_return, "=&A" (a),
                             "S" (v) : "memory", "ecx");
        return a;
}
  158.  
/* Atomically decrements @v by 1 and returns the new value.
 * Same register contract as atomic64_inc_return(). */
static inline long long atomic64_dec_return(atomic64_t *v)
{
        long long a;
        alternative_atomic64(dec_return, "=&A" (a),
                             "S" (v) : "memory", "ecx");
        return a;
}
  166.  
/**
 * atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 *
 * On !CONFIG_X86_CMPXCHG64 this may dispatch to the dedicated _386
 * 'add' helper; on cx8 hardware it reuses add_return and the new
 * value is returned as a side benefit.
 */
static inline long long atomic64_add(long long i, atomic64_t *v)
{
        __alternative_atomic64(add, add_return,
                               ASM_OUTPUT2("+A" (i), "+c" (v)),
                               ASM_NO_INPUT_CLOBBER("memory"));
        return i;
}
  181.  
/**
 * atomic64_sub - subtract the atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 *
 * Mirrors atomic64_add(): _386 'sub' fallback or cx8 sub_return.
 */
static inline long long atomic64_sub(long long i, atomic64_t *v)
{
        __alternative_atomic64(sub, sub_return,
                               ASM_OUTPUT2("+A" (i), "+c" (v)),
                               ASM_NO_INPUT_CLOBBER("memory"));
        return i;
}
  196.  
  197. /**
  198.  * atomic64_sub_and_test - subtract value from variable and test result
  199.  * @i: integer value to subtract
  200.  * @v: pointer to type atomic64_t
  201.  *
  202.  * Atomically subtracts @i from @v and returns
  203.  * true if the result is zero, or false for all
  204.  * other cases.
  205.  */
  206. static inline int atomic64_sub_and_test(long long i, atomic64_t *v)
  207. {
  208.         return atomic64_sub_return(i, v) == 0;
  209. }
  210.  
/**
 * atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 *
 * @v is passed in esi; the helper may clobber eax, ecx and edx,
 * hence the explicit clobber list.
 */
static inline void atomic64_inc(atomic64_t *v)
{
        __alternative_atomic64(inc, inc_return, /* no output */,
                               "S" (v) : "memory", "eax", "ecx", "edx");
}
  222.  
/**
 * atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 *
 * Same calling convention and clobbers as atomic64_inc().
 */
static inline void atomic64_dec(atomic64_t *v)
{
        __alternative_atomic64(dec, dec_return, /* no output */,
                               "S" (v) : "memory", "eax", "ecx", "edx");
}
  234.  
  235. /**
  236.  * atomic64_dec_and_test - decrement and test
  237.  * @v: pointer to type atomic64_t
  238.  *
  239.  * Atomically decrements @v by 1 and
  240.  * returns true if the result is 0, or false for all other
  241.  * cases.
  242.  */
  243. static inline int atomic64_dec_and_test(atomic64_t *v)
  244. {
  245.         return atomic64_dec_return(v) == 0;
  246. }
  247.  
  248. /**
  249.  * atomic64_inc_and_test - increment and test
  250.  * @v: pointer to type atomic64_t
  251.  *
  252.  * Atomically increments @v by 1
  253.  * and returns true if the result is zero, or false for all
  254.  * other cases.
  255.  */
  256. static inline int atomic64_inc_and_test(atomic64_t *v)
  257. {
  258.         return atomic64_inc_return(v) == 0;
  259. }
  260.  
  261. /**
  262.  * atomic64_add_negative - add and test if negative
  263.  * @i: integer value to add
  264.  * @v: pointer to type atomic64_t
  265.  *
  266.  * Atomically adds @i to @v and returns true
  267.  * if the result is negative, or false when
  268.  * result is greater than or equal to zero.
  269.  */
  270. static inline int atomic64_add_negative(long long i, atomic64_t *v)
  271. {
  272.         return atomic64_add_return(i, v) < 0;
  273. }
  274.  
/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if the add was done, zero otherwise.
 *
 * Register contract: @u is split into ecx (low) / edi (high), @v goes
 * in esi, @a in edx:eax; the helper leaves the success flag in eax,
 * which the (int)a truncation picks out.
 */
static inline int atomic64_add_unless(atomic64_t *v, long long a, long long u)
{
        unsigned low = (unsigned)u;
        unsigned high = (unsigned)(u >> 32);
        alternative_atomic64(add_unless,
                             ASM_OUTPUT2("+A" (a), "+c" (low), "+D" (high)),
                             "S" (v) : "memory");
        return (int)a;
}
  293.  

/* Atomically increments @v unless it is 0.
 * Returns non-zero if the increment was done, zero otherwise.
 * @v is passed in esi; the result comes back in eax, with ecx and
 * edx clobbered by the out-of-line helper. */
static inline int atomic64_inc_not_zero(atomic64_t *v)
{
        int r;
        alternative_atomic64(inc_not_zero, "=&a" (r),
                             "S" (v) : "ecx", "edx", "memory");
        return r;
}
  302.  
/* Atomically decrements @v if that does not make it negative, and
 * returns the decremented value (result in edx:eax, @v in esi, ecx
 * clobbered).  NOTE(review): the usual kernel contract is that a
 * negative return means no decrement happened — confirm the _cx8
 * helper follows it. */
static inline long long atomic64_dec_if_positive(atomic64_t *v)
{
        long long r;
        alternative_atomic64(dec_if_positive, "=&A" (r),
                             "S" (v) : "ecx", "memory");
        return r;
}
  310.  
/* The asm-dispatch helpers are private to this header. */
#undef alternative_atomic64
#undef __alternative_atomic64

/*
 * Generate the bitwise ops (and/or/xor) as compare-and-swap loops:
 * re-read the counter and retry until the cmpxchg from the observed
 * value 'c' to 'c c_op i' succeeds.
 */
#define ATOMIC64_OP(op, c_op)                                           \
static inline void atomic64_##op(long long i, atomic64_t *v)            \
{                                                                       \
        long long old, c = 0;                                           \
        while ((old = atomic64_cmpxchg(v, c, c c_op i)) != c)           \
                c = old;                                                \
}

ATOMIC64_OP(and, &)
ATOMIC64_OP(or, |)
ATOMIC64_OP(xor, ^)

#undef ATOMIC64_OP

#endif /* _ASM_X86_ATOMIC64_32_H */
  327.  
  328. #endif /* _ASM_X86_ATOMIC64_32_H */
  329.