root/include/asm-generic/local64.h

/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_GENERIC_LOCAL64_H
#define _ASM_GENERIC_LOCAL64_H

#include <linux/percpu.h>
#include <asm/types.h>

/*
 * A signed long type for operations which are atomic for a single CPU.
 * Usually used in combination with per-cpu variables.
 *
 * This is the default implementation, which uses atomic64_t.  Which is
 * rather pointless.  The whole point behind local64_t is that some processors
 * can perform atomic adds and subtracts in a manner which is atomic wrt IRQs
 * running on this CPU.  local64_t allows exploitation of such capabilities.
 */

/* Implement in terms of atomics. */

#if BITS_PER_LONG == 64

#include <asm/local.h>

typedef struct {
        local_t a;
} local64_t;

#define LOCAL64_INIT(i) { LOCAL_INIT(i) }

#define local64_read(l)         local_read(&(l)->a)
#define local64_set(l,i)        local_set((&(l)->a),(i))
#define local64_inc(l)          local_inc(&(l)->a)
#define local64_dec(l)          local_dec(&(l)->a)
#define local64_add(i,l)        local_add((i),(&(l)->a))
#define local64_sub(i,l)        local_sub((i),(&(l)->a))

#define local64_sub_and_test(i, l) local_sub_and_test((i), (&(l)->a))
#define local64_dec_and_test(l) local_dec_and_test(&(l)->a)
#define local64_inc_and_test(l) local_inc_and_test(&(l)->a)
#define local64_add_negative(i, l) local_add_negative((i), (&(l)->a))
#define local64_add_return(i, l) local_add_return((i), (&(l)->a))
#define local64_sub_return(i, l) local_sub_return((i), (&(l)->a))
#define local64_inc_return(l)   local_inc_return(&(l)->a)

#define local64_cmpxchg(l, o, n) local_cmpxchg((&(l)->a), (o), (n))
#define local64_xchg(l, n)      local_xchg((&(l)->a), (n))
#define local64_add_unless(l, _a, u) local_add_unless((&(l)->a), (_a), (u))
#define local64_inc_not_zero(l) local_inc_not_zero(&(l)->a)

/* Non-atomic variants, ie. preemption disabled and won't be touched
 * in interrupt, etc.  Some archs can optimize this case well. */
#define __local64_inc(l)        local64_set((l), local64_read(l) + 1)
#define __local64_dec(l)        local64_set((l), local64_read(l) - 1)
#define __local64_add(i,l)      local64_set((l), local64_read(l) + (i))
#define __local64_sub(i,l)      local64_set((l), local64_read(l) - (i))

#else /* BITS_PER_LONG != 64 */

#include <linux/atomic.h>

/* Don't use typedef: don't want them to be mixed with atomic_t's. */
typedef struct {
        atomic64_t a;
} local64_t;

#define LOCAL64_INIT(i) { ATOMIC_LONG_INIT(i) }

#define local64_read(l)         atomic64_read(&(l)->a)
#define local64_set(l,i)        atomic64_set((&(l)->a),(i))
#define local64_inc(l)          atomic64_inc(&(l)->a)
#define local64_dec(l)          atomic64_dec(&(l)->a)
#define local64_add(i,l)        atomic64_add((i),(&(l)->a))
#define local64_sub(i,l)        atomic64_sub((i),(&(l)->a))

#define local64_sub_and_test(i, l) atomic64_sub_and_test((i), (&(l)->a))
#define local64_dec_and_test(l) atomic64_dec_and_test(&(l)->a)
#define local64_inc_and_test(l) atomic64_inc_and_test(&(l)->a)
#define local64_add_negative(i, l) atomic64_add_negative((i), (&(l)->a))
#define local64_add_return(i, l) atomic64_add_return((i), (&(l)->a))
#define local64_sub_return(i, l) atomic64_sub_return((i), (&(l)->a))
#define local64_inc_return(l)   atomic64_inc_return(&(l)->a)

#define local64_cmpxchg(l, o, n) atomic64_cmpxchg((&(l)->a), (o), (n))
#define local64_xchg(l, n)      atomic64_xchg((&(l)->a), (n))
#define local64_add_unless(l, _a, u) atomic64_add_unless((&(l)->a), (_a), (u))
#define local64_inc_not_zero(l) atomic64_inc_not_zero(&(l)->a)

/* Non-atomic variants, ie. preemption disabled and won't be touched
 * in interrupt, etc.  Some archs can optimize this case well. */
#define __local64_inc(l)        local64_set((l), local64_read(l) + 1)
#define __local64_dec(l)        local64_set((l), local64_read(l) - 1)
#define __local64_add(i,l)      local64_set((l), local64_read(l) + (i))
#define __local64_sub(i,l)      local64_set((l), local64_read(l) - (i))

#endif /* BITS_PER_LONG != 64 */

#endif /* _ASM_GENERIC_LOCAL64_H */
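The header comment above describes the intended pattern: a counter that lives in a per-cpu variable and is updated without locks, because local64_t operations are atomic with respect to IRQs on the owning CPU. Below is a minimal usage sketch, not part of the header itself; the counter name hits and both function names are hypothetical, though the shape mirrors how perf uses local64_t for per-cpu event counts.

/* A minimal usage sketch (not from the kernel tree): a per-cpu
 * event counter.  Names are hypothetical. */
#include <linux/percpu.h>
#include <asm/local64.h>

static DEFINE_PER_CPU(local64_t, hits);

/* Fast path: runs on the local CPU, e.g. from IRQ context.
 * local64_inc() is atomic wrt IRQs on this CPU, so no lock (and, on
 * architectures with a real local_t, no LOCK-prefixed instruction)
 * is needed. */
static void hit_record(void)
{
        local64_inc(this_cpu_ptr(&hits));
}

/* Slow path: a reader sums the per-cpu counters.  The total is only
 * approximate while writers are active, which is usually acceptable
 * for statistics. */
static u64 hit_total(void)
{
        u64 sum = 0;
        int cpu;

        for_each_possible_cpu(cpu)
                sum += local64_read(per_cpu_ptr(&hits, cpu));
        return sum;
}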
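The __local64_* variants at the end of each branch skip atomicity entirely: they compile down to a plain read-modify-write, so they are only safe when the caller guarantees exclusive access, i.e. preemption is disabled and the counter is never written from interrupt context. A sketch of that pattern, again with hypothetical names:

/* Sketch of the non-atomic variants.  get_cpu_ptr() disables
 * preemption, so we stay on one CPU; the counter must also never be
 * written from IRQ context for __local64_add() to be safe. */
static DEFINE_PER_CPU(local64_t, bytes_copied);

static void account_copy(long n)
{
        local64_t *c = get_cpu_ptr(&bytes_copied);

        __local64_add(n, c);            /* plain add, no atomicity */
        put_cpu_ptr(&bytes_copied);
}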
