root/tools/arch/arm64/include/asm/barrier.h

/* [<][>][^][v][top][bottom][index][help] */

INCLUDED FROM


   1 /* SPDX-License-Identifier: GPL-2.0 */
   2 #ifndef _TOOLS_LINUX_ASM_AARCH64_BARRIER_H
   3 #define _TOOLS_LINUX_ASM_AARCH64_BARRIER_H
   4 
   5 /*
   6  * From tools/perf/perf-sys.h, last modified in:
   7  * f428ebd184c82a7914b2aa7e9f868918aaf7ea78 perf tools: Fix AAAAARGH64 memory barriers
   8  *
   9  * XXX: arch/arm64/include/asm/barrier.h in the kernel sources uses dsb; is this
  10  * a case like for arm32 where we do things differently in userspace?
  11  */
  12 
/*
 * Userspace memory barriers for arm64, all built on "dmb" (data memory
 * barrier) restricted to the inner shareable domain ("ish"):
 *   mb()  - full barrier ("dmb ish")
 *   wmb() - write barrier: orders earlier stores before later stores
 *           ("dmb ishst")
 *   rmb() - read barrier: orders earlier loads before later accesses
 *           ("dmb ishld")
 * The "memory" clobber additionally prevents the compiler from moving
 * memory accesses across the barrier.
 */
#define mb()            asm volatile("dmb ish" ::: "memory")
#define wmb()           asm volatile("dmb ishst" ::: "memory")
#define rmb()           asm volatile("dmb ishld" ::: "memory")
  16 
/*
 * SMP barriers.  These use the same dmb variants as mb()/wmb()/rmb()
 * above (per the comment at the top, the kernel's own non-smp barriers
 * use the heavier dsb instead).  The duplication is deliberate: should
 * the mb()/wmb()/rmb() definitions above ever change, the smp_*()
 * versions below must keep their current dmb encodings.
 */
#define smp_mb()        asm volatile("dmb ish" ::: "memory")
#define smp_wmb()       asm volatile("dmb ishst" ::: "memory")
#define smp_rmb()       asm volatile("dmb ishld" ::: "memory")
  26 
/*
 * smp_store_release(p, v): store v to *p with release semantics, using
 * the arm64 store-release instructions (stlrb/stlrh/stlr) selected by
 * sizeof(*p).  A store-release is ordered after all earlier memory
 * accesses as observed by other CPUs.
 *
 * The value is staged through a union so that (v) is assigned with the
 * type of *p, while the asm input reads those bytes via a fixed-width
 * alias type (__uNN_alias_t, defined elsewhere — NOTE(review): the
 * cast presumably exists to sidestep strict-aliasing; confirm where
 * the alias types are declared).  Sizes other than 1/2/4/8 perform no
 * store at all and only issue a full mb(); per the in-macro comment,
 * that default case exists purely to silence gcc and is not expected
 * to be reached.
 */
#define smp_store_release(p, v)                                         \
do {                                                                    \
        union { typeof(*p) __val; char __c[1]; } __u =                  \
                { .__val = (v) };                                       \
                                                                        \
        switch (sizeof(*p)) {                                           \
        case 1:                                                         \
                asm volatile ("stlrb %w1, %0"                           \
                                : "=Q" (*p)                             \
                                : "r" (*(__u8_alias_t *)__u.__c)        \
                                : "memory");                            \
                break;                                                  \
        case 2:                                                         \
                asm volatile ("stlrh %w1, %0"                           \
                                : "=Q" (*p)                             \
                                : "r" (*(__u16_alias_t *)__u.__c)       \
                                : "memory");                            \
                break;                                                  \
        case 4:                                                         \
                asm volatile ("stlr %w1, %0"                            \
                                : "=Q" (*p)                             \
                                : "r" (*(__u32_alias_t *)__u.__c)       \
                                : "memory");                            \
                break;                                                  \
        case 8:                                                         \
                asm volatile ("stlr %1, %0"                             \
                                : "=Q" (*p)                             \
                                : "r" (*(__u64_alias_t *)__u.__c)       \
                                : "memory");                            \
                break;                                                  \
        default:                                                        \
                /* Only to shut up gcc ... */                           \
                mb();                                                   \
                break;                                                  \
        }                                                               \
} while (0)
  63 
/*
 * smp_load_acquire(p): load *p with acquire semantics, using the arm64
 * load-acquire instructions (ldarb/ldarh/ldar) selected by sizeof(*p).
 * A load-acquire is ordered before all later memory accesses as
 * observed by other CPUs.
 *
 * The loaded bytes land in a zero-initialized union via a fixed-width
 * alias type (__uNN_alias_t, defined elsewhere — NOTE(review): the
 * cast presumably sidesteps strict-aliasing; confirm against the alias
 * type declarations), and the statement expression yields __u.__val,
 * i.e. the result with the type of *p.  Sizes other than 1/2/4/8
 * perform no load and only issue a full mb(), returning the
 * zero-initialized union value; per the in-macro comment that default
 * case exists purely to silence gcc.
 */
#define smp_load_acquire(p)                                             \
({                                                                      \
        union { typeof(*p) __val; char __c[1]; } __u =                  \
                { .__c = { 0 } };                                       \
                                                                        \
        switch (sizeof(*p)) {                                           \
        case 1:                                                         \
                asm volatile ("ldarb %w0, %1"                           \
                        : "=r" (*(__u8_alias_t *)__u.__c)               \
                        : "Q" (*p) : "memory");                         \
                break;                                                  \
        case 2:                                                         \
                asm volatile ("ldarh %w0, %1"                           \
                        : "=r" (*(__u16_alias_t *)__u.__c)              \
                        : "Q" (*p) : "memory");                         \
                break;                                                  \
        case 4:                                                         \
                asm volatile ("ldar %w0, %1"                            \
                        : "=r" (*(__u32_alias_t *)__u.__c)              \
                        : "Q" (*p) : "memory");                         \
                break;                                                  \
        case 8:                                                         \
                asm volatile ("ldar %0, %1"                             \
                        : "=r" (*(__u64_alias_t *)__u.__c)              \
                        : "Q" (*p) : "memory");                         \
                break;                                                  \
        default:                                                        \
                /* Only to shut up gcc ... */                           \
                mb();                                                   \
                break;                                                  \
        }                                                               \
        __u.__val;                                                      \
})
  97 
  98 #endif /* _TOOLS_LINUX_ASM_AARCH64_BARRIER_H */

/* [<][>][^][v][top][bottom][index][help] */