root/arch/mips/include/asm/cmpxchg.h


DEFINITIONS

This source file includes the following definitions:
  1. __xchg
  2. __cmpxchg
  3. __cmpxchg64

/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 06, 07 by Ralf Baechle (ralf@linux-mips.org)
 */
#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/bug.h>
#include <linux/irqflags.h>
#include <asm/compiler.h>
#include <asm/war.h>

/*
 * Using a branch-likely instruction to check the result of an sc instruction
 * works around a bug present in R10000 CPUs prior to revision 3.0 that could
 * cause ll-sc sequences to execute non-atomically.
 */
#if R10000_LLSC_WAR
# define __scbeqz "beqzl"
#else
# define __scbeqz "beqz"
#endif

/*
 * These functions don't exist, so if they are called you'll either:
 *
 * - Get an error at compile-time due to __compiletime_error, if supported by
 *   your compiler.
 *
 * or:
 *
 * - Get an error at link-time due to the call to the missing function.
 */
extern unsigned long __cmpxchg_called_with_bad_pointer(void)
        __compiletime_error("Bad argument size for cmpxchg");
extern unsigned long __cmpxchg64_unsupported(void)
        __compiletime_error("cmpxchg64 not available; cpu_has_64bits may be false");
extern unsigned long __xchg_called_with_bad_pointer(void)
        __compiletime_error("Bad argument size for xchg");

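/*
 * __xchg_asm() expands to the body of an atomic exchange of a naturally
 * aligned 32- or 64-bit word: an ll/sc (or lld/scd) retry loop when
 * kernel_uses_llsc, otherwise a plain load and store carried out with
 * local interrupts disabled.
 */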
#define __xchg_asm(ld, st, m, val)                                      \
({                                                                      \
        __typeof(*(m)) __ret;                                           \
                                                                        \
        if (kernel_uses_llsc) {                                         \
                loongson_llsc_mb();                                     \
                __asm__ __volatile__(                                   \
                "       .set    push                            \n"     \
                "       .set    noat                            \n"     \
                "       .set    push                            \n"     \
                "       .set    " MIPS_ISA_ARCH_LEVEL "         \n"     \
                "1:     " ld "  %0, %2          # __xchg_asm    \n"     \
                "       .set    pop                             \n"     \
                "       move    $1, %z3                         \n"     \
                "       .set    " MIPS_ISA_ARCH_LEVEL "         \n"     \
                "       " st "  $1, %1                          \n"     \
                "\t" __scbeqz " $1, 1b                          \n"     \
                "       .set    pop                             \n"     \
                : "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)           \
                : GCC_OFF_SMALL_ASM() (*m), "Jr" (val)                  \
                : __LLSC_CLOBBER);                                      \
        } else {                                                        \
                unsigned long __flags;                                  \
                                                                        \
                raw_local_irq_save(__flags);                            \
                __ret = *m;                                             \
                *m = val;                                               \
                raw_local_irq_restore(__flags);                         \
        }                                                               \
                                                                        \
        __ret;                                                          \
})

extern unsigned long __xchg_small(volatile void *ptr, unsigned long val,
                                  unsigned int size);

static __always_inline
unsigned long __xchg(volatile void *ptr, unsigned long x, int size)
{
        switch (size) {
        case 1:
        case 2:
                return __xchg_small(ptr, x, size);

        case 4:
                return __xchg_asm("ll", "sc", (volatile u32 *)ptr, x);

        case 8:
                if (!IS_ENABLED(CONFIG_64BIT))
                        return __xchg_called_with_bad_pointer();

                return __xchg_asm("lld", "scd", (volatile u64 *)ptr, x);

        default:
                return __xchg_called_with_bad_pointer();
        }
}

#define xchg(ptr, x)                                                    \
({                                                                      \
        __typeof__(*(ptr)) __res;                                       \
                                                                        \
        smp_mb__before_llsc();                                          \
                                                                        \
        __res = (__typeof__(*(ptr)))                                    \
                __xchg((ptr), (unsigned long)(x), sizeof(*(ptr)));      \
                                                                        \
        smp_llsc_mb();                                                  \
                                                                        \
        __res;                                                          \
})
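
/*
 * Illustrative use only; 'flags_word' below is a hypothetical variable,
 * not part of this header:
 *
 *      u32 flags_word = 0xff;
 *      u32 prev = xchg(&flags_word, 0);
 *
 * xchg() returns the previous value of *ptr and, via the barriers above,
 * is fully ordered on SMP.
 */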
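
/*
 * __cmpxchg_asm() is the compare-and-exchange counterpart of __xchg_asm():
 * an ll/sc (or lld/scd) retry loop that stores 'new' only if the value
 * loaded from memory equals 'old', or an interrupt-disabled fallback when
 * the kernel does not use ll/sc.
 */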
#define __cmpxchg_asm(ld, st, m, old, new)                              \
({                                                                      \
        __typeof(*(m)) __ret;                                           \
                                                                        \
        if (kernel_uses_llsc) {                                         \
                loongson_llsc_mb();                                     \
                __asm__ __volatile__(                                   \
                "       .set    push                            \n"     \
                "       .set    noat                            \n"     \
                "       .set    push                            \n"     \
                "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"     \
                "1:     " ld "  %0, %2          # __cmpxchg_asm \n"     \
                "       bne     %0, %z3, 2f                     \n"     \
                "       .set    pop                             \n"     \
                "       move    $1, %z4                         \n"     \
                "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"     \
                "       " st "  $1, %1                          \n"     \
                "\t" __scbeqz " $1, 1b                          \n"     \
                "       .set    pop                             \n"     \
                "2:                                             \n"     \
                : "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)           \
                : GCC_OFF_SMALL_ASM() (*m), "Jr" (old), "Jr" (new)      \
                : __LLSC_CLOBBER);                                      \
                loongson_llsc_mb();                                     \
        } else {                                                        \
                unsigned long __flags;                                  \
                                                                        \
                raw_local_irq_save(__flags);                            \
                __ret = *m;                                             \
                if (__ret == old)                                       \
                        *m = new;                                       \
                raw_local_irq_restore(__flags);                         \
        }                                                               \
                                                                        \
        __ret;                                                          \
})

extern unsigned long __cmpxchg_small(volatile void *ptr, unsigned long old,
                                     unsigned long new, unsigned int size);

static __always_inline
unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
                        unsigned long new, unsigned int size)
{
        switch (size) {
        case 1:
        case 2:
                return __cmpxchg_small(ptr, old, new, size);

        case 4:
                return __cmpxchg_asm("ll", "sc", (volatile u32 *)ptr,
                                     (u32)old, new);

        case 8:
                /* lld/scd are only available for MIPS64 */
                if (!IS_ENABLED(CONFIG_64BIT))
                        return __cmpxchg_called_with_bad_pointer();

                return __cmpxchg_asm("lld", "scd", (volatile u64 *)ptr,
                                     (u64)old, new);

        default:
                return __cmpxchg_called_with_bad_pointer();
        }
}

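/*
 * cmpxchg_local() performs the compare-and-exchange without any SMP memory
 * barriers; cmpxchg() below wraps it in smp_mb__before_llsc()/smp_llsc_mb()
 * to provide full ordering.
 */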
#define cmpxchg_local(ptr, old, new)                                    \
        ((__typeof__(*(ptr)))                                           \
                __cmpxchg((ptr),                                        \
                          (unsigned long)(__typeof__(*(ptr)))(old),     \
                          (unsigned long)(__typeof__(*(ptr)))(new),     \
                          sizeof(*(ptr))))

#define cmpxchg(ptr, old, new)                                          \
({                                                                      \
        __typeof__(*(ptr)) __res;                                       \
                                                                        \
        smp_mb__before_llsc();                                          \
        __res = cmpxchg_local((ptr), (old), (new));                     \
        smp_llsc_mb();                                                  \
                                                                        \
        __res;                                                          \
})
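
/*
 * Illustrative use only; 'lock' and do_locked_work() below are
 * hypothetical, not part of this header:
 *
 *      u32 lock = 0;
 *      if (cmpxchg(&lock, 0, 1) == 0)
 *              do_locked_work();       (we observed 0 and atomically stored 1)
 *
 * cmpxchg() returns the value read from *ptr; the new value is stored only
 * when that value equals 'old'.
 */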

#ifdef CONFIG_64BIT
#define cmpxchg64_local(ptr, o, n)                                      \
  ({                                                                    \
        BUILD_BUG_ON(sizeof(*(ptr)) != 8);                              \
        cmpxchg_local((ptr), (o), (n));                                 \
  })

#define cmpxchg64(ptr, o, n)                                            \
  ({                                                                    \
        BUILD_BUG_ON(sizeof(*(ptr)) != 8);                              \
        cmpxchg((ptr), (o), (n));                                       \
  })
#else

# include <asm-generic/cmpxchg-local.h>
# define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))

# ifdef CONFIG_SMP

static inline unsigned long __cmpxchg64(volatile void *ptr,
                                        unsigned long long old,
                                        unsigned long long new)
{
        unsigned long long tmp, ret;
        unsigned long flags;

        /*
         * The assembly below has to combine 32 bit values into a 64 bit
         * register, and split 64 bit values from one register into two. If we
         * were to take an interrupt in the middle of this we'd only save the
         * least significant 32 bits of each register & probably clobber the
         * most significant 32 bits of the 64 bit values we're using. In order
         * to avoid this we must disable interrupts.
         */
        local_irq_save(flags);

        loongson_llsc_mb();
        asm volatile(
        "       .set    push                            \n"
        "       .set    " MIPS_ISA_ARCH_LEVEL "         \n"
        /* Load 64 bits from ptr */
        "1:     lld     %L0, %3         # __cmpxchg64   \n"
        /*
         * Split the 64 bit value we loaded into the 2 registers that hold the
         * ret variable.
         */
        "       dsra    %M0, %L0, 32                    \n"
        "       sll     %L0, %L0, 0                     \n"
        /*
         * Compare ret against old, breaking out of the loop if they don't
         * match.
         */
        "       bne     %M0, %M4, 2f                    \n"
        "       bne     %L0, %L4, 2f                    \n"
        /*
         * Combine the 32 bit halves from the 2 registers that hold the new
         * variable into a single 64 bit register.
         */
#  if MIPS_ISA_REV >= 2
        "       move    %L1, %L5                        \n"
        "       dins    %L1, %M5, 32, 32                \n"
#  else
        "       dsll    %L1, %L5, 32                    \n"
        "       dsrl    %L1, %L1, 32                    \n"
        "       .set    noat                            \n"
        "       dsll    $at, %M5, 32                    \n"
        "       or      %L1, %L1, $at                   \n"
        "       .set    at                              \n"
#  endif
        /* Attempt to store new at ptr */
        "       scd     %L1, %2                         \n"
        /* If we failed, loop! */
        "\t" __scbeqz " %L1, 1b                         \n"
        "       .set    pop                             \n"
        "2:                                             \n"
        : "=&r"(ret),
          "=&r"(tmp),
          "=" GCC_OFF_SMALL_ASM() (*(unsigned long long *)ptr)
        : GCC_OFF_SMALL_ASM() (*(unsigned long long *)ptr),
          "r" (old),
          "r" (new)
        : "memory");
        loongson_llsc_mb();

        local_irq_restore(flags);
        return ret;
}

#  define cmpxchg64(ptr, o, n) ({                                       \
        unsigned long long __old = (__typeof__(*(ptr)))(o);             \
        unsigned long long __new = (__typeof__(*(ptr)))(n);             \
        __typeof__(*(ptr)) __res;                                       \
                                                                        \
        /*                                                              \
         * We can only use cmpxchg64 if we know that the CPU supports   \
         * 64-bits, ie. lld & scd. Our call to __cmpxchg64_unsupported  \
         * will cause a build error unless cpu_has_64bits is a          \
         * compile-time constant 1.                                     \
         */                                                             \
        if (cpu_has_64bits && kernel_uses_llsc) {                       \
                smp_mb__before_llsc();                                  \
                __res = __cmpxchg64((ptr), __old, __new);               \
                smp_llsc_mb();                                          \
        } else {                                                        \
                __res = __cmpxchg64_unsupported();                      \
        }                                                               \
                                                                        \
        __res;                                                          \
})

# else /* !CONFIG_SMP */
#  define cmpxchg64(ptr, o, n) cmpxchg64_local((ptr), (o), (n))
# endif /* !CONFIG_SMP */
#endif /* !CONFIG_64BIT */
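
/*
 * Illustrative use only; 'seq' below is a hypothetical 64-bit counter:
 *
 *      u64 seq = 0;
 *      u64 old = cmpxchg64(&seq, 0, 1);
 *
 * On 64-bit kernels cmpxchg64() is simply cmpxchg() plus a size check. On
 * 32-bit SMP kernels it requires a CPU with lld/scd (cpu_has_64bits);
 * otherwise the reference to __cmpxchg64_unsupported() produces a compile-
 * or link-time error, as described above.
 */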

#undef __scbeqz

#endif /* __ASM_CMPXCHG_H */
