root/arch/m68k/include/asm/cmpxchg.h

/* [<][>][^][v][top][bottom][index][help] */

INCLUDED FROM


DEFINITIONS

This source file includes the following definitions.
  1. __xchg
  2. __xchg
  3. __cmpxchg

   1 /* SPDX-License-Identifier: GPL-2.0 */
   2 #ifndef __ARCH_M68K_CMPXCHG__
   3 #define __ARCH_M68K_CMPXCHG__
   4 
   5 #include <linux/irqflags.h>
   6 
   7 struct __xchg_dummy { unsigned long a[100]; };
   8 #define __xg(x) ((volatile struct __xchg_dummy *)(x))
   9 
  10 extern unsigned long __invalid_xchg_size(unsigned long, volatile void *, int);
  11 
  12 #ifndef CONFIG_RMW_INSNS
  13 static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
  14 {
  15         unsigned long flags, tmp;
  16 
  17         local_irq_save(flags);
  18 
  19         switch (size) {
  20         case 1:
  21                 tmp = *(u8 *)ptr;
  22                 *(u8 *)ptr = x;
  23                 x = tmp;
  24                 break;
  25         case 2:
  26                 tmp = *(u16 *)ptr;
  27                 *(u16 *)ptr = x;
  28                 x = tmp;
  29                 break;
  30         case 4:
  31                 tmp = *(u32 *)ptr;
  32                 *(u32 *)ptr = x;
  33                 x = tmp;
  34                 break;
  35         default:
  36                 tmp = __invalid_xchg_size(x, ptr, size);
  37                 break;
  38         }
  39 
  40         local_irq_restore(flags);
  41         return x;
  42 }
  43 #else
/*
 * Exchange x with *ptr (size = 1, 2 or 4 bytes) and return the previous
 * contents of *ptr, using the 68020+ CAS instruction in a retry loop:
 * first load the current value, then CAS it against x; if another CPU
 * changed the memory in between, CAS reloads the compare operand (%0)
 * with the fresh memory value and we retry until the store succeeds.
 * On success, %0 holds the old memory value, which is returned.
 */
static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
{
	switch (size) {
	case 1:
		__asm__ __volatile__
			("moveb %2,%0\n\t"	/* %0 = current *ptr */
			 "1:\n\t"
			 "casb %0,%1,%2\n\t"	/* if (*ptr == %0) *ptr = x; else %0 = *ptr */
			 "jne 1b"		/* raced: retry with the reloaded value */
			 : "=&d" (x) : "d" (x), "m" (*__xg(ptr)) : "memory");
		break;
	case 2:
		__asm__ __volatile__
			("movew %2,%0\n\t"	/* %0 = current *ptr */
			 "1:\n\t"
			 "casw %0,%1,%2\n\t"	/* if (*ptr == %0) *ptr = x; else %0 = *ptr */
			 "jne 1b"		/* raced: retry with the reloaded value */
			 : "=&d" (x) : "d" (x), "m" (*__xg(ptr)) : "memory");
		break;
	case 4:
		__asm__ __volatile__
			("movel %2,%0\n\t"	/* %0 = current *ptr */
			 "1:\n\t"
			 "casl %0,%1,%2\n\t"	/* if (*ptr == %0) *ptr = x; else %0 = *ptr */
			 "jne 1b"		/* raced: retry with the reloaded value */
			 : "=&d" (x) : "d" (x), "m" (*__xg(ptr)) : "memory");
		break;
	default:
		/* Link-time error for any other size: symbol is never defined. */
		x = __invalid_xchg_size(x, ptr, size);
		break;
	}
	return x;
}
  77 #endif
  78 
/*
 * xchg() - atomically store x in *ptr; evaluates to the previous value
 * of *ptr, cast back to the pointed-to type.
 */
#define xchg(ptr,x) ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))
  80 
  81 #include <asm-generic/cmpxchg-local.h>
  82 
/*
 * No native 64-bit CAS on m68k: always use the generic helper from
 * <asm-generic/cmpxchg-local.h> (atomic w.r.t. the current CPU only).
 */
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
  84 
  85 extern unsigned long __invalid_cmpxchg_size(volatile void *,
  86                                             unsigned long, unsigned long, int);
  87 
  88 /*
  89  * Atomic compare and exchange.  Compare OLD with MEM, if identical,
  90  * store NEW in MEM.  Return the initial value in MEM.  Success is
  91  * indicated by comparing RETURN with OLD.
  92  */
  93 #ifdef CONFIG_RMW_INSNS
  94 
/*
 * __cmpxchg() - single CAS instruction: if *p == old, store new into *p.
 * Returns the value that was in *p before the operation — equal to old
 * on success; on failure CAS writes the current memory value back into
 * the compare register ("0" ties the in/out operand to %0), so the
 * caller sees what was actually there.  No retry loop is needed: a
 * failed compare is a valid cmpxchg result.
 */
static inline unsigned long __cmpxchg(volatile void *p, unsigned long old,
				      unsigned long new, int size)
{
	switch (size) {
	case 1:
		__asm__ __volatile__ ("casb %0,%2,%1"
				      : "=d" (old), "=m" (*(char *)p)
				      : "d" (new), "0" (old), "m" (*(char *)p));
		break;
	case 2:
		__asm__ __volatile__ ("casw %0,%2,%1"
				      : "=d" (old), "=m" (*(short *)p)
				      : "d" (new), "0" (old), "m" (*(short *)p));
		break;
	case 4:
		__asm__ __volatile__ ("casl %0,%2,%1"
				      : "=d" (old), "=m" (*(int *)p)
				      : "d" (new), "0" (old), "m" (*(int *)p));
		break;
	default:
		/* Link-time error for any other size: symbol is never defined. */
		old = __invalid_cmpxchg_size(p, old, new, size);
		break;
	}
	return old;
}
 120 
/*
 * cmpxchg() - if *ptr == o, atomically store n; evaluates to the prior
 * value of *ptr (== o on success), cast back to the pointed-to type.
 */
#define cmpxchg(ptr, o, n)						    \
	((__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o),	    \
			(unsigned long)(n), sizeof(*(ptr))))
/* With RMW instructions the local variant is the same fully-atomic CAS. */
#define cmpxchg_local(ptr, o, n)					    \
	((__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o),	    \
			(unsigned long)(n), sizeof(*(ptr))))

/* 64-bit cmpxchg has no native instruction; reuse the local/generic path. */
#define cmpxchg64(ptr, o, n)	cmpxchg64_local((ptr), (o), (n))
 129 
 130 #else
 131 
 132 /*
 133  * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 134  * them available.
 135  */
/* Non-RMW CPUs: fall back to the irq-disabling generic helper. */
#define cmpxchg_local(ptr, o, n)                                               \
        ((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
                        (unsigned long)(n), sizeof(*(ptr))))
 139 
 140 #include <asm-generic/cmpxchg.h>
 141 
 142 #endif
 143 
 144 #endif /* __ARCH_M68K_CMPXCHG__ */

/* [<][>][^][v][top][bottom][index][help] */