root/arch/csky/include/asm/spinlock.h

DEFINITIONS

This source file includes the following definitions:
  1. arch_spin_lock
  2. arch_spin_trylock
  3. arch_spin_unlock
  4. arch_spin_value_unlocked
  5. arch_spin_is_locked
  6. arch_spin_is_contended
  7. arch_spin_lock
  8. arch_spin_unlock
  9. arch_spin_trylock
  10. arch_read_lock
  11. arch_read_unlock
  12. arch_read_trylock
  13. arch_write_lock
  14. arch_write_unlock
  15. arch_write_trylock
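
NOTES

Both implementations below are built on the C-SKY ldex.w/stex.w
load-exclusive/store-exclusive pair.  As with other LL/SC instruction
sets, stex.w stores only if the reservation taken by the matching
ldex.w still holds, and writes 1 (success) or 0 (failure) back into its
source register; the "bez %0, 1b" branches below retry on that failure.
With CONFIG_QUEUED_RWLOCKS the file provides ticket spinlocks and pulls
in the generic queued rwlocks; otherwise it falls back to test-and-set
spinlocks and counter-based rwlocks.

As a rough, portable sketch of the ticket-lock idea (illustrative only,
written with GCC __atomic builtins; the names are hypothetical and
nothing in this snippet is part of the header):

    typedef struct { unsigned short owner, next; } ticket_lock_t;

    static void ticket_lock(ticket_lock_t *l)
    {
            /* atomically take a ticket: fetch-and-add on the next field */
            unsigned short me = __atomic_fetch_add(&l->next, 1, __ATOMIC_RELAXED);

            /* spin until the owner counter reaches our ticket number */
            while (__atomic_load_n(&l->owner, __ATOMIC_ACQUIRE) != me)
                    ;
    }

    static void ticket_unlock(ticket_lock_t *l)
    {
            /* only the holder writes owner, so a plain release store suffices */
            __atomic_store_n(&l->owner, l->owner + 1, __ATOMIC_RELEASE);
    }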

/* SPDX-License-Identifier: GPL-2.0 */

#ifndef __ASM_CSKY_SPINLOCK_H
#define __ASM_CSKY_SPINLOCK_H

#include <linux/spinlock_types.h>
#include <asm/barrier.h>

#ifdef CONFIG_QUEUED_RWLOCKS

/*
 * Ticket-based spin-locking.
 */
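/*
 * Lock word layout (per csky's spinlock_types.h, TICKET_NEXT == 16):
 * "owner" lives in the low 16 bits, "next" in the high 16 bits.  A CPU
 * takes a ticket by atomically adding 1 << TICKET_NEXT, remembers the
 * pre-increment "next" as its ticket, then spins until "owner" reaches
 * that value.  FIFO ordering among contenders follows from the ticket
 * sequence.
 */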
static inline void arch_spin_lock(arch_spinlock_t *lock)
{
        arch_spinlock_t lockval;
        u32 ticket_next = 1 << TICKET_NEXT;
        u32 *p = &lock->lock;
        u32 tmp;

        asm volatile (
                "1:     ldex.w          %0, (%2) \n"
                "       mov             %1, %0   \n"
                "       add             %0, %3   \n"
                "       stex.w          %0, (%2) \n"
                "       bez             %0, 1b   \n"
                : "=&r" (tmp), "=&r" (lockval)
                : "r"(p), "r"(ticket_next)
                : "cc");

        while (lockval.tickets.next != lockval.tickets.owner)
                lockval.tickets.owner = READ_ONCE(lock->tickets.owner);

        smp_mb();
}

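/*
 * Trylock: "rotli %1, %0, 16" swaps the two halves of the lock word, so
 * cmpne fires exactly when owner != next, i.e. the lock is held; in
 * that case we leave with "contended" set and res holding the
 * (necessarily non-zero) lock word, which ends the C loop.  Otherwise
 * we take a ticket with stex.w, whose 0-on-failure result is what makes
 * the do/while retry.
 */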
static inline int arch_spin_trylock(arch_spinlock_t *lock)
{
        u32 tmp, contended, res;
        u32 ticket_next = 1 << TICKET_NEXT;
        u32 *p = &lock->lock;

        do {
                asm volatile (
                "       ldex.w          %0, (%3)   \n"
                "       movi            %2, 1      \n"
                "       rotli           %1, %0, 16 \n"
                "       cmpne           %1, %0     \n"
                "       bt              1f         \n"
                "       movi            %2, 0      \n"
                "       add             %0, %0, %4 \n"
                "       stex.w          %0, (%3)   \n"
                "1:                                \n"
                : "=&r" (res), "=&r" (tmp), "=&r" (contended)
                : "r"(p), "r"(ticket_next)
                : "cc");
        } while (!res);

        if (!contended)
                smp_mb();

        return !contended;
}

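/*
 * Unlock only bumps "owner", and only the current holder writes it, so
 * a plain WRITE_ONCE is enough; an in-flight ldex/stex ticket update
 * racing with this store will simply fail its stex.w and retry
 * (standard LL/SC behaviour).  smp_mb() orders the critical section
 * before the release.
 */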
static inline void arch_spin_unlock(arch_spinlock_t *lock)
{
        smp_mb();
        WRITE_ONCE(lock->tickets.owner, lock->tickets.owner + 1);
}

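/*
 * Queries: the lock is free when the two ticket halves match, and it is
 * contended when more than one ticket separates "next" from "owner",
 * i.e. at least one CPU is waiting behind the holder.
 */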
static inline int arch_spin_value_unlocked(arch_spinlock_t lock)
{
        return lock.tickets.owner == lock.tickets.next;
}

static inline int arch_spin_is_locked(arch_spinlock_t *lock)
{
        return !arch_spin_value_unlocked(READ_ONCE(*lock));
}

static inline int arch_spin_is_contended(arch_spinlock_t *lock)
{
        struct __raw_tickets tickets = READ_ONCE(lock->tickets);

        return (tickets.next - tickets.owner) > 1;
}
#define arch_spin_is_contended  arch_spin_is_contended

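/* rwlocks in this configuration come from the generic queued rwlocks. */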
#include <asm/qrwlock.h>

/* See include/linux/spinlock.h */
#define smp_mb__after_spinlock()        smp_mb()

#else /* CONFIG_QUEUED_RWLOCKS */

/*
 * Test-and-set spin-locking.
 */
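/*
 * Here the lock word is just a flag: 0 means free, 1 means held.  This
 * is simpler than the ticket lock above but provides no FIFO fairness
 * among waiters.
 */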
static inline void arch_spin_lock(arch_spinlock_t *lock)
{
        u32 *p = &lock->lock;
        u32 tmp;

        asm volatile (
                "1:     ldex.w          %0, (%1) \n"
                "       bnez            %0, 1b   \n"
                "       movi            %0, 1    \n"
                "       stex.w          %0, (%1) \n"
                "       bez             %0, 1b   \n"
                : "=&r" (tmp)
                : "r"(p)
                : "cc");
        smp_mb();
}

static inline void arch_spin_unlock(arch_spinlock_t *lock)
{
        smp_mb();
        WRITE_ONCE(lock->lock, 0);
}

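/*
 * Trylock: a non-zero word means the lock is already held and we fail
 * immediately at 2:.  If the word was 0 we attempt the test-and-set;
 * only a failed stex.w loops back, and "movi %0, 0" on the success path
 * makes the final "return !tmp" report success.
 */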
static inline int arch_spin_trylock(arch_spinlock_t *lock)
{
        u32 *p = &lock->lock;
        u32 tmp;

        asm volatile (
                "1:     ldex.w          %0, (%1) \n"
                "       bnez            %0, 2f   \n"
                "       movi            %0, 1    \n"
                "       stex.w          %0, (%1) \n"
                "       bez             %0, 1b   \n"
                "       movi            %0, 0    \n"
                "2:                              \n"
                : "=&r" (tmp)
                : "r"(p)
                : "cc");

        if (!tmp)
                smp_mb();

        return !tmp;
}

#define arch_spin_is_locked(x)  (READ_ONCE((x)->lock) != 0)

/*
 * read lock/unlock/trylock
 */
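/*
 * The rwlock word is a signed counter: 0 means free, a positive value
 * counts the readers, and a writer parks it at -1 (the "subi" from 0).
 * Readers therefore wait while the word is negative (blz), and writers
 * wait until it is exactly 0 (bnez).
 */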
static inline void arch_read_lock(arch_rwlock_t *lock)
{
        u32 *p = &lock->lock;
        u32 tmp;

        asm volatile (
                "1:     ldex.w          %0, (%1) \n"
                "       blz             %0, 1b   \n"
                "       addi            %0, 1    \n"
                "       stex.w          %0, (%1) \n"
                "       bez             %0, 1b   \n"
                : "=&r" (tmp)
                : "r"(p)
                : "cc");
        smp_mb();
}

static inline void arch_read_unlock(arch_rwlock_t *lock)
{
        u32 *p = &lock->lock;
        u32 tmp;

        smp_mb();
        asm volatile (
                "1:     ldex.w          %0, (%1) \n"
                "       subi            %0, 1    \n"
                "       stex.w          %0, (%1) \n"
                "       bez             %0, 1b   \n"
                : "=&r" (tmp)
                : "r"(p)
                : "cc");
}

static inline int arch_read_trylock(arch_rwlock_t *lock)
{
        u32 *p = &lock->lock;
        u32 tmp;

        asm volatile (
                "1:     ldex.w          %0, (%1) \n"
                "       blz             %0, 2f   \n"
                "       addi            %0, 1    \n"
                "       stex.w          %0, (%1) \n"
                "       bez             %0, 1b   \n"
                "       movi            %0, 0    \n"
                "2:                              \n"
                : "=&r" (tmp)
                : "r"(p)
                : "cc");

        if (!tmp)
                smp_mb();

        return !tmp;
}

/*
 * write lock/unlock/trylock
 */
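/*
 * A writer may only take the lock when the counter is 0 (no readers and
 * no writer) and claims it by dropping the word to -1 with "subi".
 */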
static inline void arch_write_lock(arch_rwlock_t *lock)
{
        u32 *p = &lock->lock;
        u32 tmp;

        asm volatile (
                "1:     ldex.w          %0, (%1) \n"
                "       bnez            %0, 1b   \n"
                "       subi            %0, 1    \n"
                "       stex.w          %0, (%1) \n"
                "       bez             %0, 1b   \n"
                : "=&r" (tmp)
                : "r"(p)
                : "cc");
        smp_mb();
}

static inline void arch_write_unlock(arch_rwlock_t *lock)
{
        smp_mb();
        WRITE_ONCE(lock->lock, 0);
}

static inline int arch_write_trylock(arch_rwlock_t *lock)
{
        u32 *p = &lock->lock;
        u32 tmp;

        asm volatile (
                "1:     ldex.w          %0, (%1) \n"
                "       bnez            %0, 2f   \n"
                "       subi            %0, 1    \n"
                "       stex.w          %0, (%1) \n"
                "       bez             %0, 1b   \n"
                "       movi            %0, 0    \n"
                "2:                              \n"
                : "=&r" (tmp)
                : "r"(p)
                : "cc");

        if (!tmp)
                smp_mb();

        return !tmp;
}

#endif /* CONFIG_QUEUED_RWLOCKS */
#endif /* __ASM_CSKY_SPINLOCK_H */
