arch/arm/lib/bitops.h

/* SPDX-License-Identifier: GPL-2.0 */
#include <asm/assembler.h>
#include <asm/unwind.h>

#if __LINUX_ARM_ARCH__ >= 6
        .macro  bitop, name, instr
ENTRY(  \name           )
UNWIND( .fnstart        )
        ands    ip, r1, #3
        strbne  r1, [ip]                @ assert word-aligned
        mov     r2, #1
        and     r3, r0, #31             @ Get bit offset
        mov     r0, r0, lsr #5
        add     r1, r1, r0, lsl #2      @ Get word offset
#if __LINUX_ARM_ARCH__ >= 7 && defined(CONFIG_SMP)
        .arch_extension mp
        ALT_SMP(W(pldw) [r1])
        ALT_UP(W(nop))
#endif
        mov     r3, r2, lsl r3
1:      ldrex   r2, [r1]
        \instr  r2, r2, r3
        strex   r0, r2, [r1]
        cmp     r0, #0
        bne     1b
        bx      lr
UNWIND( .fnend          )
ENDPROC(\name           )
        .endm
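
@ Illustrative usage (a sketch, not part of this header): each bit
@ operation gets its own small wrapper .S file under arch/arm/lib which
@ includes this header and expands the macro with the function name and
@ the ALU instruction to apply, roughly:
@
@       #include "bitops.h"
@               .text
@       bitop   _set_bit, orr           @ set-bit variant; the clear-bit
@                                       @ wrapper passes bic, change-bit eor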

        .macro  testop, name, instr, store
ENTRY(  \name           )
UNWIND( .fnstart        )
        ands    ip, r1, #3
        strbne  r1, [ip]                @ assert word-aligned
        mov     r2, #1
        and     r3, r0, #31             @ Get bit offset
        mov     r0, r0, lsr #5
        add     r1, r1, r0, lsl #2      @ Get word offset
        mov     r3, r2, lsl r3          @ create mask
        smp_dmb
#if __LINUX_ARM_ARCH__ >= 7 && defined(CONFIG_SMP)
        .arch_extension mp
        ALT_SMP(W(pldw) [r1])
        ALT_UP(W(nop))
#endif
1:      ldrex   r2, [r1]
        ands    r0, r2, r3              @ save old value of bit
        \instr  r2, r2, r3              @ toggle bit
        strex   ip, r2, [r1]
        cmp     ip, #0
        bne     1b
        smp_dmb
        cmp     r0, #0
        movne   r0, #1
2:      bx      lr
UNWIND( .fnend          )
ENDPROC(\name           )
        .endm
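
@ Illustrative usage (a sketch, not part of this header): the
@ test_and_xxx_bit helpers are generated the same way by their wrapper
@ .S files, along the lines of:
@
@       testop  _test_and_set_bit, orreq, streq
@       testop  _test_and_clear_bit, bicne, strne
@       testop  _test_and_change_bit, eor, str
@
@ The eq/ne conditional forms rely on the flags left by the ands/tst of
@ the old bit value: when the bit already has the requested value the
@ word is not modified (and, in the non-ldrex variant below, the
@ conditional \store is skipped entirely).
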
#else
        .macro  bitop, name, instr
ENTRY(  \name           )
UNWIND( .fnstart        )
        ands    ip, r1, #3
        strbne  r1, [ip]                @ assert word-aligned
        and     r2, r0, #31
        mov     r0, r0, lsr #5
        mov     r3, #1
        mov     r3, r3, lsl r2
        save_and_disable_irqs ip
        ldr     r2, [r1, r0, lsl #2]
        \instr  r2, r2, r3
        str     r2, [r1, r0, lsl #2]
        restore_irqs ip
        ret     lr
UNWIND( .fnend          )
ENDPROC(\name           )
        .endm

/**
 * testop - implement a test_and_xxx_bit operation.
 * @name: name of the function to generate
 * @instr: operational instruction
 * @store: store instruction
 *
 * Note: we can trivially conditionalise the store instruction
 * to avoid dirtying the data cache.
 */
        .macro  testop, name, instr, store
ENTRY(  \name           )
UNWIND( .fnstart        )
        ands    ip, r1, #3
        strbne  r1, [ip]                @ assert word-aligned
        and     r3, r0, #31
        mov     r0, r0, lsr #5
        save_and_disable_irqs ip
        ldr     r2, [r1, r0, lsl #2]!
        mov     r0, #1
        tst     r2, r0, lsl r3
        \instr  r2, r2, r0, lsl r3
        \store  r2, [r1]
        moveq   r0, #0
        restore_irqs ip
        ret     lr
UNWIND( .fnend          )
ENDPROC(\name           )
        .endm
#endif
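
@ Calling convention (a sketch for reference, not part of this file): the
@ functions generated here are declared on the C side roughly as
@
@       extern void _set_bit(int nr, volatile unsigned long *p);
@       extern int  _test_and_set_bit(int nr, volatile unsigned long *p);
@
@ so r0 carries the bit number and r1 the pointer to the bitmap on entry,
@ which is what the alignment check and offset arithmetic above assume.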
