/* MN10300 bit operations
 *
 * Copyright (C) 2007 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public Licence
 * as published by the Free Software Foundation; either version
 * 2 of the Licence, or (at your option) any later version.
 *
 * These have to be done with inline assembly: that way the bit-setting
 * is guaranteed to be atomic.  The test_and_*() operations return 0 if
 * the bit was clear before the operation and non-zero if it was set.
 *
 * bit 0 is the LSB of addr; bit 32 is the LSB of (addr+1).
 */
#ifndef __ASM_BITOPS_H
#define __ASM_BITOPS_H

#include <asm/cpu-regs.h>
#include <asm/barrier.h>

/*
 * set bit
 */
#define __set_bit(nr, addr)					\
({								\
	volatile unsigned char *_a = (unsigned char *)(addr);	\
	const unsigned shift = (nr) & 7;			\
	_a += (nr) >> 3;					\
								\
	asm volatile("bset %2,(%1) # set_bit reg"		\
		     : "=m"(*_a)				\
		     : "a"(_a), "d"(1 << shift), "m"(*_a)	\
		     : "memory", "cc");				\
})

#define set_bit(nr, addr) __set_bit((nr), (addr))

/*
 * clear bit
 */
#define ___clear_bit(nr, addr)					\
({								\
	volatile unsigned char *_a = (unsigned char *)(addr);	\
	const unsigned shift = (nr) & 7;			\
	_a += (nr) >> 3;					\
								\
	asm volatile("bclr %2,(%1) # clear_bit reg"		\
		     : "=m"(*_a)				\
		     : "a"(_a), "d"(1 << shift), "m"(*_a)	\
		     : "memory", "cc");				\
})

#define clear_bit(nr, addr) ___clear_bit((nr), (addr))


static inline void __clear_bit(unsigned long nr, volatile void *addr)
{
	unsigned int *a = (unsigned int *) addr;
	int mask;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	*a &= ~mask;
}

/*
 * test bit
 */
static inline int test_bit(unsigned long nr, const volatile void *addr)
{
	return 1UL & (((const volatile unsigned int *) addr)[nr >> 5] >> (nr & 31));
}

/*
 * change bit
 */
static inline void __change_bit(unsigned long nr, volatile void *addr)
{
	int mask;
	unsigned int *a = (unsigned int *) addr;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	*a ^= mask;
}

extern void change_bit(unsigned long nr, volatile void *addr);

/*
 * test and set bit
 */
#define __test_and_set_bit(nr, addr)				\
({								\
	volatile unsigned char *_a = (unsigned char *)(addr);	\
	const unsigned shift = (nr) & 7;			\
	unsigned epsw;						\
	_a += (nr) >> 3;					\
								\
	asm volatile("bset %3,(%2) # test_set_bit reg\n"	\
		     "mov epsw,%1"				\
		     : "=m"(*_a), "=d"(epsw)			\
		     : "a"(_a), "d"(1 << shift), "m"(*_a)	\
		     : "memory", "cc");				\
								\
	!(epsw & EPSW_FLAG_Z);					\
})

#define test_and_set_bit(nr, addr) __test_and_set_bit((nr), (addr))

/*
 * test and clear bit
 */
#define __test_and_clear_bit(nr, addr)				\
({								\
	volatile unsigned char *_a = (unsigned char *)(addr);	\
	const unsigned shift = (nr) & 7;			\
	unsigned epsw;						\
	_a += (nr) >> 3;					\
								\
	asm volatile("bclr %3,(%2) # test_clear_bit reg\n"	\
		     "mov epsw,%1"				\
		     : "=m"(*_a), "=d"(epsw)			\
		     : "a"(_a), "d"(1 << shift), "m"(*_a)	\
		     : "memory", "cc");				\
								\
	!(epsw & EPSW_FLAG_Z);					\
})

#define test_and_clear_bit(nr, addr) __test_and_clear_bit((nr), (addr))

/*
 * test and change bit
 */
static inline int __test_and_change_bit(unsigned long nr, volatile void *addr)
{
	int mask, retval;
	unsigned int *a = (unsigned int *)addr;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	retval = (mask & *a) != 0;
	*a ^= mask;

	return retval;
}

extern int test_and_change_bit(unsigned long nr, volatile void *addr);

#include <asm-generic/bitops/lock.h>
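
/*
 * A minimal usage sketch, not part of the MN10300 API: the helper below is
 * hypothetical and only illustrates the numbering convention from the top of
 * this file (bit 35 lands in byte 4, i.e. bit 3 of bitmap[1]) and the
 * test_and_*() return convention (non-zero when the bit was previously set).
 */
static inline int __bitops_usage_sketch(unsigned long *bitmap)
{
	set_bit(35, bitmap);			/* atomically set bit 3 of bitmap[1] */
	__change_bit(0, bitmap);		/* non-atomic toggle of bit 0 */

	if (test_and_clear_bit(35, bitmap))	/* non-zero: bit 35 was set */
		return 1;

	return test_bit(0, bitmap);		/* read back bit 0 */
}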
#ifdef __KERNEL__

/**
 * __ffs - find first bit set
 * @x: the word to search
 *
 * - return 31..0, the index of the least significant set bit in @x
 * - if no bits are set in @x, the result is undefined
 */
static inline __attribute__((const))
unsigned long __ffs(unsigned long x)
{
	int bit;
	asm("bsch %2,%0" : "=r"(bit) : "0"(0), "r"(x & -x) : "cc");
	return bit;
}

/*
 * special slimline version of fls() for calculating ilog2_u32()
 * - note: no protection against n == 0
 */
static inline __attribute__((const))
int __ilog2_u32(u32 n)
{
	int bit;
	asm("bsch %2,%0" : "=r"(bit) : "0"(0), "r"(n) : "cc");
	return bit;
}

/**
 * fls - find last bit set
 * @x: the word to search
 *
 * This is defined the same way as ffs:
 * - return 32..1, the 1-based position of the most significant set bit
 * - return 0 to indicate no bits set
 */
static inline __attribute__((const))
int fls(int x)
{
	return (x != 0) ? __ilog2_u32(x) + 1 : 0;
}

/**
 * __fls - find last (most-significant) set bit in a long word
 * @word: the word to search
 *
 * Undefined if no set bit exists, so code should check against 0 first.
 */
static inline unsigned long __fls(unsigned long word)
{
	return __ilog2_u32(word);
}

/**
 * ffs - find first bit set
 * @x: the word to search
 *
 * - return 32..1, the 1-based position of the least significant set bit
 * - return 0 to indicate no bits set
 */
static inline __attribute__((const))
int ffs(int x)
{
	/* Note: (x & -x) gives us a mask that is the least significant
	 * (rightmost) 1-bit of the value in x.
	 */
	return fls(x & -x);
}

#include <asm-generic/bitops/ffz.h>
#include <asm-generic/bitops/fls64.h>
#include <asm-generic/bitops/find.h>
#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/ext2-atomic-setbit.h>
#include <asm-generic/bitops/le.h>

#endif /* __KERNEL__ */
#endif /* __ASM_BITOPS_H */