/* bitops.h: bit operations for the Fujitsu FR-V CPUs
 *
 * For an explanation of how atomic ops work in this arch, see:
 * Documentation/frv/atomic-ops.txt
 *
 * Copyright (C) 2004 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 */
#ifndef _ASM_BITOPS_H
#define _ASM_BITOPS_H

#include <linux/compiler.h>
#include <asm/byteorder.h>

#ifdef __KERNEL__

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <asm-generic/bitops/ffz.h>

/*
 * The three inline primitives below implement an atomic read-modify-write
 * loop using the FR-V conditional-store idiom: LD.P + ORCR atomically load
 * the old value and arm CC3; CST.P then stores the new value only if CC3 is
 * still true (nothing intervened).  If the store does happen, CORCC clears
 * ICC3.Z so the BEQ falls through; otherwise the loop retries from label 0.
 */
#ifndef CONFIG_FRV_OUTOFLINE_ATOMIC_OPS
/* Atomically do *v &= ~mask (note: the mask is inverted at the call below)
 * and return the value of *v prior to the operation. */
static inline
unsigned long atomic_test_and_ANDNOT_mask(unsigned long mask, volatile unsigned long *v)
{
	unsigned long old, tmp;

	asm volatile(
		"0:						\n"
		"	orcc		gr0,gr0,gr0,icc3	\n"	/* set ICC3.Z */
		"	ckeq		icc3,cc7		\n"
		"	ld.p		%M0,%1			\n"	/* LD.P/ORCR are atomic */
		"	orcr		cc7,cc7,cc3		\n"	/* set CC3 to true */
		"	and%I3		%1,%3,%2		\n"
		"	cst.p		%2,%M0		,cc3,#1	\n"	/* if store happens... */
		"	corcc		gr29,gr29,gr0	,cc3,#1	\n"	/* ... clear ICC3.Z */
		"	beq		icc3,#0,0b		\n"
		: "+U"(*v), "=&r"(old), "=r"(tmp)
		: "NPr"(~mask)
		: "memory", "cc7", "cc3", "icc3"
		);

	return old;
}

/* Atomically do *v |= mask and return the value of *v prior to the
 * operation. */
static inline
unsigned long atomic_test_and_OR_mask(unsigned long mask, volatile unsigned long *v)
{
	unsigned long old, tmp;

	asm volatile(
		"0:						\n"
		"	orcc		gr0,gr0,gr0,icc3	\n"	/* set ICC3.Z */
		"	ckeq		icc3,cc7		\n"
		"	ld.p		%M0,%1			\n"	/* LD.P/ORCR are atomic */
		"	orcr		cc7,cc7,cc3		\n"	/* set CC3 to true */
		"	or%I3		%1,%3,%2		\n"
		"	cst.p		%2,%M0		,cc3,#1	\n"	/* if store happens... */
		"	corcc		gr29,gr29,gr0	,cc3,#1	\n"	/* ... clear ICC3.Z */
		"	beq		icc3,#0,0b		\n"
		: "+U"(*v), "=&r"(old), "=r"(tmp)
		: "NPr"(mask)
		: "memory", "cc7", "cc3", "icc3"
		);

	return old;
}

/* Atomically do *v ^= mask and return the value of *v prior to the
 * operation. */
static inline
unsigned long atomic_test_and_XOR_mask(unsigned long mask, volatile unsigned long *v)
{
	unsigned long old, tmp;

	asm volatile(
		"0:						\n"
		"	orcc		gr0,gr0,gr0,icc3	\n"	/* set ICC3.Z */
		"	ckeq		icc3,cc7		\n"
		"	ld.p		%M0,%1			\n"	/* LD.P/ORCR are atomic */
		"	orcr		cc7,cc7,cc3		\n"	/* set CC3 to true */
		"	xor%I3		%1,%3,%2		\n"
		"	cst.p		%2,%M0		,cc3,#1	\n"	/* if store happens... */
		"	corcc		gr29,gr29,gr0	,cc3,#1	\n"	/* ... clear ICC3.Z */
		"	beq		icc3,#0,0b		\n"
		: "+U"(*v), "=&r"(old), "=r"(tmp)
		: "NPr"(mask)
		: "memory", "cc7", "cc3", "icc3"
		);

	return old;
}

#else

/* Out-of-line versions (see CONFIG_FRV_OUTOFLINE_ATOMIC_OPS); same
 * contracts as the inline versions above. */
extern unsigned long atomic_test_and_ANDNOT_mask(unsigned long mask, volatile unsigned long *v);
extern unsigned long atomic_test_and_OR_mask(unsigned long mask, volatile unsigned long *v);
extern unsigned long atomic_test_and_XOR_mask(unsigned long mask, volatile unsigned long *v);

#endif

#define atomic_clear_mask(mask, v)	atomic_test_and_ANDNOT_mask((mask), (v))
#define atomic_set_mask(mask, v)	atomic_test_and_OR_mask((mask), (v))

/* Atomically clear bit nr in the bitmap at addr; return the bit's previous
 * value.  Words are 32 bits wide (nr >> 5 selects the word, nr & 31 the
 * bit within it). */
static inline int test_and_clear_bit(unsigned long nr, volatile void *addr)
{
	volatile unsigned long *ptr = addr;
	unsigned long mask = 1UL << (nr & 31);
	ptr += nr >> 5;
	return (atomic_test_and_ANDNOT_mask(mask, ptr) & mask) != 0;
}

/* Atomically set bit nr in the bitmap at addr; return the bit's previous
 * value. */
static inline int test_and_set_bit(unsigned long nr, volatile void *addr)
{
	volatile unsigned long *ptr = addr;
	unsigned long mask = 1UL << (nr & 31);
	ptr += nr >> 5;
	return (atomic_test_and_OR_mask(mask, ptr) & mask) != 0;
}

/* Atomically toggle bit nr in the bitmap at addr; return the bit's previous
 * value. */
static inline int test_and_change_bit(unsigned long nr, volatile void *addr)
{
	volatile unsigned long *ptr = addr;
	unsigned long mask = 1UL << (nr & 31);
	ptr += nr >> 5;
	return (atomic_test_and_XOR_mask(mask, ptr) & mask) != 0;
}

/* Atomic bit operations that discard the old value; implemented on top of
 * the test_and_* variants above. */
static inline void clear_bit(unsigned long nr, volatile void *addr)
{
	test_and_clear_bit(nr, addr);
}

static inline void set_bit(unsigned long nr, volatile void *addr)
{
	test_and_set_bit(nr, addr);
}

static inline void change_bit(unsigned long nr, volatile void *addr)
{
	test_and_change_bit(nr, addr);
}

/* Non-atomic variants: plain read-modify-write, for use when the caller
 * already holds exclusion over the bitmap. */
static inline void __clear_bit(unsigned long nr, volatile void *addr)
{
	volatile unsigned long *a = addr;
	int mask;

	a += nr >> 5;
	mask = 1 << (nr & 31);
	*a &= ~mask;
}

static inline void __set_bit(unsigned long nr, volatile void *addr)
{
	volatile unsigned long *a = addr;
	int mask;

	a += nr >> 5;
	mask = 1 << (nr & 31);
	*a |= mask;
}

static inline void __change_bit(unsigned long nr, volatile void *addr)
{
	volatile unsigned long *a = addr;
	int mask;

	a += nr >> 5;
	mask = 1 << (nr & 31);
	*a ^= mask;
}

/* Non-atomic test-and-modify variants; return the bit's previous value. */
static inline int __test_and_clear_bit(unsigned long nr, volatile void *addr)
{
	volatile unsigned long *a = addr;
	int mask, retval;

	a += nr >> 5;
	mask = 1 << (nr & 31);
	retval = (mask & *a) != 0;
	*a &= ~mask;
	return retval;
}

static inline int __test_and_set_bit(unsigned long nr, volatile void *addr)
{
	volatile unsigned long *a = addr;
	int mask, retval;

	a += nr >> 5;
	mask = 1 << (nr & 31);
	retval = (mask & *a) != 0;
	*a |= mask;
	return retval;
}

static inline int __test_and_change_bit(unsigned long nr, volatile void *addr)
{
	volatile unsigned long *a = addr;
	int mask, retval;

	a += nr >> 5;
	mask = 1 << (nr & 31);
	retval = (mask & *a) != 0;
	*a ^= mask;
	return retval;
}

/*
 * This routine doesn't need to be atomic.
 */
static inline int
__constant_test_bit(unsigned long nr, const volatile void *addr)
{
	return ((1UL << (nr & 31)) & (((const volatile unsigned int *) addr)[nr >> 5])) != 0;
}

static inline int __test_bit(unsigned long nr, const volatile void *addr)
{
	int	*a = (int *) addr;
	int	mask;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	return ((mask & *a) != 0);
}

/* Test bit nr in the bitmap at addr; dispatches to the constant-folding
 * variant when nr is a compile-time constant. */
#define test_bit(nr,addr) \
(__builtin_constant_p(nr) ? \
 __constant_test_bit((nr),(addr)) : \
 __test_bit((nr),(addr)))

#include <asm-generic/bitops/find.h>

/**
 * fls - find last bit set
 * @x: the word to search
 *
 * This is defined the same way as ffs:
 * - return 32..1 to indicate bit 31..0 most significant bit set
 * - return 0 to indicate no bits set
 */
#define fls(x)						\
({							\
	int bit;					\
							\
	asm("	subcc	%1,gr0,gr0,icc0		\n"	\
	    "	ckne	icc0,cc4		\n"	\
	    "	cscan.p	%1,gr0,%0	,cc4,#1	\n"	\
	    "	csub	%0,%0,%0	,cc4,#0	\n"	\
	    "	csub	%2,%0,%0	,cc4,#1	\n"	\
	    : "=&r"(bit)				\
	    : "r"(x), "r"(32)				\
	    : "icc0", "cc4"				\
	    );						\
							\
	bit;						\
})

/**
 * fls64 - find last bit set in a 64-bit value
 * @n: the value to search
 *
 * This is defined the same way as ffs:
 * - return 64..1 to indicate bit 63..0 most significant bit set
 * - return 0 to indicate no bits set
 */
static inline __attribute__((const))
int fls64(u64 n)
{
	/* The union splits the 64-bit value into two 32-bit halves; h is
	 * declared first, so it overlays the upper word (FR-V layout —
	 * consistent with the #64/#32 correction below). */
	union {
		u64 ll;
		struct { u32 h, l; };
	} _;
	int bit, x, y;

	_.ll = n;

	/* Predicated sequence: scan the high word if it is non-zero
	 * (subtracting from 64), otherwise scan the low word (subtracting
	 * from 32); cc6 forces a zero result when both words are zero. */
	asm("	subcc.p		%3,gr0,gr0,icc0		\n"
	    "	subcc		%4,gr0,gr0,icc1		\n"
	    "	ckne		icc0,cc4		\n"
	    "	ckne		icc1,cc5		\n"
	    "	norcr		cc4,cc5,cc6		\n"
	    "	csub.p		%0,%0,%0	,cc6,1	\n"
	    "	orcr		cc5,cc4,cc4		\n"
	    "	andcr		cc4,cc5,cc4		\n"
	    "	cscan.p		%3,gr0,%0	,cc4,0	\n"
	    "	setlos		#64,%1			\n"
	    "	cscan.p		%4,gr0,%0	,cc4,1	\n"
	    "	setlos		#32,%2			\n"
	    "	csub.p		%1,%0,%0	,cc4,0	\n"
	    "	csub		%2,%0,%0	,cc4,1	\n"
	    : "=&r"(bit), "=r"(x), "=r"(y)
	    : "0r"(_.h), "r"(_.l)
	    : "icc0", "icc1", "cc4", "cc5", "cc6"
	    );
	return bit;

}

/**
 * ffs - find first bit set
 * @x: the word to search
 *
 * - return 32..1 to indicate bit 31..0 least significant bit set
 * - return 0 to indicate no bits set
 */
static inline __attribute__((const))
int ffs(int x)
{
	/* Note: (x & -x) gives us a mask that is the least significant
	 * (rightmost) 1-bit of the value in x.
	 */
	return fls(x & -x);
}

/**
 * __ffs - find first bit set
 * @x: the word to search
 *
 * - return 31..0 to indicate bit 31..0 least significant bit set
 * - if no bits are set in x, the result is undefined
 */
static inline __attribute__((const))
int __ffs(unsigned long x)
{
	int bit;
	asm("scan %1,gr0,%0" : "=r"(bit) : "r"(x & -x));
	return 31 - bit;
}

/**
 * __fls - find last (most-significant) set bit in a long word
 * @word: the word to search
 *
 * Undefined if no set bit exists, so code should check against 0 first.
 *
 * NOTE(review): the other SCAN users in this file (__ffs, __ilog2_u32)
 * return 31 - bit, which suggests SCAN yields a leading-zero count; this
 * routine returns bit directly.  Verify on hardware/docs that
 * __fls(0x80000000) == 31 here — this may be an inversion bug.
 */
static inline unsigned long __fls(unsigned long word)
{
	unsigned long bit;
	asm("scan %1,gr0,%0" : "=r"(bit) : "r"(word));
	return bit;
}

/*
 * special slimline version of fls() for calculating ilog2_u32()
 * - note: no protection against n == 0
 */
#define ARCH_HAS_ILOG2_U32
static inline __attribute__((const))
int __ilog2_u32(u32 n)
{
	int bit;
	asm("scan %1,gr0,%0" : "=r"(bit) : "r"(n));
	return 31 - bit;
}

/*
 * special slimline version of fls64() for calculating ilog2_u64()
 * - note: no protection against n == 0
 */
#define ARCH_HAS_ILOG2_U64
static inline __attribute__((const))
int __ilog2_u64(u64 n)
{
	/* Same high/low split as fls64() above. */
	union {
		u64 ll;
		struct { u32 h, l; };
	} _;
	int bit, x, y;

	_.ll = n;

	/* Scan the high word (base 63) unless it is zero, in which case
	 * scan the low word (base 31); cc4 selects between the two paths. */
	asm("	subcc		%3,gr0,gr0,icc0		\n"
	    "	ckeq		icc0,cc4		\n"
	    "	cscan.p		%3,gr0,%0	,cc4,0	\n"
	    "	setlos		#63,%1			\n"
	    "	cscan.p		%4,gr0,%0	,cc4,1	\n"
	    "	setlos		#31,%2			\n"
	    "	csub.p		%1,%0,%0	,cc4,0	\n"
	    "	csub		%2,%0,%0	,cc4,1	\n"
	    : "=&r"(bit), "=r"(x), "=r"(y)
	    : "0r"(_.h), "r"(_.l)
	    : "icc0", "cc4"
	    );
	return bit;
}

#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/lock.h>

#include <asm-generic/bitops/le.h>

#include <asm-generic/bitops/ext2-atomic-setbit.h>

#endif /* __KERNEL__ */

#endif /* _ASM_BITOPS_H */