#ifndef _S390_BITOPS_H
#define _S390_BITOPS_H

/*
 *  include/asm-s390/bitops.h
 *
 *  S390 version
 *    Copyright (C) 1999 IBM Deutschland Entwicklung GmbH, IBM Corporation
 *    Author(s): Martin Schwidefsky (schwidefsky@de.ibm.com)
 *
 *  Derived from "include/asm-i386/bitops.h"
 *    Copyright (C) 1992, Linus Torvalds
 *
 */

#ifdef __KERNEL__

#include <linux/compiler.h>

/*
 * 32 bit bitops format:
 * bit 0 is the LSB of *addr; bit 31 is the MSB of *addr;
 * bit 32 is the LSB of *(addr+4). That combined with the
 * big endian byte order on S390 give the following bit
 * order in memory:
 *    1f 1e 1d 1c 1b 1a 19 18 17 16 15 14 13 12 11 10 \
 *    0f 0e 0d 0c 0b 0a 09 08 07 06 05 04 03 02 01 00
 * after that follows the next long with bit numbers
 *    3f 3e 3d 3c 3b 3a 39 38 37 36 35 34 33 32 31 30
 *    2f 2e 2d 2c 2b 2a 29 28 27 26 25 24 23 22 21 20
 * The reason for this bit ordering is the fact that
 * in the architecture independent code bits operations
 * of the form "flags |= (1 << bitnr)" are used INTERMIXED
 * with operation of the form "set_bit(bitnr, flags)".
 *
 * 64 bit bitops format:
 * bit 0 is the LSB of *addr; bit 63 is the MSB of *addr;
 * bit 64 is the LSB of *(addr+8). That combined with the
 * big endian byte order on S390 give the following bit
 * order in memory:
 *    3f 3e 3d 3c 3b 3a 39 38 37 36 35 34 33 32 31 30
 *    2f 2e 2d 2c 2b 2a 29 28 27 26 25 24 23 22 21 20
 *    1f 1e 1d 1c 1b 1a 19 18 17 16 15 14 13 12 11 10
 *    0f 0e 0d 0c 0b 0a 09 08 07 06 05 04 03 02 01 00
 * after that follows the next long with bit numbers
 *    7f 7e 7d 7c 7b 7a 79 78 77 76 75 74 73 72 71 70
 *    6f 6e 6d 6c 6b 6a 69 68 67 66 65 64 63 62 61 60
 *    5f 5e 5d 5c 5b 5a 59 58 57 56 55 54 53 52 51 50
 *    4f 4e 4d 4c 4b 4a 49 48 47 46 45 44 43 42 41 40
 * The reason for this bit ordering is the fact that
 * in the architecture independent code bits operations
 * of the form "flags |= (1 << bitnr)" are used INTERMIXED
 * with operation of the form "set_bit(bitnr, flags)".
 */

/*
 * Lookup tables from arch/s390/kernel/bitmap.S:
 *   _oi_bitmap[i]  - single-byte mask with bit i set (for OC/XC)
 *   _ni_bitmap[i]  - single-byte mask with bit i cleared (for NC)
 *   _zb_findmap[b] - position of the first zero bit in byte value b
 *   _sb_findmap[b] - position of the first one bit in byte value b
 */
extern const char _oi_bitmap[];
extern const char _ni_bitmap[];
extern const char _zb_findmap[];
extern const char _sb_findmap[];

#ifndef __s390x__

#define __BITOPS_ALIGN		3
#define __BITOPS_WORDSIZE	32
#define __BITOPS_OR		"or"
#define __BITOPS_AND		"nr"
#define __BITOPS_XOR		"xr"

#if __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ > 2)

/*
 * Compare-and-swap retry loop: load the word, apply __op_string to a
 * copy, and CS it back; "jl 0b" retries until no other CPU raced us.
 * This variant uses the "Q" constraint (short displacement memory
 * operand), which needs gcc > 3.2.
 */
#define __BITOPS_LOOP(__old, __new, __addr, __val, __op_string)	\
	asm volatile(						\
		"	l	%0,%2\n"			\
		"0:	lr	%1,%0\n"			\
		__op_string "	%1,%3\n"			\
		"	cs	%0,%1,%2\n"			\
		"	jl	0b"				\
		: "=&d" (__old), "=&d" (__new),			\
		  "=Q" (*(unsigned long *) __addr)		\
		: "d" (__val), "Q" (*(unsigned long *) __addr)	\
		: "cc");

#else /* __GNUC__ */

/* Same CS loop for older gcc: address passed in a register ("a"). */
#define __BITOPS_LOOP(__old, __new, __addr, __val, __op_string)	\
	asm volatile(						\
		"	l	%0,0(%4)\n"			\
		"0:	lr	%1,%0\n"			\
		__op_string "	%1,%3\n"			\
		"	cs	%0,%1,0(%4)\n"			\
		"	jl	0b"				\
		: "=&d" (__old), "=&d" (__new),			\
		  "=m" (*(unsigned long *) __addr)		\
		: "d" (__val), "a" (__addr),			\
		  "m" (*(unsigned long *) __addr) : "cc");

#endif /* __GNUC__ */

#else /* __s390x__ */

#define __BITOPS_ALIGN		7
#define __BITOPS_WORDSIZE	64
#define __BITOPS_OR		"ogr"
#define __BITOPS_AND		"ngr"
#define __BITOPS_XOR		"xgr"

#if __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ > 2)

/* 64 bit variant of the CS retry loop, using CSG. */
#define __BITOPS_LOOP(__old, __new, __addr, __val, __op_string)	\
	asm volatile(						\
		"	lg	%0,%2\n"			\
		"0:	lgr	%1,%0\n"			\
		__op_string "	%1,%3\n"			\
		"	csg	%0,%1,%2\n"			\
		"	jl	0b"				\
		: "=&d" (__old), "=&d" (__new),			\
		  "=Q" (*(unsigned long *) __addr)		\
		: "d" (__val), "Q" (*(unsigned long *) __addr)	\
		: "cc");

#else /* __GNUC__ */

/* 64 bit CSG loop for older gcc: address passed in a register. */
#define __BITOPS_LOOP(__old, __new, __addr, __val, __op_string)	\
	asm volatile(						\
		"	lg	%0,0(%4)\n"			\
		"0:	lgr	%1,%0\n"			\
		__op_string "	%1,%3\n"			\
		"	csg	%0,%1,0(%4)\n"			\
		"	jl	0b"				\
		: "=&d" (__old), "=&d" (__new),			\
		  "=m" (*(unsigned long *) __addr)		\
		: "d" (__val), "a" (__addr),			\
		  "m" (*(unsigned long *) __addr) : "cc");


#endif /* __GNUC__ */

#endif /* __s390x__ */

/* Number of words needed to hold "bits" bits (rounded up). */
#define __BITOPS_WORDS(bits) (((bits)+__BITOPS_WORDSIZE-1)/__BITOPS_WORDSIZE)
/* Compiler barrier only; the CS/CSG instruction itself serializes. */
#define __BITOPS_BARRIER() asm volatile("" : : : "memory")

#ifdef CONFIG_SMP
/*
 * SMP safe set_bit routine based on compare and swap (CS)
 */
static inline void set_bit_cs(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr, old, new, mask;

	addr = (unsigned long) ptr;
	/* calculate address for CS: nr ^ (nr & (WORDSIZE-1)) is nr with
	 * the in-word bits masked off, i.e. >>3 gives the byte offset of
	 * the word containing bit nr. */
	addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3;
	/* make OR mask */
	mask = 1UL << (nr & (__BITOPS_WORDSIZE - 1));
	/* Do the atomic update. */
	__BITOPS_LOOP(old, new, addr, mask, __BITOPS_OR);
}

/*
 * SMP safe clear_bit routine based on compare and swap (CS)
 */
static inline void clear_bit_cs(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr, old, new, mask;

	addr = (unsigned long) ptr;
	/* calculate address for CS */
	addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3;
	/* make AND mask */
	mask = ~(1UL << (nr & (__BITOPS_WORDSIZE - 1)));
	/* Do the atomic update. */
	__BITOPS_LOOP(old, new, addr, mask, __BITOPS_AND);
}

/*
 * SMP safe change_bit routine based on compare and swap (CS)
 */
static inline void change_bit_cs(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr, old, new, mask;

	addr = (unsigned long) ptr;
	/* calculate address for CS */
	addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3;
	/* make XOR mask */
	mask = 1UL << (nr & (__BITOPS_WORDSIZE - 1));
	/* Do the atomic update. */
	__BITOPS_LOOP(old, new, addr, mask, __BITOPS_XOR);
}

/*
 * SMP safe test_and_set_bit routine based on compare and swap (CS)
 * Returns the previous value of the bit.
 */
static inline int
test_and_set_bit_cs(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr, old, new, mask;

	addr = (unsigned long) ptr;
	/* calculate address for CS */
	addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3;
	/* make OR/test mask */
	mask = 1UL << (nr & (__BITOPS_WORDSIZE - 1));
	/* Do the atomic update. */
	__BITOPS_LOOP(old, new, addr, mask, __BITOPS_OR);
	__BITOPS_BARRIER();
	return (old & mask) != 0;
}

/*
 * SMP safe test_and_clear_bit routine based on compare and swap (CS)
 * Returns the previous value of the bit.
 */
static inline int
test_and_clear_bit_cs(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr, old, new, mask;

	addr = (unsigned long) ptr;
	/* calculate address for CS */
	addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3;
	/* make AND/test mask */
	mask = ~(1UL << (nr & (__BITOPS_WORDSIZE - 1)));
	/* Do the atomic update. */
	__BITOPS_LOOP(old, new, addr, mask, __BITOPS_AND);
	__BITOPS_BARRIER();
	/* old and new differ iff the bit was previously set. */
	return (old ^ new) != 0;
}

/*
 * SMP safe test_and_change_bit routine based on compare and swap (CS)
 * Returns the previous value of the bit.
 */
static inline int
test_and_change_bit_cs(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr, old, new, mask;

	addr = (unsigned long) ptr;
	/* calculate address for CS */
	addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3;
	/* make XOR/test mask */
	mask = 1UL << (nr & (__BITOPS_WORDSIZE - 1));
	/* Do the atomic update. */
	__BITOPS_LOOP(old, new, addr, mask, __BITOPS_XOR);
	__BITOPS_BARRIER();
	return (old & mask) != 0;
}
#endif /* CONFIG_SMP */

/*
 * fast, non-SMP set_bit routine
 *
 * Operates on a single byte: nr ^ (WORDSIZE-8) >> 3 converts the bit
 * number to the big-endian byte offset (see bit-order comment above),
 * then OC ors in the one-byte mask from _oi_bitmap.  Not atomic.
 */
static inline void __set_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;

	addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
	asm volatile(
		"	oc	0(1,%1),0(%2)"
		: "=m" (*(char *) addr) : "a" (addr),
		  "a" (_oi_bitmap + (nr & 7)), "m" (*(char *) addr) : "cc" );
}

/* Compile-time-constant nr variant: plain C byte OR, no asm needed. */
static inline void
__constant_set_bit(const unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;

	addr = ((unsigned long) ptr) + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
	*(unsigned char *) addr |= 1 << (nr & 7);
}

#define set_bit_simple(nr,addr) \
(__builtin_constant_p((nr)) ? \
 __constant_set_bit((nr),(addr)) : \
 __set_bit((nr),(addr)) )

/*
 * fast, non-SMP clear_bit routine
 * Byte-wise NC with the inverted-bit mask from _ni_bitmap.  Not atomic.
 */
static inline void
__clear_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;

	addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
	asm volatile(
		"	nc	0(1,%1),0(%2)"
		: "=m" (*(char *) addr) : "a" (addr),
		  "a" (_ni_bitmap + (nr & 7)), "m" (*(char *) addr) : "cc");
}

/* Compile-time-constant nr variant of __clear_bit. */
static inline void
__constant_clear_bit(const unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;

	addr = ((unsigned long) ptr) + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
	*(unsigned char *) addr &= ~(1 << (nr & 7));
}

#define clear_bit_simple(nr,addr) \
(__builtin_constant_p((nr)) ? \
 __constant_clear_bit((nr),(addr)) : \
 __clear_bit((nr),(addr)) )

/*
 * fast, non-SMP change_bit routine
 * Byte-wise XC with the single-bit mask from _oi_bitmap.  Not atomic.
 */
static inline void __change_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;

	addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
	asm volatile(
		"	xc	0(1,%1),0(%2)"
		: "=m" (*(char *) addr) : "a" (addr),
		  "a" (_oi_bitmap + (nr & 7)), "m" (*(char *) addr) : "cc" );
}

/* Compile-time-constant nr variant of __change_bit. */
static inline void
__constant_change_bit(const unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;

	addr = ((unsigned long) ptr) + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
	*(unsigned char *) addr ^= 1 << (nr & 7);
}

#define change_bit_simple(nr,addr) \
(__builtin_constant_p((nr)) ? \
 __constant_change_bit((nr),(addr)) : \
 __change_bit((nr),(addr)) )

/*
 * fast, non-SMP test_and_set_bit routine
 * Reads the old byte before the OC; not atomic against other CPUs.
 */
static inline int
test_and_set_bit_simple(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;
	unsigned char ch;

	addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
	ch = *(unsigned char *) addr;
	asm volatile(
		"	oc	0(1,%1),0(%2)"
		: "=m" (*(char *) addr)
		: "a" (addr), "a" (_oi_bitmap + (nr & 7)),
		  "m" (*(char *) addr) : "cc", "memory");
	return (ch >> (nr & 7)) & 1;
}
#define __test_and_set_bit(X,Y)		test_and_set_bit_simple(X,Y)

/*
 * fast, non-SMP test_and_clear_bit routine
 */
static inline int
test_and_clear_bit_simple(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;
	unsigned char ch;

	addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
	ch = *(unsigned char *) addr;
	asm volatile(
		"	nc	0(1,%1),0(%2)"
		: "=m" (*(char *) addr)
		: "a" (addr), "a" (_ni_bitmap + (nr & 7)),
		  "m" (*(char *) addr) : "cc", "memory");
	return (ch >> (nr & 7)) & 1;
}
#define __test_and_clear_bit(X,Y)	test_and_clear_bit_simple(X,Y)

/*
 * fast, non-SMP test_and_change_bit routine
 */
static inline int
test_and_change_bit_simple(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;
	unsigned char ch;

	addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
	ch = *(unsigned char *) addr;
	asm volatile(
		"	xc	0(1,%1),0(%2)"
		: "=m" (*(char *) addr)
		: "a" (addr), "a" (_oi_bitmap + (nr & 7)),
		  "m" (*(char *) addr) : "cc", "memory");
	return (ch >> (nr & 7)) & 1;
}
#define __test_and_change_bit(X,Y)	test_and_change_bit_simple(X,Y)

/* Select atomic (CS-based) or plain variants depending on SMP. */
#ifdef CONFIG_SMP
#define set_bit             set_bit_cs
#define clear_bit           clear_bit_cs
#define change_bit          change_bit_cs
#define test_and_set_bit    test_and_set_bit_cs
#define test_and_clear_bit  test_and_clear_bit_cs
#define test_and_change_bit test_and_change_bit_cs
#else
#define set_bit             set_bit_simple
#define clear_bit           clear_bit_simple
#define change_bit          change_bit_simple
#define test_and_set_bit    test_and_set_bit_simple
#define test_and_clear_bit  test_and_clear_bit_simple
#define test_and_change_bit test_and_change_bit_simple
#endif


/*
 * This routine doesn't need to be atomic.
 */

static inline int __test_bit(unsigned long nr, const volatile unsigned long *ptr)
{
	unsigned long addr;
	unsigned char ch;

	addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
	ch = *(volatile unsigned char *) addr;
	return (ch >> (nr & 7)) & 1;
}

/* Compile-time-constant nr variant of __test_bit. */
static inline int
__constant_test_bit(unsigned long nr, const volatile unsigned long *addr) {
    return (((volatile char *) addr)
	    [(nr^(__BITOPS_WORDSIZE-8))>>3] & (1<<(nr&7))) != 0;
}

#define test_bit(nr,addr) \
(__builtin_constant_p((nr)) ? \
 __constant_test_bit((nr),(addr)) : \
 __test_bit((nr),(addr)) )

/*
 * ffz = Find First Zero in word. Undefined if no zero exists,
 * so code should check against ~0UL first..
 *
 * Binary-narrows to the byte containing a zero bit, then finishes
 * with a _zb_findmap table lookup.
 */
static inline unsigned long ffz(unsigned long word)
{
	unsigned long bit = 0;

#ifdef __s390x__
	if (likely((word & 0xffffffff) == 0xffffffff)) {
		word >>= 32;
		bit += 32;
	}
#endif
	if (likely((word & 0xffff) == 0xffff)) {
		word >>= 16;
		bit += 16;
	}
	if (likely((word & 0xff) == 0xff)) {
		word >>= 8;
		bit += 8;
	}
	return bit + _zb_findmap[word & 0xff];
}

/*
 * __ffs = find first bit in word. Undefined if no bit exists,
 * so code should check against 0UL first..
 *
 * Same narrowing scheme as ffz, finishing with _sb_findmap.
 */
static inline unsigned long __ffs (unsigned long word)
{
	unsigned long bit = 0;

#ifdef __s390x__
	if (likely((word & 0xffffffff) == 0)) {
		word >>= 32;
		bit += 32;
	}
#endif
	if (likely((word & 0xffff) == 0)) {
		word >>= 16;
		bit += 16;
	}
	if (likely((word & 0xff) == 0)) {
		word >>= 8;
		bit += 8;
	}
	return bit + _sb_findmap[word & 0xff];
}

/*
 * Find-bit routines..
 *
 * The asm loops compare word by word against the all-ones (zero search)
 * or all-zero (bit search) pattern, then locate the exact bit within
 * the first differing word via test-under-mask and the findmap tables.
 * The addrtype struct tells gcc how much memory the asm may read.
 */

#ifndef __s390x__

static inline int
find_first_zero_bit(const unsigned long * addr, unsigned long size)
{
	typedef struct { long _[__BITOPS_WORDS(size)]; } addrtype;
	unsigned long cmp, count;
        unsigned int res;

	if (!size)
		return 0;
	asm volatile(
		"	lhi	%1,-1\n"
		"	lr	%2,%3\n"
		"	slr	%0,%0\n"
		"	ahi	%2,31\n"
		"	srl	%2,5\n"
		"0:	c	%1,0(%0,%4)\n"
		"	jne	1f\n"
		"	la	%0,4(%0)\n"
		"	brct	%2,0b\n"
		"	lr	%0,%3\n"
		"	j	4f\n"
		"1:	l	%2,0(%0,%4)\n"
		"	sll	%0,3\n"
		"	lhi	%1,0xff\n"
		"	tml	%2,0xffff\n"
		"	jno	2f\n"
		"	ahi	%0,16\n"
		"	srl	%2,16\n"
		"2:	tml	%2,0x00ff\n"
		"	jno	3f\n"
		"	ahi	%0,8\n"
		"	srl	%2,8\n"
		"3:	nr	%2,%1\n"
		"	ic	%2,0(%2,%5)\n"
		"	alr	%0,%2\n"
		"4:"
		: "=&a" (res), "=&d" (cmp), "=&a" (count)
		: "a" (size), "a" (addr), "a" (&_zb_findmap),
		  "m" (*(addrtype *) addr) : "cc");
	return (res < size) ? res : size;
}

static inline int
find_first_bit(const unsigned long * addr, unsigned long size)
{
	typedef struct { long _[__BITOPS_WORDS(size)]; } addrtype;
	unsigned long cmp, count;
        unsigned int res;

	if (!size)
		return 0;
	asm volatile(
		"	slr	%1,%1\n"
		"	lr	%2,%3\n"
		"	slr	%0,%0\n"
		"	ahi	%2,31\n"
		"	srl	%2,5\n"
		"0:	c	%1,0(%0,%4)\n"
		"	jne	1f\n"
		"	la	%0,4(%0)\n"
		"	brct	%2,0b\n"
		"	lr	%0,%3\n"
		"	j	4f\n"
		"1:	l	%2,0(%0,%4)\n"
		"	sll	%0,3\n"
		"	lhi	%1,0xff\n"
		"	tml	%2,0xffff\n"
		"	jnz	2f\n"
		"	ahi	%0,16\n"
		"	srl	%2,16\n"
		"2:	tml	%2,0x00ff\n"
		"	jnz	3f\n"
		"	ahi	%0,8\n"
		"	srl	%2,8\n"
		"3:	nr	%2,%1\n"
		"	ic	%2,0(%2,%5)\n"
		"	alr	%0,%2\n"
		"4:"
		: "=&a" (res), "=&d" (cmp), "=&a" (count)
		: "a" (size), "a" (addr), "a" (&_sb_findmap),
		  "m" (*(addrtype *) addr) : "cc");
	return (res < size) ? res : size;
}

#else /* __s390x__ */

static inline unsigned long
find_first_zero_bit(const unsigned long * addr, unsigned long size)
{
	typedef struct { long _[__BITOPS_WORDS(size)]; } addrtype;
        unsigned long res, cmp, count;

        if (!size)
                return 0;
	asm volatile(
		"	lghi	%1,-1\n"
		"	lgr	%2,%3\n"
		"	slgr	%0,%0\n"
		"	aghi	%2,63\n"
		"	srlg	%2,%2,6\n"
		"0:	cg	%1,0(%0,%4)\n"
		"	jne	1f\n"
		"	la	%0,8(%0)\n"
		"	brct	%2,0b\n"
		"	lgr	%0,%3\n"
		"	j	5f\n"
		"1:	lg	%2,0(%0,%4)\n"
		"	sllg	%0,%0,3\n"
		"	clr	%2,%1\n"	/* low half all ones? */
		"	jne	2f\n"
		"	aghi	%0,32\n"
		"	srlg	%2,%2,32\n"
		"2:	lghi	%1,0xff\n"
		"	tmll	%2,0xffff\n"
		"	jno	3f\n"
		"	aghi	%0,16\n"
		"	srl	%2,16\n"
		"3:	tmll	%2,0x00ff\n"
		"	jno	4f\n"
		"	aghi	%0,8\n"
		"	srl	%2,8\n"
		"4:	ngr	%2,%1\n"
		"	ic	%2,0(%2,%5)\n"
		"	algr	%0,%2\n"
		"5:"
		: "=&a" (res), "=&d" (cmp), "=&a" (count)
		: "a" (size), "a" (addr), "a" (&_zb_findmap),
		  "m" (*(addrtype *) addr) : "cc");
	return (res < size) ? res : size;
}

static inline unsigned long
find_first_bit(const unsigned long * addr, unsigned long size)
{
	typedef struct { long _[__BITOPS_WORDS(size)]; } addrtype;
        unsigned long res, cmp, count;

        if (!size)
                return 0;
	asm volatile(
		"	slgr	%1,%1\n"
		"	lgr	%2,%3\n"
		"	slgr	%0,%0\n"
		"	aghi	%2,63\n"
		"	srlg	%2,%2,6\n"
		"0:	cg	%1,0(%0,%4)\n"
		"	jne	1f\n"
		"	aghi	%0,8\n"
		"	brct	%2,0b\n"
		"	lgr	%0,%3\n"
		"	j	5f\n"
		"1:	lg	%2,0(%0,%4)\n"
		"	sllg	%0,%0,3\n"
		"	clr	%2,%1\n"	/* low half all zero? */
		"	jne	2f\n"
		"	aghi	%0,32\n"
		"	srlg	%2,%2,32\n"
		"2:	lghi	%1,0xff\n"
		"	tmll	%2,0xffff\n"
		"	jnz	3f\n"
		"	aghi	%0,16\n"
		"	srl	%2,16\n"
		"3:	tmll	%2,0x00ff\n"
		"	jnz	4f\n"
		"	aghi	%0,8\n"
		"	srl	%2,8\n"
		"4:	ngr	%2,%1\n"
		"	ic	%2,0(%2,%5)\n"
		"	algr	%0,%2\n"
		"5:"
		: "=&a" (res), "=&d" (cmp), "=&a" (count)
		: "a" (size), "a" (addr), "a" (&_sb_findmap),
		  "m" (*(addrtype *) addr) : "cc");
	return (res < size) ? res : size;
}

#endif /* __s390x__ */

static inline int
find_next_zero_bit (const unsigned long * addr, unsigned long size,
		    unsigned long offset)
{
	const unsigned long *p;
	unsigned long bit, set;

	if (offset >= size)
		return size;
	bit = offset & (__BITOPS_WORDSIZE - 1);
	offset -= bit;
	size -= offset;
	p = addr + offset / __BITOPS_WORDSIZE;
	if (bit) {
		/*
		 * s390 version of ffz returns __BITOPS_WORDSIZE
		 * if no zero bit is present in the word.
		 */
		set = ffz(*p >> bit) + bit;
		if (set >= size)
			return size + offset;
		if (set < __BITOPS_WORDSIZE)
			return set + offset;
		offset += __BITOPS_WORDSIZE;
		size -= __BITOPS_WORDSIZE;
		p++;
	}
	return offset + find_first_zero_bit(p, size);
}

static inline int
find_next_bit (const unsigned long * addr, unsigned long size,
	       unsigned long offset)
{
	const unsigned long *p;
	unsigned long bit, set;

	if (offset >= size)
		return size;
	bit = offset & (__BITOPS_WORDSIZE - 1);
	offset -= bit;
	size -= offset;
	p = addr + offset / __BITOPS_WORDSIZE;
	if (bit) {
		/*
		 * s390 version of __ffs returns __BITOPS_WORDSIZE
		 * if no one bit is present in the word.
		 */
		set = __ffs(*p & (~0UL << bit));
		if (set >= size)
			return size + offset;
		if (set < __BITOPS_WORDSIZE)
			return set + offset;
		offset += __BITOPS_WORDSIZE;
		size -= __BITOPS_WORDSIZE;
		p++;
	}
	return offset + find_first_bit(p, size);
}

/*
 * Every architecture must define this function. It's the fastest
 * way of searching a 140-bit bitmap where the first 100 bits are
 * unlikely to be set. It's guaranteed that at least one of the 140
 * bits is cleared.
 */
static inline int sched_find_first_bit(unsigned long *b)
{
	return find_first_bit(b, 140);
}

#include <asm-generic/bitops/ffs.h>

#include <asm-generic/bitops/fls.h>
#include <asm-generic/bitops/fls64.h>

#include <asm-generic/bitops/hweight.h>

/*
 * ATTENTION: intel byte ordering convention for ext2 and minix !!
 * bit 0 is the LSB of addr; bit 31 is the MSB of addr;
 * bit 32 is the LSB of (addr+4).
 * That combined with the little endian byte order of Intel gives the
 * following bit order in memory:
 *    07 06 05 04 03 02 01 00 15 14 13 12 11 10 09 08 \
 *    23 22 21 20 19 18 17 16 31 30 29 28 27 26 25 24
 *
 * The nr ^ (WORDSIZE-8) below remaps an ext2 (little-endian) bit
 * number onto the native big-endian bit layout documented at the
 * top of this file.
 */

#define ext2_set_bit(nr, addr)       \
	__test_and_set_bit((nr)^(__BITOPS_WORDSIZE - 8), (unsigned long *)addr)
#define ext2_set_bit_atomic(lock, nr, addr)       \
	test_and_set_bit((nr)^(__BITOPS_WORDSIZE - 8), (unsigned long *)addr)
#define ext2_clear_bit(nr, addr)     \
	__test_and_clear_bit((nr)^(__BITOPS_WORDSIZE - 8), (unsigned long *)addr)
#define ext2_clear_bit_atomic(lock, nr, addr)     \
	test_and_clear_bit((nr)^(__BITOPS_WORDSIZE - 8), (unsigned long *)addr)
#define ext2_test_bit(nr, addr)      \
	test_bit((nr)^(__BITOPS_WORDSIZE - 8), (unsigned long *)addr)

#ifndef __s390x__

static inline int
ext2_find_first_zero_bit(void *vaddr, unsigned int size)
{
	typedef struct { long _[__BITOPS_WORDS(size)]; } addrtype;
	unsigned long cmp, count;
        unsigned int res;

        if (!size)
                return 0;
	asm volatile(
		"	lhi	%1,-1\n"
		"	lr	%2,%3\n"
		"	ahi	%2,31\n"
		"	srl	%2,5\n"
		"	slr	%0,%0\n"
		"0:	cl	%1,0(%0,%4)\n"
		"	jne	1f\n"
		"	ahi	%0,4\n"
		"	brct	%2,0b\n"
		"	lr	%0,%3\n"
		"	j	4f\n"
		"1:	l	%2,0(%0,%4)\n"
		"	sll	%0,3\n"
		"	ahi	%0,24\n"
		"	lhi	%1,0xff\n"
		"	tmh	%2,0xffff\n"
		"	jo	2f\n"
		"	ahi	%0,-16\n"
		"	srl	%2,16\n"
		"2:	tml	%2,0xff00\n"
		"	jo	3f\n"
		"	ahi	%0,-8\n"
		"	srl	%2,8\n"
		"3:	nr	%2,%1\n"
		"	ic	%2,0(%2,%5)\n"
		"	alr	%0,%2\n"
		"4:"
		: "=&a" (res), "=&d" (cmp), "=&a" (count)
		: "a" (size), "a" (vaddr), "a" (&_zb_findmap),
		  "m" (*(addrtype *) vaddr) : "cc");
        return (res < size) ? res : size;
}

#else /* __s390x__ */

static inline unsigned long
ext2_find_first_zero_bit(void *vaddr, unsigned long size)
{
	typedef struct { long _[__BITOPS_WORDS(size)]; } addrtype;
        unsigned long res, cmp, count;

        if (!size)
                return 0;
	asm volatile(
		"	lghi	%1,-1\n"
		"	lgr	%2,%3\n"
		"	aghi	%2,63\n"
		"	srlg	%2,%2,6\n"
		"	slgr	%0,%0\n"
		"0:	clg	%1,0(%0,%4)\n"
		"	jne	1f\n"
		"	aghi	%0,8\n"
		"	brct	%2,0b\n"
		"	lgr	%0,%3\n"
		"	j	5f\n"
		"1:	cl	%1,0(%0,%4)\n"	/* which 32-bit half differs? */
		"	jne	2f\n"
		"	aghi	%0,4\n"
		"2:	l	%2,0(%0,%4)\n"
		"	sllg	%0,%0,3\n"
		"	aghi	%0,24\n"
		"	lghi	%1,0xff\n"
		"	tmlh	%2,0xffff\n"
		"	jo	3f\n"
		"	aghi	%0,-16\n"
		"	srl	%2,16\n"
		"3:	tmll	%2,0xff00\n"
		"	jo	4f\n"
		"	aghi	%0,-8\n"
		"	srl	%2,8\n"
		"4:	ngr	%2,%1\n"
		"	ic	%2,0(%2,%5)\n"
		"	algr	%0,%2\n"
		"5:"
		: "=&a" (res), "=&d" (cmp), "=&a" (count)
		: "a" (size), "a" (vaddr), "a" (&_zb_findmap),
		  "m" (*(addrtype *) vaddr) : "cc");
        return (res < size) ? res : size;
}

#endif /* __s390x__ */

static inline int
ext2_find_next_zero_bit(void *vaddr, unsigned long size, unsigned long offset)
{
	unsigned long *addr = vaddr, *p;
	unsigned long word, bit, set;

        if (offset >= size)
                return size;
	bit = offset & (__BITOPS_WORDSIZE - 1);
	offset -= bit;
	size -= offset;
	p = addr + offset / __BITOPS_WORDSIZE;
	if (bit) {
		/* Byte-reverse the partial word so that the native ffz
		 * sees the ext2 (little-endian) bit order. */
#ifndef __s390x__
		asm volatile(
			"	ic	%0,0(%1)\n"
			"	icm	%0,2,1(%1)\n"
			"	icm	%0,4,2(%1)\n"
			"	icm	%0,8,3(%1)"
			: "=&a" (word) : "a" (p), "m" (*p) : "cc");
#else
		asm volatile(
			"	lrvg	%0,%1"
			: "=a" (word) : "m" (*p) );
#endif
		/*
		 * s390 version of ffz returns __BITOPS_WORDSIZE
		 * if no zero bit is present in the word.
		 */
		set = ffz(word >> bit) + bit;
		if (set >= size)
			return size + offset;
		if (set < __BITOPS_WORDSIZE)
			return set + offset;
		offset += __BITOPS_WORDSIZE;
		size -= __BITOPS_WORDSIZE;
		p++;
	}
	return offset + ext2_find_first_zero_bit(p, size);
}

#include <asm-generic/bitops/minix.h>

#endif /* __KERNEL__ */

#endif /* _S390_BITOPS_H */