~ [ source navigation ] ~ [ diff markup ] ~ [ identifier search ] ~

TOMOYO Linux Cross Reference
Linux/include/asm-s390/bitops.h

Version: ~ [ linux-5.9.1 ] ~ [ linux-5.8.16 ] ~ [ linux-5.7.19 ] ~ [ linux-5.6.19 ] ~ [ linux-5.5.19 ] ~ [ linux-5.4.72 ] ~ [ linux-5.3.18 ] ~ [ linux-5.2.21 ] ~ [ linux-5.1.21 ] ~ [ linux-5.0.21 ] ~ [ linux-4.20.17 ] ~ [ linux-4.19.152 ] ~ [ linux-4.18.20 ] ~ [ linux-4.17.19 ] ~ [ linux-4.16.18 ] ~ [ linux-4.15.18 ] ~ [ linux-4.14.202 ] ~ [ linux-4.13.16 ] ~ [ linux-4.12.14 ] ~ [ linux-4.11.12 ] ~ [ linux-4.10.17 ] ~ [ linux-4.9.240 ] ~ [ linux-4.8.17 ] ~ [ linux-4.7.10 ] ~ [ linux-4.6.7 ] ~ [ linux-4.5.7 ] ~ [ linux-4.4.240 ] ~ [ linux-4.3.6 ] ~ [ linux-4.2.8 ] ~ [ linux-4.1.52 ] ~ [ linux-4.0.9 ] ~ [ linux-3.19.8 ] ~ [ linux-3.18.140 ] ~ [ linux-3.17.8 ] ~ [ linux-3.16.85 ] ~ [ linux-3.15.10 ] ~ [ linux-3.14.79 ] ~ [ linux-3.13.11 ] ~ [ linux-3.12.74 ] ~ [ linux-3.11.10 ] ~ [ linux-3.10.108 ] ~ [ linux-2.6.32.71 ] ~ [ linux-2.6.0 ] ~ [ linux-2.4.37.11 ] ~ [ unix-v6-master ] ~ [ ccs-tools-1.8.5 ] ~ [ policy-sample ] ~
Architecture: ~ [ i386 ] ~ [ alpha ] ~ [ m68k ] ~ [ mips ] ~ [ ppc ] ~ [ sparc ] ~ [ sparc64 ] ~

  1 #ifndef _S390_BITOPS_H
  2 #define _S390_BITOPS_H
  3 
  4 /*
  5  *  include/asm-s390/bitops.h
  6  *
  7  *  S390 version
  8  *    Copyright (C) 1999 IBM Deutschland Entwicklung GmbH, IBM Corporation
  9  *    Author(s): Martin Schwidefsky (schwidefsky@de.ibm.com)
 10  *
 11  *  Derived from "include/asm-i386/bitops.h"
 12  *    Copyright (C) 1992, Linus Torvalds
 13  *
 14  */
 15 #include <linux/config.h>
 16 
 17 /*
 18  * 32 bit bitops format:
 19  * bit 0 is the LSB of *addr; bit 31 is the MSB of *addr;
 20  * bit 32 is the LSB of *(addr+4). That combined with the
 21  * big endian byte order on S390 give the following bit
 22  * order in memory:
 23  *    1f 1e 1d 1c 1b 1a 19 18 17 16 15 14 13 12 11 10 \
 24  *    0f 0e 0d 0c 0b 0a 09 08 07 06 05 04 03 02 01 00
 25  * after that follows the next long with bit numbers
 26  *    3f 3e 3d 3c 3b 3a 39 38 37 36 35 34 33 32 31 30
 27  *    2f 2e 2d 2c 2b 2a 29 28 27 26 25 24 23 22 21 20
 28  * The reason for this bit ordering is the fact that
 29  * in the architecture independent code bits operations
 30  * of the form "flags |= (1 << bitnr)" are used INTERMIXED
 31  * with operation of the form "set_bit(bitnr, flags)".
 32  *
 33  * 64 bit bitops format:
 34  * bit 0 is the LSB of *addr; bit 63 is the MSB of *addr;
 35  * bit 64 is the LSB of *(addr+8). That combined with the
 36  * big endian byte order on S390 give the following bit
 37  * order in memory:
 38  *    3f 3e 3d 3c 3b 3a 39 38 37 36 35 34 33 32 31 30
 39  *    2f 2e 2d 2c 2b 2a 29 28 27 26 25 24 23 22 21 20
 40  *    1f 1e 1d 1c 1b 1a 19 18 17 16 15 14 13 12 11 10
 41  *    0f 0e 0d 0c 0b 0a 09 08 07 06 05 04 03 02 01 00
 42  * after that follows the next long with bit numbers
 43  *    7f 7e 7d 7c 7b 7a 79 78 77 76 75 74 73 72 71 70
 44  *    6f 6e 6d 6c 6b 6a 69 68 67 66 65 64 63 62 61 60
 45  *    5f 5e 5d 5c 5b 5a 59 58 57 56 55 54 53 52 51 50
 46  *    4f 4e 4d 4c 4b 4a 49 48 47 46 45 44 43 42 41 40
 47  * The reason for this bit ordering is the fact that
 48  * in the architecture independent code bits operations
 49  * of the form "flags |= (1 << bitnr)" are used INTERMIXED
 50  * with operation of the form "set_bit(bitnr, flags)".
 51  */
 52 
/* set ALIGN_CS to 1 if the SMP safe bit operations should
 * align the address to 4 byte boundary. It seems to work
 * without the alignment.
 */
#ifdef __KERNEL__
#define ALIGN_CS 0      /* in-kernel: callers pass suitably aligned addresses */
#else
#define ALIGN_CS 1      /* outside the kernel: fix up unaligned addresses */
#ifndef CONFIG_SMP
#error "bitops won't work without CONFIG_SMP"
#endif
#endif
 65 
 66 /* bitmap tables from arch/S390/kernel/bitmap.S */
 67 extern const char _oi_bitmap[];
 68 extern const char _ni_bitmap[];
 69 extern const char _zb_findmap[];
 70 extern const char _sb_findmap[];
 71 
 72 #ifndef __s390x__
 73 
/* 31/32 bit mode: the CS loop operates on 4 byte aligned 32 bit words. */
#define __BITOPS_ALIGN          3       /* low address bits masked for alignment */
#define __BITOPS_WORDSIZE       32      /* bits per bitmap word */
#define __BITOPS_OR             "or"    /* 32 bit or */
#define __BITOPS_AND            "nr"    /* 32 bit and */
#define __BITOPS_XOR            "xr"    /* 32 bit exclusive or */
 79 
/*
 * __BITOPS_LOOP - atomic read-modify-write of the word at __addr.
 * Loads the current value into __old, computes __old <op> __val into
 * __new and retries the compare-and-swap ("cs") until the word was
 * updated without interference from another CPU.  __old ends up
 * holding the value the word had just before the successful update.
 */
#define __BITOPS_LOOP(__old, __new, __addr, __val, __op_string)         \
        __asm__ __volatile__("   l   %0,0(%4)\n"                        \
                             "0: lr  %1,%0\n"                           \
                             __op_string "  %1,%3\n"                    \
                             "   cs  %0,%1,0(%4)\n"                     \
                             "   jl  0b"                                \
                             : "=&d" (__old), "=&d" (__new),            \
                               "=m" (*(unsigned long *) __addr)         \
                             : "d" (__val), "a" (__addr),               \
                               "m" (*(unsigned long *) __addr) : "cc" );
 90 
 91 #else /* __s390x__ */
 92 
/* 64 bit mode: the CS loop operates on 8 byte aligned 64 bit words. */
#define __BITOPS_ALIGN          7       /* low address bits masked for alignment */
#define __BITOPS_WORDSIZE       64      /* bits per bitmap word */
#define __BITOPS_OR             "ogr"   /* 64 bit or */
#define __BITOPS_AND            "ngr"   /* 64 bit and */
#define __BITOPS_XOR            "xgr"   /* 64 bit exclusive or */
 98 
/*
 * 64 bit variant of __BITOPS_LOOP: same retry scheme as the 32 bit
 * version but uses the 64 bit load/compare-and-swap ("lg"/"csg").
 * __old receives the word's value before the successful update.
 */
#define __BITOPS_LOOP(__old, __new, __addr, __val, __op_string)         \
        __asm__ __volatile__("   lg  %0,0(%4)\n"                        \
                             "0: lgr %1,%0\n"                           \
                             __op_string "  %1,%3\n"                    \
                             "   csg %0,%1,0(%4)\n"                     \
                             "   jl  0b"                                \
                             : "=&d" (__old), "=&d" (__new),            \
                               "=m" (*(unsigned long *) __addr)         \
                             : "d" (__val), "a" (__addr),               \
                               "m" (*(unsigned long *) __addr) : "cc" );
109 
110 #endif /* __s390x__ */
111 
112 #ifdef CONFIG_SMP
113 /*
114  * SMP safe set_bit routine based on compare and swap (CS)
115  */
116 static inline void set_bit_cs(unsigned long nr, volatile unsigned long *ptr)
117 {
118         unsigned long addr, old, new, mask;
119 
120         addr = (unsigned long) ptr;
121 #if ALIGN_CS == 1
122         nr += (addr & __BITOPS_ALIGN) << 3;    /* add alignment to bit number */
123         addr ^= addr & __BITOPS_ALIGN;         /* align address to 8 */
124 #endif
125         /* calculate address for CS */
126         addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3;
127         /* make OR mask */
128         mask = 1UL << (nr & (__BITOPS_WORDSIZE - 1));
129         /* Do the atomic update. */
130         __BITOPS_LOOP(old, new, addr, mask, __BITOPS_OR);
131 }
132 
133 /*
134  * SMP safe clear_bit routine based on compare and swap (CS)
135  */
136 static inline void clear_bit_cs(unsigned long nr, volatile unsigned long *ptr)
137 {
138         unsigned long addr, old, new, mask;
139 
140         addr = (unsigned long) ptr;
141 #if ALIGN_CS == 1
142         nr += (addr & __BITOPS_ALIGN) << 3;    /* add alignment to bit number */
143         addr ^= addr & __BITOPS_ALIGN;         /* align address to 8 */
144 #endif
145         /* calculate address for CS */
146         addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3;
147         /* make AND mask */
148         mask = ~(1UL << (nr & (__BITOPS_WORDSIZE - 1)));
149         /* Do the atomic update. */
150         __BITOPS_LOOP(old, new, addr, mask, __BITOPS_AND);
151 }
152 
153 /*
154  * SMP safe change_bit routine based on compare and swap (CS)
155  */
156 static inline void change_bit_cs(unsigned long nr, volatile unsigned long *ptr)
157 {
158         unsigned long addr, old, new, mask;
159 
160         addr = (unsigned long) ptr;
161 #if ALIGN_CS == 1
162         nr += (addr & __BITOPS_ALIGN) << 3;    /* add alignment to bit number */
163         addr ^= addr & __BITOPS_ALIGN;         /* align address to 8 */
164 #endif
165         /* calculate address for CS */
166         addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3;
167         /* make XOR mask */
168         mask = 1UL << (nr & (__BITOPS_WORDSIZE - 1));
169         /* Do the atomic update. */
170         __BITOPS_LOOP(old, new, addr, mask, __BITOPS_XOR);
171 }
172 
173 /*
174  * SMP safe test_and_set_bit routine based on compare and swap (CS)
175  */
176 static inline int
177 test_and_set_bit_cs(unsigned long nr, volatile unsigned long *ptr)
178 {
179         unsigned long addr, old, new, mask;
180 
181         addr = (unsigned long) ptr;
182 #if ALIGN_CS == 1
183         nr += (addr & __BITOPS_ALIGN) << 3;    /* add alignment to bit number */
184         addr ^= addr & __BITOPS_ALIGN;         /* align address to 8 */
185 #endif
186         /* calculate address for CS */
187         addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3;
188         /* make OR/test mask */
189         mask = 1UL << (nr & (__BITOPS_WORDSIZE - 1));
190         /* Do the atomic update. */
191         __BITOPS_LOOP(old, new, addr, mask, __BITOPS_OR);
192         return (old & mask) != 0;
193 }
194 
195 /*
196  * SMP safe test_and_clear_bit routine based on compare and swap (CS)
197  */
198 static inline int
199 test_and_clear_bit_cs(unsigned long nr, volatile unsigned long *ptr)
200 {
201         unsigned long addr, old, new, mask;
202 
203         addr = (unsigned long) ptr;
204 #if ALIGN_CS == 1
205         nr += (addr & __BITOPS_ALIGN) << 3;    /* add alignment to bit number */
206         addr ^= addr & __BITOPS_ALIGN;         /* align address to 8 */
207 #endif
208         /* calculate address for CS */
209         addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3;
210         /* make AND/test mask */
211         mask = ~(1UL << (nr & (__BITOPS_WORDSIZE - 1)));
212         /* Do the atomic update. */
213         __BITOPS_LOOP(old, new, addr, mask, __BITOPS_AND);
214         return (old ^ new) != 0;
215 }
216 
217 /*
218  * SMP safe test_and_change_bit routine based on compare and swap (CS) 
219  */
220 static inline int
221 test_and_change_bit_cs(unsigned long nr, volatile unsigned long *ptr)
222 {
223         unsigned long addr, old, new, mask;
224 
225         addr = (unsigned long) ptr;
226 #if ALIGN_CS == 1
227         nr += (addr & __BITOPS_ALIGN) << 3;  /* add alignment to bit number */
228         addr ^= addr & __BITOPS_ALIGN;       /* align address to 8 */
229 #endif
230         /* calculate address for CS */
231         addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3;
232         /* make XOR/test mask */
233         mask = 1UL << (nr & (__BITOPS_WORDSIZE - 1));
234         /* Do the atomic update. */
235         __BITOPS_LOOP(old, new, addr, mask, __BITOPS_XOR);
236         return (old & mask) != 0;
237 }
238 #endif /* CONFIG_SMP */
239 
/*
 * fast, non-SMP set_bit routine
 *
 * Locates the byte that holds bit 'nr' (the nr ^ (__BITOPS_WORDSIZE - 8)
 * term accounts for the big endian byte order described at the top of
 * this file) and ORs in a one-byte mask taken from the _oi_bitmap table.
 * Not atomic with respect to other CPUs.
 */
static inline void __set_bit(unsigned long nr, volatile unsigned long *ptr)
{
        unsigned long addr;

        addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
        /* "oc": OR the single byte at addr with _oi_bitmap[nr & 7]. */
        asm volatile("oc 0(1,%1),0(%2)"
                     : "=m" (*(char *) addr)
                     : "a" (addr), "a" (_oi_bitmap + (nr & 7)),
                       "m" (*(char *) addr) : "cc" );
}
253 
/*
 * Non-atomic set_bit for a compile-time constant bit number: both the
 * byte offset and the immediate mask for the "oi" (or immediate)
 * instruction are compile-time constants, so the compiler can reduce
 * the switch below to a single instruction.
 */
static inline void 
__constant_set_bit(const unsigned long nr, volatile unsigned long *ptr)
{
        unsigned long addr;

        /* Byte address of bit 'nr' in the big endian bit layout. */
        addr = ((unsigned long) ptr) + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
        switch (nr&7) {
        case 0:
                asm volatile ("oi 0(%1),0x01" : "=m" (*(char *) addr)
                              : "a" (addr), "m" (*(char *) addr) : "cc" );
                break;
        case 1:
                asm volatile ("oi 0(%1),0x02" : "=m" (*(char *) addr)
                              : "a" (addr), "m" (*(char *) addr) : "cc" );
                break;
        case 2:
                asm volatile ("oi 0(%1),0x04" : "=m" (*(char *) addr)
                              : "a" (addr), "m" (*(char *) addr) : "cc" );
                break;
        case 3:
                asm volatile ("oi 0(%1),0x08" : "=m" (*(char *) addr)
                              : "a" (addr), "m" (*(char *) addr) : "cc" );
                break;
        case 4:
                asm volatile ("oi 0(%1),0x10" : "=m" (*(char *) addr)
                              : "a" (addr), "m" (*(char *) addr) : "cc" );
                break;
        case 5:
                asm volatile ("oi 0(%1),0x20" : "=m" (*(char *) addr)
                              : "a" (addr), "m" (*(char *) addr) : "cc" );
                break;
        case 6:
                asm volatile ("oi 0(%1),0x40" : "=m" (*(char *) addr)
                              : "a" (addr), "m" (*(char *) addr) : "cc" );
                break;
        case 7:
                asm volatile ("oi 0(%1),0x80" : "=m" (*(char *) addr)
                              : "a" (addr), "m" (*(char *) addr) : "cc" );
                break;
        }
}

/* Use the constant variant when 'nr' is known at compile time. */
#define set_bit_simple(nr,addr) \
(__builtin_constant_p((nr)) ? \
 __constant_set_bit((nr),(addr)) : \
 __set_bit((nr),(addr)) )
300 
/*
 * fast, non-SMP clear_bit routine
 *
 * Locates the byte that holds bit 'nr' and ANDs it with a one-byte
 * mask from the _ni_bitmap table (the mask has only bit nr & 7
 * cleared).  Not atomic with respect to other CPUs.
 */
static inline void 
__clear_bit(unsigned long nr, volatile unsigned long *ptr)
{
        unsigned long addr;

        addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
        /* "nc": AND the single byte at addr with _ni_bitmap[nr & 7]. */
        asm volatile("nc 0(1,%1),0(%2)"
                     : "=m" (*(char *) addr)
                     : "a" (addr), "a" (_ni_bitmap + (nr & 7)),
                       "m" (*(char *) addr) : "cc" );
}
315 
/*
 * Non-atomic clear_bit for a compile-time constant bit number: uses
 * "ni" (and immediate) with the inverted single-bit mask, so the
 * compiler can reduce the switch below to a single instruction.
 */
static inline void 
__constant_clear_bit(const unsigned long nr, volatile unsigned long *ptr)
{
        unsigned long addr;

        /* Byte address of bit 'nr' in the big endian bit layout. */
        addr = ((unsigned long) ptr) + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
        switch (nr&7) {
        case 0:
                asm volatile ("ni 0(%1),0xFE" : "=m" (*(char *) addr)
                              : "a" (addr), "m" (*(char *) addr) : "cc" );
                break;
        case 1:
                asm volatile ("ni 0(%1),0xFD": "=m" (*(char *) addr)
                              : "a" (addr), "m" (*(char *) addr) : "cc" );
                break;
        case 2:
                asm volatile ("ni 0(%1),0xFB" : "=m" (*(char *) addr)
                              : "a" (addr), "m" (*(char *) addr) : "cc" );
                break;
        case 3:
                asm volatile ("ni 0(%1),0xF7" : "=m" (*(char *) addr)
                              : "a" (addr), "m" (*(char *) addr) : "cc" );
                break;
        case 4:
                asm volatile ("ni 0(%1),0xEF" : "=m" (*(char *) addr)
                              : "a" (addr), "m" (*(char *) addr) : "cc" );
                break;
        case 5:
                asm volatile ("ni 0(%1),0xDF" : "=m" (*(char *) addr)
                              : "a" (addr), "m" (*(char *) addr) : "cc" );
                break;
        case 6:
                asm volatile ("ni 0(%1),0xBF" : "=m" (*(char *) addr)
                              : "a" (addr), "m" (*(char *) addr) : "cc" );
                break;
        case 7:
                asm volatile ("ni 0(%1),0x7F" : "=m" (*(char *) addr)
                              : "a" (addr), "m" (*(char *) addr) : "cc" );
                break;
        }
}

/* Use the constant variant when 'nr' is known at compile time. */
#define clear_bit_simple(nr,addr) \
(__builtin_constant_p((nr)) ? \
 __constant_clear_bit((nr),(addr)) : \
 __clear_bit((nr),(addr)) )
362 
/* 
 * fast, non-SMP change_bit routine 
 *
 * Locates the byte that holds bit 'nr' and XORs it with a one-byte
 * single-bit mask from _oi_bitmap, toggling the bit.  Not atomic
 * with respect to other CPUs.
 */
static inline void __change_bit(unsigned long nr, volatile unsigned long *ptr)
{
        unsigned long addr;

        addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
        /* "xc": XOR the single byte at addr with _oi_bitmap[nr & 7]. */
        asm volatile("xc 0(1,%1),0(%2)"
                     :  "=m" (*(char *) addr)
                     : "a" (addr), "a" (_oi_bitmap + (nr & 7)),
                       "m" (*(char *) addr) : "cc" );
}
376 
/*
 * Non-atomic change_bit for a compile-time constant bit number: uses
 * "xi" (exclusive or immediate) with the single-bit mask, so the
 * compiler can reduce the switch below to a single instruction.
 */
static inline void 
__constant_change_bit(const unsigned long nr, volatile unsigned long *ptr) 
{
        unsigned long addr;

        /* Byte address of bit 'nr' in the big endian bit layout. */
        addr = ((unsigned long) ptr) + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
        switch (nr&7) {
        case 0:
                asm volatile ("xi 0(%1),0x01" : "=m" (*(char *) addr)
                              : "a" (addr), "m" (*(char *) addr) : "cc" );
                break;
        case 1:
                asm volatile ("xi 0(%1),0x02" : "=m" (*(char *) addr)
                              : "a" (addr), "m" (*(char *) addr) : "cc" );
                break;
        case 2:
                asm volatile ("xi 0(%1),0x04" : "=m" (*(char *) addr)
                              : "a" (addr), "m" (*(char *) addr) : "cc" );
                break;
        case 3:
                asm volatile ("xi 0(%1),0x08" : "=m" (*(char *) addr)
                              : "a" (addr), "m" (*(char *) addr) : "cc" );
                break;
        case 4:
                asm volatile ("xi 0(%1),0x10" : "=m" (*(char *) addr)
                              : "a" (addr), "m" (*(char *) addr) : "cc" );
                break;
        case 5:
                asm volatile ("xi 0(%1),0x20" : "=m" (*(char *) addr)
                              : "a" (addr), "m" (*(char *) addr) : "cc" );
                break;
        case 6:
                asm volatile ("xi 0(%1),0x40" : "=m" (*(char *) addr)
                              : "a" (addr), "m" (*(char *) addr) : "cc" );
                break;
        case 7:
                asm volatile ("xi 0(%1),0x80" : "=m" (*(char *) addr)
                              : "a" (addr), "m" (*(char *) addr) : "cc" );
                break;
        }
}

/* Use the constant variant when 'nr' is known at compile time. */
#define change_bit_simple(nr,addr) \
(__builtin_constant_p((nr)) ? \
 __constant_change_bit((nr),(addr)) : \
 __change_bit((nr),(addr)) )
423 
/*
 * fast, non-SMP test_and_set_bit routine
 *
 * Reads the byte containing bit 'nr' first, then sets the bit with
 * an "oc" exactly like __set_bit.  The returned old value comes from
 * the byte read before the update, so the read-modify-write sequence
 * is NOT atomic.
 */
static inline int
test_and_set_bit_simple(unsigned long nr, volatile unsigned long *ptr)
{
        unsigned long addr;
        unsigned char ch;

        addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
        ch = *(unsigned char *) addr;
        asm volatile("oc 0(1,%1),0(%2)"
                     : "=m" (*(char *) addr)
                     : "a" (addr), "a" (_oi_bitmap + (nr & 7)),
                       "m" (*(char *) addr) : "cc" );
        /* Bit value before the update. */
        return (ch >> (nr & 7)) & 1;
}
#define __test_and_set_bit(X,Y)         test_and_set_bit_simple(X,Y)
442 
/*
 * fast, non-SMP test_and_clear_bit routine
 *
 * Reads the byte containing bit 'nr' first, then clears the bit with
 * an "nc" exactly like __clear_bit.  The returned old value comes from
 * the byte read before the update, so the read-modify-write sequence
 * is NOT atomic.
 */
static inline int
test_and_clear_bit_simple(unsigned long nr, volatile unsigned long *ptr)
{
        unsigned long addr;
        unsigned char ch;

        addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
        ch = *(unsigned char *) addr;
        asm volatile("nc 0(1,%1),0(%2)"
                     : "=m" (*(char *) addr)
                     : "a" (addr), "a" (_ni_bitmap + (nr & 7)),
                       "m" (*(char *) addr) : "cc" );
        /* Bit value before the update. */
        return (ch >> (nr & 7)) & 1;
}
#define __test_and_clear_bit(X,Y)       test_and_clear_bit_simple(X,Y)
461 
/*
 * fast, non-SMP test_and_change_bit routine
 *
 * Reads the byte containing bit 'nr' first, then toggles the bit with
 * an "xc" exactly like __change_bit.  The returned old value comes from
 * the byte read before the update, so the read-modify-write sequence
 * is NOT atomic.
 */
static inline int
test_and_change_bit_simple(unsigned long nr, volatile unsigned long *ptr)
{
        unsigned long addr;
        unsigned char ch;

        addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
        ch = *(unsigned char *) addr;
        asm volatile("xc 0(1,%1),0(%2)"
                     : "=m" (*(char *) addr)
                     : "a" (addr), "a" (_oi_bitmap + (nr & 7)),
                       "m" (*(char *) addr) : "cc" );
        /* Bit value before the update. */
        return (ch >> (nr & 7)) & 1;
}
#define __test_and_change_bit(X,Y)      test_and_change_bit_simple(X,Y)
480 
#ifdef CONFIG_SMP
/* SMP: select the atomic compare-and-swap based implementations. */
#define set_bit             set_bit_cs
#define clear_bit           clear_bit_cs
#define change_bit          change_bit_cs
#define test_and_set_bit    test_and_set_bit_cs
#define test_and_clear_bit  test_and_clear_bit_cs
#define test_and_change_bit test_and_change_bit_cs
#else
/* UP: select the cheaper non-CS "simple" implementations. */
#define set_bit             set_bit_simple
#define clear_bit           clear_bit_simple
#define change_bit          change_bit_simple
#define test_and_set_bit    test_and_set_bit_simple
#define test_and_clear_bit  test_and_clear_bit_simple
#define test_and_change_bit test_and_change_bit_simple
#endif
496 
497 
498 /*
499  * This routine doesn't need to be atomic.
500  */
501 
502 static inline int __test_bit(unsigned long nr, const volatile unsigned long *ptr)
503 {
504         unsigned long addr;
505         unsigned char ch;
506 
507         addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
508         ch = *(volatile unsigned char *) addr;
509         return (ch >> (nr & 7)) & 1;
510 }
511 
512 static inline int 
513 __constant_test_bit(unsigned long nr, const volatile unsigned long *addr) {
514     return (((volatile char *) addr)
515             [(nr^(__BITOPS_WORDSIZE-8))>>3] & (1<<(nr&7)));
516 }
517 
/* Dispatch to the compile-time variant when 'nr' is a constant. */
#define test_bit(nr,addr) \
(__builtin_constant_p((nr)) ? \
 __constant_test_bit((nr),(addr)) : \
 __test_bit((nr),(addr)) )
522 
523 #ifndef __s390x__
524 
/*
 * Find-bit routines..
 */
/*
 * find_first_zero_bit - return the number of the first zero bit in
 * the bitmap at 'addr', or 'size' if the scan found none below
 * 'size'.  The asm compares one 32 bit word at a time against -1,
 * then narrows down within the first non-all-ones word by halfword
 * and byte ("tml") and finishes with a lookup in _zb_findmap.
 */
static inline int
find_first_zero_bit(unsigned long * addr, unsigned size)
{
        unsigned long cmp, count;
        int res;

        if (!size)
                return 0;
        __asm__("   lhi  %1,-1\n"
                "   lr   %2,%3\n"
                "   slr  %0,%0\n"
                "   ahi  %2,31\n"
                "   srl  %2,5\n"
                "0: c    %1,0(%0,%4)\n"
                "   jne  1f\n"
                "   ahi  %0,4\n"
                "   brct %2,0b\n"
                "   lr   %0,%3\n"
                "   j    4f\n"
                "1: l    %2,0(%0,%4)\n"
                "   sll  %0,3\n"
                "   lhi  %1,0xff\n"
                "   tml  %2,0xffff\n"
                "   jno  2f\n"
                "   ahi  %0,16\n"
                "   srl  %2,16\n"
                "2: tml  %2,0x00ff\n"
                "   jno  3f\n"
                "   ahi  %0,8\n"
                "   srl  %2,8\n"
                "3: nr   %2,%1\n"
                "   ic   %2,0(%2,%5)\n"
                "   alr  %0,%2\n"
                "4:"
                : "=&a" (res), "=&d" (cmp), "=&a" (count)
                : "a" (size), "a" (addr), "a" (&_zb_findmap) : "cc" );
        /* Clamp to 'size' in case the last word scan ran past it. */
        return (res < size) ? res : size;
}
566 
/*
 * find_first_bit - return the number of the first set bit in the
 * bitmap at 'addr', or 'size' if the scan found none below 'size'.
 * Same structure as find_first_zero_bit, but compares words against
 * zero and uses the _sb_findmap lookup table for the final byte.
 */
static inline int
find_first_bit(unsigned long * addr, unsigned size)
{
        unsigned long cmp, count;
        int res;

        if (!size)
                return 0;
        __asm__("   slr  %1,%1\n"
                "   lr   %2,%3\n"
                "   slr  %0,%0\n"
                "   ahi  %2,31\n"
                "   srl  %2,5\n"
                "0: c    %1,0(%0,%4)\n"
                "   jne  1f\n"
                "   ahi  %0,4\n"
                "   brct %2,0b\n"
                "   lr   %0,%3\n"
                "   j    4f\n"
                "1: l    %2,0(%0,%4)\n"
                "   sll  %0,3\n"
                "   lhi  %1,0xff\n"
                "   tml  %2,0xffff\n"
                "   jnz  2f\n"
                "   ahi  %0,16\n"
                "   srl  %2,16\n"
                "2: tml  %2,0x00ff\n"
                "   jnz  3f\n"
                "   ahi  %0,8\n"
                "   srl  %2,8\n"
                "3: nr   %2,%1\n"
                "   ic   %2,0(%2,%5)\n"
                "   alr  %0,%2\n"
                "4:"
                : "=&a" (res), "=&d" (cmp), "=&a" (count)
                : "a" (size), "a" (addr), "a" (&_sb_findmap) : "cc" );
        /* Clamp to 'size' in case the last word scan ran past it. */
        return (res < size) ? res : size;
}
605 
/*
 * find_next_zero_bit - return the number of the first zero bit at or
 * after 'offset' in the bitmap at 'addr'.  Handles the partial first
 * word inline, then falls back to find_first_zero_bit for the rest.
 */
static inline int
find_next_zero_bit (unsigned long * addr, int size, int offset)
{
        unsigned long * p = ((unsigned long *) addr) + (offset >> 5);
        unsigned long bitvec, reg;
        int set, bit = offset & 31, res;

        if (bit) {
                /*
                 * Look for zero in first word
                 */
                bitvec = (*p) >> bit;
                /* Narrow down by halfword/byte, then _zb_findmap. */
                __asm__("   slr  %0,%0\n"
                        "   lhi  %2,0xff\n"
                        "   tml  %1,0xffff\n"
                        "   jno  0f\n"
                        "   ahi  %0,16\n"
                        "   srl  %1,16\n"
                        "0: tml  %1,0x00ff\n"
                        "   jno  1f\n"
                        "   ahi  %0,8\n"
                        "   srl  %1,8\n"
                        "1: nr   %1,%2\n"
                        "   ic   %1,0(%1,%3)\n"
                        "   alr  %0,%1"
                        : "=&d" (set), "+a" (bitvec), "=&d" (reg)
                        : "a" (&_zb_findmap) : "cc" );
                if (set < (32 - bit))
                        return set + offset;
                offset += 32 - bit;
                p++;
        }
        /*
         * No zero yet, search remaining full words for a zero
         */
        res = find_first_zero_bit (p, size - 32 * (p - (unsigned long *) addr));
        return (offset + res);
}
644 
/*
 * find_next_bit - return the number of the first set bit at or after
 * 'offset' in the bitmap at 'addr'.  Handles the partial first word
 * inline, then falls back to find_first_bit for the rest.
 */
static inline int
find_next_bit (unsigned long * addr, int size, int offset)
{
        unsigned long * p = ((unsigned long *) addr) + (offset >> 5);
        unsigned long bitvec, reg;
        int set, bit = offset & 31, res;

        if (bit) {
                /*
                 * Look for set bit in first word
                 */
                bitvec = (*p) >> bit;
                /* Narrow down by halfword/byte, then _sb_findmap. */
                __asm__("   slr  %0,%0\n"
                        "   lhi  %2,0xff\n"
                        "   tml  %1,0xffff\n"
                        "   jnz  0f\n"
                        "   ahi  %0,16\n"
                        "   srl  %1,16\n"
                        "0: tml  %1,0x00ff\n"
                        "   jnz  1f\n"
                        "   ahi  %0,8\n"
                        "   srl  %1,8\n"
                        "1: nr   %1,%2\n"
                        "   ic   %1,0(%1,%3)\n"
                        "   alr  %0,%1"
                        : "=&d" (set), "+a" (bitvec), "=&d" (reg)
                        : "a" (&_sb_findmap) : "cc" );
                if (set < (32 - bit))
                        return set + offset;
                offset += 32 - bit;
                p++;
        }
        /*
         * No set bit yet, search remaining full words for a bit
         */
        res = find_first_bit (p, size - 32 * (p - (unsigned long *) addr));
        return (offset + res);
}
683 
/*
 * ffz = Find First Zero in word. Undefined if no zero exists,
 * so code should check against ~0UL first..
 *
 * Narrows the word down by halfword and byte ("tml") and resolves
 * the final byte with a lookup in _zb_findmap.
 */
static inline unsigned long ffz(unsigned long word)
{
        unsigned long reg;
        int result;

        __asm__("   slr  %0,%0\n"
                "   lhi  %2,0xff\n"
                "   tml  %1,0xffff\n"
                "   jno  0f\n"
                "   ahi  %0,16\n"
                "   srl  %1,16\n"
                "0: tml  %1,0x00ff\n"
                "   jno  1f\n"
                "   ahi  %0,8\n"
                "   srl  %1,8\n"
                "1: nr   %1,%2\n"
                "   ic   %1,0(%1,%3)\n"
                "   alr  %0,%1"
                : "=&d" (result), "+a" (word), "=&d" (reg)
                : "a" (&_zb_findmap) : "cc" );
        return result;
}
710 
/*
 * __ffs = find first bit in word. Undefined if no bit exists,
 * so code should check against 0UL first..
 *
 * Same narrowing scheme as ffz, but tests for a nonzero halfword/
 * byte ("jnz") and uses the _sb_findmap lookup table.
 */
static inline unsigned long __ffs (unsigned long word)
{
        unsigned long reg, result;

        __asm__("   slr  %0,%0\n"
                "   lhi  %2,0xff\n"
                "   tml  %1,0xffff\n"
                "   jnz  0f\n"
                "   ahi  %0,16\n"
                "   srl  %1,16\n"
                "0: tml  %1,0x00ff\n"
                "   jnz  1f\n"
                "   ahi  %0,8\n"
                "   srl  %1,8\n"
                "1: nr   %1,%2\n"
                "   ic   %1,0(%1,%3)\n"
                "   alr  %0,%1"
                : "=&d" (result), "+a" (word), "=&d" (reg)
                : "a" (&_sb_findmap) : "cc" );
        return result;
}
736 
737 #else /* __s390x__ */
738 
/*
 * Find-bit routines..
 */
/*
 * 64 bit find_first_zero_bit - return the number of the first zero
 * bit in the bitmap at 'addr', or 'size' if none was found below
 * 'size'.  Scans one 64 bit word at a time against -1, then narrows
 * down within the first non-all-ones word by 32/16/8 bit steps and
 * finishes with a lookup in _zb_findmap.
 */
static inline unsigned long
find_first_zero_bit(unsigned long * addr, unsigned long size)
{
        unsigned long res, cmp, count;

        if (!size)
                return 0;
        __asm__("   lghi  %1,-1\n"
                "   lgr   %2,%3\n"
                "   slgr  %0,%0\n"
                "   aghi  %2,63\n"
                "   srlg  %2,%2,6\n"
                "0: cg    %1,0(%0,%4)\n"
                "   jne   1f\n"
                "   aghi  %0,8\n"
                "   brct  %2,0b\n"
                "   lgr   %0,%3\n"
                "   j     5f\n"
                "1: lg    %2,0(%0,%4)\n"
                "   sllg  %0,%0,3\n"
                "   clr   %2,%1\n"
                "   jne   2f\n"
                "   aghi  %0,32\n"
                "   srlg  %2,%2,32\n"
                "2: lghi  %1,0xff\n"
                "   tmll  %2,0xffff\n"
                "   jno   3f\n"
                "   aghi  %0,16\n"
                "   srl   %2,16\n"
                "3: tmll  %2,0x00ff\n"
                "   jno   4f\n"
                "   aghi  %0,8\n"
                "   srl   %2,8\n"
                "4: ngr   %2,%1\n"
                "   ic    %2,0(%2,%5)\n"
                "   algr  %0,%2\n"
                "5:"
                : "=&a" (res), "=&d" (cmp), "=&a" (count)
                : "a" (size), "a" (addr), "a" (&_zb_findmap) : "cc" );
        /* Clamp to 'size' in case the last word scan ran past it. */
        return (res < size) ? res : size;
}
783 
/*
 * 64 bit find_first_bit - return the number of the first set bit in
 * the bitmap at 'addr', or 'size' if none was found below 'size'.
 * Same structure as the 64 bit find_first_zero_bit, but compares
 * words against zero and uses the _sb_findmap lookup table.
 */
static inline unsigned long
find_first_bit(unsigned long * addr, unsigned long size)
{
        unsigned long res, cmp, count;

        if (!size)
                return 0;
        __asm__("   slgr  %1,%1\n"
                "   lgr   %2,%3\n"
                "   slgr  %0,%0\n"
                "   aghi  %2,63\n"
                "   srlg  %2,%2,6\n"
                "0: cg    %1,0(%0,%4)\n"
                "   jne   1f\n"
                "   aghi  %0,8\n"
                "   brct  %2,0b\n"
                "   lgr   %0,%3\n"
                "   j     5f\n"
                "1: lg    %2,0(%0,%4)\n"
                "   sllg  %0,%0,3\n"
                "   clr   %2,%1\n"
                "   jne   2f\n"
                "   aghi  %0,32\n"
                "   srlg  %2,%2,32\n"
                "2: lghi  %1,0xff\n"
                "   tmll  %2,0xffff\n"
                "   jnz   3f\n"
                "   aghi  %0,16\n"
                "   srl   %2,16\n"
                "3: tmll  %2,0x00ff\n"
                "   jnz   4f\n"
                "   aghi  %0,8\n"
                "   srl   %2,8\n"
                "4: ngr   %2,%1\n"
                "   ic    %2,0(%2,%5)\n"
                "   algr  %0,%2\n"
                "5:"
                : "=&a" (res), "=&d" (cmp), "=&a" (count)
                : "a" (size), "a" (addr), "a" (&_sb_findmap) : "cc" );
        /* Clamp to 'size' in case the last word scan ran past it. */
        return (res < size) ? res : size;
}
825 
/*
 * find_next_zero_bit - find the first zero bit at or after @offset
 * (64 bit version).
 * @addr: bitmap to search
 * @size: number of valid bits in the bitmap
 * @offset: bit number to start searching at
 *
 * If @offset is not 64-bit aligned, the remainder of the containing
 * doubleword is examined first; the rest of the bitmap is then handed
 * to find_first_zero_bit().  The partial word is shifted right by
 * @bit, so the zeros shifted in at the top act as a sentinel -- the
 * "set < (64 - bit)" test filters those out.
 *
 * NOTE(review): unlike ext2_find_next_zero_bit() below there is no
 * "offset >= size" guard here; callers presumably guarantee
 * offset < size -- confirm before passing out-of-range offsets.
 */
826 static inline unsigned long
827 find_next_zero_bit (unsigned long * addr, unsigned long size, unsigned long offset)
828 {
829         unsigned long * p = ((unsigned long *) addr) + (offset >> 6);
830         unsigned long bitvec, reg;
831         unsigned long set, bit = offset & 63, res;
832 
833         if (bit) {
834                 /*
835                  * Look for zero in first word
836                  */
837                 bitvec = (*p) >> bit;
838                 __asm__("   lhi  %2,-1\n"       /* all-ones comparand */
839                         "   slgr %0,%0\n"
840                         "   clr  %1,%2\n"       /* low 32 bits all ones? */
841                         "   jne  0f\n"
842                         "   aghi %0,32\n"
843                         "   srlg %1,%1,32\n"
844                         "0: lghi %2,0xff\n"
845                         "   tmll %1,0xffff\n"
846                         "   jno  1f\n"          /* jno: branch unless all ones */
847                         "   aghi %0,16\n"
848                         "   srlg %1,%1,16\n"
849                         "1: tmll %1,0x00ff\n"
850                         "   jno  2f\n"
851                         "   aghi %0,8\n"
852                         "   srlg %1,%1,8\n"
853                         "2: ngr  %1,%2\n"       /* byte lookup in _zb_findmap */
854                         "   ic   %1,0(%1,%3)\n"
855                         "   algr %0,%1"
856                         : "=&d" (set), "+a" (bitvec), "=&d" (reg)
857                         : "a" (&_zb_findmap) : "cc" );
858                 if (set < (64 - bit))
859                         return set + offset;
860                 offset += 64 - bit;
861                 p++;
862         }
863         /*
864          * No zero yet, search remaining full words for a zero
865          */
866         res = find_first_zero_bit (p, size - 64 * (p - (unsigned long *) addr));
867         return (offset + res);
868 }
869 
/*
 * find_next_bit - find the first set bit at or after @offset
 * (64 bit version).
 * @addr: bitmap to search
 * @size: number of valid bits in the bitmap
 * @offset: bit number to start searching at
 *
 * Mirror image of find_next_zero_bit() above: the partial first word
 * is scanned with the set-bit table (_sb_findmap), then the remaining
 * full words go through find_first_bit().  Shifting the partial word
 * right by @bit brings in zero bits at the top, so a "hit" at
 * position >= (64 - bit) means no set bit was found in the word.
 */
870 static inline unsigned long
871 find_next_bit (unsigned long * addr, unsigned long size, unsigned long offset)
872 {
873         unsigned long * p = ((unsigned long *) addr) + (offset >> 6);
874         unsigned long bitvec, reg;
875         unsigned long set, bit = offset & 63, res;
876 
877         if (bit) {
878                 /*
879                  * Look for a set bit in the first word
880                  */
881                 bitvec = (*p) >> bit;
882                 __asm__("   slgr %0,%0\n"
883                         "   ltr  %1,%1\n"       /* any set bit in the low 32 bits? */
884                         "   jnz  0f\n"
885                         "   aghi %0,32\n"
886                         "   srlg %1,%1,32\n"
887                         "0: lghi %2,0xff\n"
888                         "   tmll %1,0xffff\n"
889                         "   jnz  1f\n"
890                         "   aghi %0,16\n"
891                         "   srlg %1,%1,16\n"
892                         "1: tmll %1,0x00ff\n"
893                         "   jnz  2f\n"
894                         "   aghi %0,8\n"
895                         "   srlg %1,%1,8\n"
896                         "2: ngr  %1,%2\n"       /* byte lookup in _sb_findmap */
897                         "   ic   %1,0(%1,%3)\n"
898                         "   algr %0,%1"
899                         : "=&d" (set), "+a" (bitvec), "=&d" (reg)
900                         : "a" (&_sb_findmap) : "cc" );
901                 if (set < (64 - bit))
902                         return set + offset;
903                 offset += 64 - bit;
904                 p++;
905         }
906         /*
907          * No set bit yet, search remaining full words for a bit
908          */
909         res = find_first_bit (p, size - 64 * (p - (unsigned long *) addr));
910         return (offset + res);
911 }
912 
913 /*
914  * ffz = Find First Zero in word. Undefined if no zero exists,
915  * so code should check against ~0UL first..
916  *
 * 64 bit version.  Narrows the zero's position down by halves
 * (32/16/8 bits, using "all ones?" tests) and resolves the final
 * byte through the _zb_findmap lookup table.
917  */
917 static inline unsigned long ffz(unsigned long word)
918 {
919         unsigned long reg, result;
920 
921         __asm__("   lhi  %2,-1\n"       /* all-ones comparand */
922                 "   slgr %0,%0\n"
923                 "   clr  %1,%2\n"       /* low 32 bits all ones? */
924                 "   jne  0f\n"
925                 "   aghi %0,32\n"
926                 "   srlg %1,%1,32\n"
927                 "0: lghi %2,0xff\n"
928                 "   tmll %1,0xffff\n"
929                 "   jno  1f\n"          /* jno: branch unless all ones */
930                 "   aghi %0,16\n"
931                 "   srlg %1,%1,16\n"
932                 "1: tmll %1,0x00ff\n"
933                 "   jno  2f\n"
934                 "   aghi %0,8\n"
935                 "   srlg %1,%1,8\n"
936                 "2: ngr  %1,%2\n"       /* byte lookup in _zb_findmap */
937                 "   ic   %1,0(%1,%3)\n"
938                 "   algr %0,%1"
939                 : "=&d" (result), "+a" (word), "=&d" (reg)
940                 : "a" (&_zb_findmap) : "cc" );
941         return result;
942 }
943 
944 /*
945  * __ffs = find first bit in word. Undefined if no bit exists,
946  * so code should check against 0UL first..
947  *
 * 64 bit version.  Same halving strategy as ffz() above, but tests
 * "any bit set?" instead of "all ones?" and resolves the final byte
 * through the _sb_findmap lookup table.
948  */
948 static inline unsigned long __ffs (unsigned long word)
949 {
950         unsigned long reg, result;
951 
952         __asm__("   slgr %0,%0\n"
953                 "   ltr  %1,%1\n"       /* any set bit in the low 32 bits? */
954                 "   jnz  0f\n"
955                 "   aghi %0,32\n"
956                 "   srlg %1,%1,32\n"
957                 "0: lghi %2,0xff\n"
958                 "   tmll %1,0xffff\n"
959                 "   jnz  1f\n"
960                 "   aghi %0,16\n"
961                 "   srlg %1,%1,16\n"
962                 "1: tmll %1,0x00ff\n"
963                 "   jnz  2f\n"
964                 "   aghi %0,8\n"
965                 "   srlg %1,%1,8\n"
966                 "2: ngr  %1,%2\n"       /* byte lookup in _sb_findmap */
967                 "   ic   %1,0(%1,%3)\n"
968                 "   algr %0,%1"
969                 : "=&d" (result), "+a" (word), "=&d" (reg)
970                 : "a" (&_sb_findmap) : "cc" );
971         return result;
972 }
973 
974 #endif /* __s390x__ */
975 
976 /*
977  * Every architecture must define this function. It's the fastest
978  * way of searching a 140-bit bitmap where the first 100 bits are
979  * unlikely to be set. It's guaranteed that at least one of the 140
980  * bits is set (otherwise find_first_bit() would return 140).
981  */
982 static inline int sched_find_first_bit(unsigned long *b)
983 {
984         return find_first_bit(b, 140);
985 }
986 
987 /*
988  * ffs: find first bit set. This is defined the same way as
989  * the libc and compiler builtin ffs routines, therefore
990  * differs in spirit from the above ffz (man ffs).
991  *
 * Returns 1..32 for the lowest set bit (1-based), 0 if x == 0.
 * Binary search in 16/8/4/2/1-bit steps via tml (test under mask).
 *
 * NOTE(review): %0 is declared write-only ("=&d") yet the asm only
 * ever increments it, relying on the C initialisation r = 1 reaching
 * the register; fls() below uses "+d" for the same pattern. Looks
 * like this should be "+d" too -- confirm against the compiler's
 * handling before changing.
992  */
992 extern inline int ffs (int x)
993 {
994         int r = 1;
995 
996         if (x == 0)
997                 return 0;
998         __asm__("    tml  %1,0xffff\n"
999                 "    jnz  0f\n"
1000                 "    srl  %1,16\n"
1001                 "    ahi  %0,16\n"
1002                 "0:  tml  %1,0x00ff\n"
1003                 "    jnz  1f\n"
1004                 "    srl  %1,8\n"
1005                 "    ahi  %0,8\n"
1006                 "1:  tml  %1,0x000f\n"
1007                 "    jnz  2f\n"
1008                 "    srl  %1,4\n"
1009                 "    ahi  %0,4\n"
1010                 "2:  tml  %1,0x0003\n"
1011                 "    jnz  3f\n"
1012                 "    srl  %1,2\n"
1013                 "    ahi  %0,2\n"
1014                 "3:  tml  %1,0x0001\n"
1015                 "    jnz  4f\n"
1016                 "    ahi  %0,1\n"
1017                 "4:"
1018                 : "=&d" (r), "+d" (x) : : "cc" );
1019         return r;
1020 }
1021 
1022 /*
1023  * fls: find last bit set.
1024  *
 * Returns 32..1 for the highest set bit (1-based), 0 if x == 0.
 * Works from r = 32 downwards: tmh tests the high halfword masks;
 * whenever the tested bits are all zero the value is shifted left
 * and r reduced, halving the search range each step.
1025  */
1025 extern __inline__ int fls(int x)
1026 {
1027         int r = 32;
1028 
1029         if (x == 0)
1030                 return 0;
1031         __asm__("    tmh  %1,0xffff\n"
1032                 "    jz   0f\n"
1033                 "    sll  %1,16\n"
1034                 "    ahi  %0,-16\n"
1035                 "0:  tmh  %1,0xff00\n"
1036                 "    jz   1f\n"
1037                 "    sll  %1,8\n"
1038                 "    ahi  %0,-8\n"
1039                 "1:  tmh  %1,0xf000\n"
1040                 "    jz   2f\n"
1041                 "    sll  %1,4\n"
1042                 "    ahi  %0,-4\n"
1043                 "2:  tmh  %1,0xc000\n"
1044                 "    jz   3f\n"
1045                 "    sll  %1,2\n"
1046                 "    ahi  %0,-2\n"
1047                 "3:  tmh  %1,0x8000\n"
1048                 "    jz   4f\n"
1049                 "    ahi  %0,-1\n"
1050                 "4:"
1051                 : "+d" (r), "+d" (x) : : "cc" );
1052         return r;
1053 }
1054 
1055 /*
1056  * hweightN: returns the hamming weight (i.e. the number
1057  * of bits set) of a N-bit word
1058  *
 * hweight64 is a GNU statement expression: it evaluates x exactly
 * once (into __x) and sums the popcounts of the two 32-bit halves
 * using the generic helper.  The 8/16/32-bit variants map straight
 * onto the generic implementations.
 */
1059 #define hweight64(x)                                            \
1060 ({                                                              \
1061         unsigned long __x = (x);                                \
1062         unsigned int __w;                                       \
1063         __w = generic_hweight32((unsigned int) __x);            \
1064         __w += generic_hweight32((unsigned int) (__x>>32));     \
1065         __w;                                                    \
1066 })
1067 #define hweight32(x) generic_hweight32(x)
1068 #define hweight16(x) generic_hweight16(x)
1069 #define hweight8(x) generic_hweight8(x)
1070 
1071 
1072 #ifdef __KERNEL__
1073 
1074 /*
1075  * ATTENTION: intel byte ordering convention for ext2 and minix !!
1076  * bit 0 is the LSB of addr; bit 31 is the MSB of addr;
1077  * bit 32 is the LSB of (addr+4).
1078  * That combined with the little endian byte order of Intel gives the
1079  * following bit order in memory:
1080  *    07 06 05 04 03 02 01 00 15 14 13 12 11 10 09 08 \
1081  *    23 22 21 20 19 18 17 16 31 30 29 28 27 26 25 24
1082  *
 * The macros below translate a little-endian (on-disk) bit number
 * into the native big-endian numbering before calling the regular
 * atomic bitops: XOR-ing nr with (__BITOPS_WORDSIZE - 8) flips the
 * byte index within the word (all bits of the byte-index field)
 * while leaving the bit-within-byte position (low 3 bits) intact.
 */
1083 
1084 #define ext2_set_bit(nr, addr)       \
1085         test_and_set_bit((nr)^(__BITOPS_WORDSIZE - 8), (unsigned long *)addr)
1086 #define ext2_set_bit_atomic(lock, nr, addr)       \
1087         test_and_set_bit((nr)^(__BITOPS_WORDSIZE - 8), (unsigned long *)addr)
1088 #define ext2_clear_bit(nr, addr)     \
1089         test_and_clear_bit((nr)^(__BITOPS_WORDSIZE - 8), (unsigned long *)addr)
1090 #define ext2_clear_bit_atomic(lock, nr, addr)     \
1091         test_and_clear_bit((nr)^(__BITOPS_WORDSIZE - 8), (unsigned long *)addr)
1092 #define ext2_test_bit(nr, addr)      \
1093         test_bit((nr)^(__BITOPS_WORDSIZE - 8), (unsigned long *)addr)
1094 
1095 #ifndef __s390x__
1096 
/*
 * ext2_find_first_zero_bit - find the first zero bit in a bitmap
 * stored in little-endian (ext2 on-disk) bit order.  31 bit version.
 * @vaddr: bitmap to search
 * @size: number of valid bits in the bitmap
 *
 * Returns the little-endian bit number of the first zero bit, or
 * @size if the bitmap is all ones.  Scans word-wise against an
 * all-ones comparand; within the hit word it starts from the highest
 * byte offset (+24) and walks downwards (tmh/tml with "jo" = branch
 * if all ones) so the resulting bit number follows the little-endian
 * convention documented above.
 */
1097 static inline int 
1098 ext2_find_first_zero_bit(void *vaddr, unsigned size)
1099 {
1100         unsigned long cmp, count;
1101         int res;
1102 
1103         if (!size)
1104                 return 0;
1105         __asm__("   lhi  %1,-1\n"       /* all-ones comparand */
1106                 "   lr   %2,%3\n"
1107                 "   ahi  %2,31\n"       /* count = (size + 31) / 32 words */
1108                 "   srl  %2,5\n"
1109                 "   slr  %0,%0\n"
1110                 "0: cl   %1,0(%0,%4)\n"
1111                 "   jne  1f\n"
1112                 "   ahi  %0,4\n"
1113                 "   brct %2,0b\n"
1114                 "   lr   %0,%3\n"       /* no zero found: result = size */
1115                 "   j    4f\n"
1116                 "1: l    %2,0(%0,%4)\n"
1117                 "   sll  %0,3\n"        /* byte offset -> bit offset */
1118                 "   ahi  %0,24\n"       /* start at the highest byte (LE order) */
1119                 "   lhi  %1,0xff\n"
1120                 "   tmh  %2,0xffff\n"
1121                 "   jo   2f\n"          /* jo: branch if tested bits all ones */
1122                 "   ahi  %0,-16\n"
1123                 "   srl  %2,16\n"
1124                 "2: tml  %2,0xff00\n"
1125                 "   jo   3f\n"
1126                 "   ahi  %0,-8\n"
1127                 "   srl  %2,8\n"
1128                 "3: nr   %2,%1\n"       /* byte lookup in _zb_findmap */
1129                 "   ic   %2,0(%2,%5)\n"
1130                 "   alr  %0,%2\n"
1131                 "4:"
1132                 : "=&a" (res), "=&d" (cmp), "=&a" (count)
1133                 : "a" (size), "a" (vaddr), "a" (&_zb_findmap) : "cc" );
1134         return (res < size) ? res : size;
1135 }
1136 
/*
 * ext2_find_next_zero_bit - find the first zero bit at or after
 * @offset in a little-endian (ext2 on-disk) bitmap.  31 bit version.
 * @vaddr: bitmap to search
 * @size: number of valid bits in the bitmap
 * @offset: little-endian bit number to start searching at
 *
 * The partial first word is loaded byte-reversed (the ic/icm sequence
 * assembles the four bytes in little-endian order) so the search can
 * proceed from the low bits.  Shifting right by @bit brings in zero
 * bits at the top; a match at position >= 32 therefore means "no real
 * zero in this word" and the scan falls through to
 * ext2_find_first_zero_bit() on the remaining full words.
 */
1137 static inline int 
1138 ext2_find_next_zero_bit(void *vaddr, unsigned size, unsigned offset)
1139 {
1140         unsigned long *addr = vaddr;
1141         unsigned long *p = addr + (offset >> 5);
1142         unsigned long word, reg;
1143         int bit = offset & 31UL, res;
1144 
1145         if (offset >= size)
1146                 return size;
1147 
1148         if (bit) {
1149                 __asm__("   ic   %0,0(%1)\n"    /* byte-reversed 32-bit load */
1150                         "   icm  %0,2,1(%1)\n"
1151                         "   icm  %0,4,2(%1)\n"
1152                         "   icm  %0,8,3(%1)"
1153                         : "=&a" (word) : "a" (p) : "cc" );
1154                 word >>= bit;
1155                 res = bit;
1156                 /* Look for zero in first longword */
1157                 __asm__("   lhi  %2,0xff\n"
1158                         "   tml  %1,0xffff\n"
1159                         "   jno  0f\n"          /* jno: branch unless all ones */
1160                         "   ahi  %0,16\n"
1161                         "   srl  %1,16\n"
1162                         "0: tml  %1,0x00ff\n"
1163                         "   jno  1f\n"
1164                         "   ahi  %0,8\n"
1165                         "   srl  %1,8\n"
1166                         "1: nr   %1,%2\n"       /* byte lookup in _zb_findmap */
1167                         "   ic   %1,0(%1,%3)\n"
1168                         "   alr  %0,%1"
1169                         : "+&d" (res), "+&a" (word), "=&d" (reg)
1170                         : "a" (&_zb_findmap) : "cc" );
1171                 if (res < 32)
1172                         return (p - addr)*32 + res;
1173                 p++;
1174         }
1175         /* No zero yet, search remaining full bytes for a zero */
1176         res = ext2_find_first_zero_bit (p, size - 32 * (p - addr));
1177         return (p - addr) * 32 + res;
1178 }
1179 
1180 #else /* __s390x__ */
1181 
/*
 * ext2_find_first_zero_bit - find the first zero bit in a bitmap
 * stored in little-endian (ext2 on-disk) bit order.  64 bit version.
 * @vaddr: bitmap to search
 * @size: number of valid bits in the bitmap
 *
 * Returns the little-endian bit number of the first zero bit, or
 * @size if the bitmap is all ones.  Scans doubleword-wise against an
 * all-ones comparand, then narrows to the 32-bit half, and finally
 * walks the word from the highest byte offset downwards ("jo" =
 * branch if all ones) to honour the little-endian bit numbering.
 */
1182 static inline unsigned long
1183 ext2_find_first_zero_bit(void *vaddr, unsigned long size)
1184 {
1185         unsigned long res, cmp, count;
1186 
1187         if (!size)
1188                 return 0;
1189         __asm__("   lghi  %1,-1\n"      /* all-ones comparand */
1190                 "   lgr   %2,%3\n"
1191                 "   aghi  %2,63\n"      /* count = (size + 63) / 64 doublewords */
1192                 "   srlg  %2,%2,6\n"
1193                 "   slgr  %0,%0\n"
1194                 "0: clg   %1,0(%0,%4)\n"
1195                 "   jne   1f\n"
1196                 "   aghi  %0,8\n"
1197                 "   brct  %2,0b\n"
1198                 "   lgr   %0,%3\n"      /* no zero found: result = size */
1199                 "   j     5f\n"
1200                 "1: cl    %1,0(%0,%4)\n"        /* zero in the first 4 bytes? */
1201                 "   jne   2f\n"
1202                 "   aghi  %0,4\n"
1203                 "2: l     %2,0(%0,%4)\n"
1204                 "   sllg  %0,%0,3\n"    /* byte offset -> bit offset */
1205                 "   aghi  %0,24\n"      /* start at the highest byte (LE order) */
1206                 "   lghi  %1,0xff\n"
1207                 "   tmlh  %2,0xffff\n"
1208                 "   jo    3f\n"         /* jo: branch if tested bits all ones */
1209                 "   aghi  %0,-16\n"
1210                 "   srl   %2,16\n"
1211                 "3: tmll  %2,0xff00\n"
1212                 "   jo    4f\n"
1213                 "   aghi  %0,-8\n"
1214                 "   srl   %2,8\n"
1215                 "4: ngr   %2,%1\n"      /* byte lookup in _zb_findmap */
1216                 "   ic    %2,0(%2,%5)\n"
1217                 "   algr  %0,%2\n"
1218                 "5:"
1219                 : "=&a" (res), "=&d" (cmp), "=&a" (count)
1220                 : "a" (size), "a" (vaddr), "a" (&_zb_findmap) : "cc" );
1221         return (res < size) ? res : size;
1222 }
1223 
/*
 * ext2_find_next_zero_bit - find the first zero bit at or after
 * @offset in a little-endian (ext2 on-disk) bitmap.  64 bit version.
 * @vaddr: bitmap to search
 * @size: number of valid bits in the bitmap
 * @offset: little-endian bit number to start searching at
 *
 * The partial first doubleword is loaded byte-reversed with lrvg, so
 * the little-endian bitmap can be scanned from the register's low
 * bits.  Shifting right by @bit brings in zero bits at the top; a
 * match at position >= 64 means "no real zero in this word" and the
 * scan continues with ext2_find_first_zero_bit() on the rest.
 *
 * The asm walks the 64-bit word in 16-bit steps; labels 0: and 1: are
 * fall-through only (no branch targets them).
 */
1224 static inline unsigned long
1225 ext2_find_next_zero_bit(void *vaddr, unsigned long size, unsigned long offset)
1226 {
1227         unsigned long *addr = vaddr;
1228         unsigned long *p = addr + (offset >> 6);
1229         unsigned long word, reg;
1230         unsigned long bit = offset & 63UL, res;
1231 
1232         if (offset >= size)
1233                 return size;
1234 
1235         if (bit) {
1236                 __asm__("   lrvg %0,%1" /* load reversed, neat instruction */
1237                         : "=a" (word) : "m" (*p) );
1238                 word >>= bit;
1239                 res = bit;
1240                 /* Look for zero in first 8 byte word */
1241                 __asm__("   lghi %2,0xff\n"
1242                         "   tmll %1,0xffff\n"
1243                         "   jno  2f\n"          /* jno: branch unless all ones */
1244                         "   ahi  %0,16\n"
1245                         "   srlg %1,%1,16\n"
1246                         "0: tmll %1,0xffff\n"
1247                         "   jno  2f\n"
1248                         "   ahi  %0,16\n"
1249                         "   srlg %1,%1,16\n"
1250                         "1: tmll %1,0xffff\n"
1251                         "   jno  2f\n"
1252                         "   ahi  %0,16\n"
1253                         "   srl  %1,16\n"
1254                         "2: tmll %1,0x00ff\n"
1255                         "   jno  3f\n"
1256                         "   ahi  %0,8\n"
1257                         "   srl  %1,8\n"
1258                         "3: ngr  %1,%2\n"       /* byte lookup in _zb_findmap */
1259                         "   ic   %1,0(%1,%3)\n"
1260                         "   alr  %0,%1"
1261                         : "+&d" (res), "+a" (word), "=&d" (reg)
1262                         : "a" (&_zb_findmap) : "cc" );
1263                 if (res < 64)
1264                         return (p - addr)*64 + res;
1265                 p++;
1266         }
1267         /* No zero yet, search remaining full bytes for a zero */
1268         res = ext2_find_first_zero_bit (p, size - 64 * (p - addr));
1269         return (p - addr) * 64 + res;
1270 }
1271 
1272 #endif /* __s390x__ */
1273 
1274 /* Bitmap functions for the minix filesystem.  */
1275 /* FIXME !!! */
/*
 * NOTE(review): these map directly onto the native (big-endian)
 * bitops, with no little-endian bit-number translation like the
 * ext2_* macros above perform -- presumably the reason for the
 * FIXME, since the minix on-disk format uses the Intel bit order
 * documented earlier in this file.  Confirm before sharing minix
 * volumes with little-endian hosts.
 */
1276 #define minix_test_and_set_bit(nr,addr) test_and_set_bit(nr,addr)
1277 #define minix_set_bit(nr,addr) set_bit(nr,addr)
1278 #define minix_test_and_clear_bit(nr,addr) test_and_clear_bit(nr,addr)
1279 #define minix_test_bit(nr,addr) test_bit(nr,addr)
1280 #define minix_find_first_zero_bit(addr,size) find_first_zero_bit(addr,size)
1281 
1282 #endif /* __KERNEL__ */
1283 
1284 #endif /* _S390_BITOPS_H */
1285 

~ [ source navigation ] ~ [ diff markup ] ~ [ identifier search ] ~

kernel.org | git.kernel.org | LWN.net | Project Home | Wiki (Japanese) | Wiki (English) | SVN repository | Mail admin

Linux® is a registered trademark of Linus Torvalds in the United States and other countries.
TOMOYO® is a registered trademark of NTT DATA CORPORATION.

osdn.jp