/* * Copyright 1992, Linus Torvalds. * * Note: inlines with more than a single statement should be marked * __always_inline to avoid problems with older gcc's inlining heuristics.
*/
#ifndef _LINUX_BITOPS_H #error only <linux/bitops.h> can be included directly #endif
/* * These have to be done with inline assembly: that way the bit-setting * is guaranteed to be atomic. All bit operations return 0 if the bit * was cleared before the operation and != 0 if it was not. * * bit 0 is the LSB of addr; bit 32 is the LSB of (addr+1).
*/
/*
 * We do the locked ops that don't return the old value as
 * a mask operation on a byte.
 */
/* Byte address containing bit @nr, relative to @addr (bit 0 = LSB of byte 0). */
#define CONST_MASK_ADDR(nr, addr)	WBYTE_ADDR((void *)(addr) + ((nr) >> 3))
/* Single-byte mask selecting bit @nr within its byte. */
#define CONST_MASK(nr)			(1 << ((nr) & 7))
/* * Note: the operation is performed atomically with respect to * the local CPU, but not other CPUs. Portable code should not * rely on this behaviour. * KVM relies on this behaviour on x86 for modifying memory that is also * accessed from a hypervisor on the same CPU if running in a VM: don't change * this without also updating arch/x86/kernel/kvm.c
*/ static __always_inline bool
arch___test_and_clear_bit(unsignedlong nr, volatileunsignedlong *addr)
{ bool oldbit;
/**
 * __ffs - find first set bit in word
 * @word: The word to search
 *
 * Undefined if no bit exists, so code should check against 0 first.
 *
 * Constant inputs fold to __builtin_ctzl() at compile time; runtime
 * values go through the out-of-line variable__ffs() helper.
 */
#define __ffs(word)				\
	(__builtin_constant_p(word) ?		\
	 (unsigned long)__builtin_ctzl(word) :	\
	 variable__ffs(word))
/**
 * ffz - find first zero bit in word
 * @word: The word to search
 *
 * Undefined if no zero exists, so code should check against ~0UL first.
 *
 * Implemented as "find first set bit of the complement". The argument is
 * parenthesized before ~ so expressions like ffz(a | b) expand correctly.
 */
#define ffz(word)				\
	(__builtin_constant_p(word) ?		\
	 (unsigned long)__builtin_ctzl(~(word)) : \
	 variable_ffz(word))
/* * __fls: find last set bit in word * @word: The word to search * * Undefined if no set bit exists, so code should check against 0 first.
*/ static __always_inline unsignedlong __fls(unsignedlong word)
{ if (__builtin_constant_p(word)) return BITS_PER_LONG - 1 - __builtin_clzl(word);
#ifdef __KERNEL__
/*
 * variable_ffs - runtime "find first set" helper for the ffs() macro
 * @x: the word to search
 *
 * Returns 1 + index of the least significant set bit, or 0 if @x == 0
 * (BSF leaves/loads -1 in the destination on zero input in the paths
 * below, so r + 1 yields 0).
 */
static __always_inline int variable_ffs(int x)
{
	int r;

#ifdef CONFIG_X86_64
	/*
	 * AMD64 says BSFL won't clobber the dest reg if x==0; Intel64 says the
	 * dest reg is undefined if x==0, but their CPU architect says its
	 * value is written to set it to the same as before, except that the
	 * top 32 bits will be cleared.
	 *
	 * We cannot do this on 32 bits because at the very least some
	 * 486 CPUs did not behave this way.
	 */
	asm("bsfl %1,%0"
	    : "=r" (r)
	    : ASM_INPUT_RM (x), "0" (-1));
#elif defined(CONFIG_X86_CMOV)
	/* Use CMOVZ to substitute -1 when BSF reports a zero input. */
	asm("bsfl %1,%0\n\t"
	    "cmovzl %2,%0"
	    : "=&r" (r) : "rm" (x), "r" (-1));
#else
	/* No CMOV: branch around an explicit -1 load on zero input. */
	asm("bsfl %1,%0\n\t"
	    "jnz 1f\n\t"
	    "movl $-1,%0\n"
	    "1:" : "=r" (r) : "rm" (x));
#endif
	return r + 1;
}
/**
 * ffs - find first set bit in word
 * @x: the word to search
 *
 * This is defined the same way as the libc and compiler builtin ffs
 * routines, therefore differs in spirit from the other bitops.
 *
 * ffs(value) returns 0 if value is 0 or the position of the first
 * set bit if value is nonzero. The first (least significant) bit
 * is at position 1.
 *
 * Compile-time constants are resolved via the compiler builtin;
 * everything else dispatches to variable_ffs().
 */
#define ffs(x)				\
	(__builtin_constant_p(x) ?	\
	 __builtin_ffs(x) :		\
	 variable_ffs(x))
/** * fls - find last set bit in word * @x: the word to search * * This is defined in a similar way as the libc and compiler builtin * ffs, but returns the position of the most significant set bit. * * fls(value) returns 0 if value is 0 or the position of the last * set bit if value is nonzero. The last (most significant) bit is * at position 32.
*/ static __always_inline int fls(unsignedint x)
{ int r;
if (__builtin_constant_p(x)) return x ? 32 - __builtin_clz(x) : 0;
#ifdef CONFIG_X86_64 /* * AMD64 says BSRL won't clobber the dest reg if x==0; Intel64 says the * dest reg is undefined if x==0, but their CPU architect says its * value is written to set it to the same as before, except that the * top 32 bits will be cleared. * * We cannot do this on 32 bits because at the very least some * 486 CPUs did not behave this way.
*/ asm("bsrl %1,%0"
: "=r" (r)
: ASM_INPUT_RM (x), "0" (-1)); #elifdefined(CONFIG_X86_CMOV) asm("bsrl %1,%0\n\t" "cmovzl %2,%0"
: "=&r" (r) : "rm" (x), "rm" (-1)); #else asm("bsrl %1,%0\n\t" "jnz 1f\n\t" "movl $-1,%0\n" "1:" : "=r" (r) : "rm" (x)); #endif return r + 1;
}
/** * fls64 - find last set bit in a 64-bit word * @x: the word to search * * This is defined in a similar way as the libc and compiler builtin * ffsll, but returns the position of the most significant set bit. * * fls64(value) returns 0 if value is 0 or the position of the last * set bit if value is nonzero. The last (most significant) bit is * at position 64.
*/ #ifdef CONFIG_X86_64 static __always_inline int fls64(__u64 x)
{ int bitpos = -1;
if (__builtin_constant_p(x)) return x ? 64 - __builtin_clzll(x) : 0; /* * AMD64 says BSRQ won't clobber the dest reg if x==0; Intel64 says the * dest reg is undefined if x==0, but their CPU architect says its * value is written to set it to the same as before.
*/ asm("bsrq %1,%q0"
: "+r" (bitpos)
: ASM_INPUT_RM (x)); return bitpos + 1;
} #else #include <asm-generic/bitops/fls64.h> #endif
/*
 * NOTE(review): the following text is extraction residue — a German website
 * disclaimer appended to the header, not part of the original source. It is
 * preserved here as a comment so the file remains compilable:
 * "The information on this website was compiled carefully and to the best of
 * our knowledge. However, neither completeness, correctness, nor quality of
 * the provided information is guaranteed. Note: the colored syntax display
 * and the measurement are still experimental."
 */