/*
 * The bit modifying instructions on SH-2A are only capable of working
 * with a 3-bit immediate, which signifies the shift position for the bit
 * being worked on.
 *
 * BYTE_NUMBER() yields the byte within the long that holds bit @nr, and
 * BYTE_OFFSET() the bit position inside that byte — the form the SH-2A
 * bit instructions (bset.b/bclr.b/bxor.b) expect.  On big-endian the bit
 * index is swizzled so that little-endian bit numbering still maps to
 * the correct byte.
 */
#if defined(__BIG_ENDIAN)
#define BITOP_LE_SWIZZLE	((BITS_PER_LONG-1) & ~0x7)
#define BYTE_NUMBER(nr)		(((nr) ^ BITOP_LE_SWIZZLE) / BITS_PER_BYTE)
#define BYTE_OFFSET(nr)		(((nr) ^ BITOP_LE_SWIZZLE) % BITS_PER_BYTE)
#else
#define BYTE_NUMBER(nr)		((nr) / BITS_PER_BYTE)
#define BYTE_OFFSET(nr)		((nr) % BITS_PER_BYTE)
#endif
/**
 * arch___change_bit - Toggle a bit in memory
 * @nr: the bit to change
 * @addr: the address to start counting from
 *
 * Unlike change_bit(), this function is non-atomic and may be reordered.
 * If it's called on the same region of memory simultaneously, the effect
 * may be that only one operation succeeds.
 */
static __always_inline void
arch___change_bit(unsigned long nr, volatile unsigned long *addr)
{
	if (__builtin_constant_p(nr)) {
		/*
		 * @nr is known at compile time, so the byte offset and the
		 * 3-bit in-byte position can be emitted as immediates for a
		 * single bxor.b instruction.
		 */
		__asm__ __volatile__ (
			"bxor.b %1, @(%O2,%0)	! __change_bit\n\t"
			: "+r" (addr)
			: "i" (BYTE_OFFSET(nr)), "i" (BYTE_NUMBER(nr))
			: "t", "memory"
		);
	} else {
		/* Variable bit number: fall back to a generic RMW on the word. */
		unsigned long mask = BIT_MASK(nr);
		unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);

		*p ^= mask;
	}
}
/**
 * arch___test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is non-atomic and can be reordered.
 * If two examples of this operation race, one can appear to succeed
 * but actually fail.  You must protect multiple accesses with a lock.
 */
static __always_inline bool
arch___test_and_set_bit(unsigned long nr, volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
	unsigned long old = *p;

	*p = old | mask;
	return (old & mask) != 0;
}
/**
 * arch___test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * This operation is non-atomic and can be reordered.
 * If two examples of this operation race, one can appear to succeed
 * but actually fail.  You must protect multiple accesses with a lock.
 */
static __always_inline bool
arch___test_and_clear_bit(unsigned long nr, volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
	unsigned long old = *p;

	*p = old & ~mask;
	return (old & mask) != 0;
}
/*
 * arch___test_and_change_bit - Toggle a bit and return its old value
 * @nr: Bit to toggle
 * @addr: Address to count from
 *
 * WARNING: non atomic and it can be reordered!
 *
 * NOTE(review): the tail of this function was corrupted in the source
 * (overwritten by unrelated prose); the toggle and return statements are
 * reconstructed from the sibling arch___test_and_{set,clear}_bit pattern.
 */
static __always_inline bool
arch___test_and_change_bit(unsigned long nr, volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
	unsigned long old = *p;

	*p = old ^ mask;
	return (old & mask) != 0;
}