/** * local_sub_and_test - subtract value from variable and test result * @i: integer value to subtract * @l: pointer to type local_t * * Atomically subtracts @i from @l and returns * true if the result is zero, or false for all * other cases.
*/ staticinlinebool local_sub_and_test(long i, local_t *l)
{ return GEN_BINARY_RMWcc(_ASM_SUB, l->a.counter, e, "er", i);
}
/** * local_dec_and_test - decrement and test * @l: pointer to type local_t * * Atomically decrements @l by 1 and * returns true if the result is 0, or false for all other * cases.
*/ staticinlinebool local_dec_and_test(local_t *l)
{ return GEN_UNARY_RMWcc(_ASM_DEC, l->a.counter, e);
}
/** * local_inc_and_test - increment and test * @l: pointer to type local_t * * Atomically increments @l by 1 * and returns true if the result is zero, or false for all * other cases.
*/ staticinlinebool local_inc_and_test(local_t *l)
{ return GEN_UNARY_RMWcc(_ASM_INC, l->a.counter, e);
}
/** * local_add_negative - add and test if negative * @i: integer value to add * @l: pointer to type local_t * * Atomically adds @i to @l and returns true * if the result is negative, or false when * result is greater than or equal to zero.
*/ staticinlinebool local_add_negative(long i, local_t *l)
{ return GEN_BINARY_RMWcc(_ASM_ADD, l->a.counter, s, "er", i);
}
/**
 * local_add_return - add and return
 * @i: integer value to add
 * @l: pointer to type local_t
 *
 * Atomically adds @i to @l and returns @i + @l
 */
static inline long local_add_return(long i, local_t *l)
{
	long __i = i;

	/*
	 * XADD exchanges the addend with the old counter value and stores
	 * the sum; afterwards @i holds the previous counter, so the new
	 * value is the old counter plus the saved addend.
	 */
	asm volatile(_ASM_XADD "%0, %1;"
		     : "+r" (i), "+m" (l->a.counter)
		     : : "memory");
	return i + __i;
}
/**
 * local_sub_return - subtract and return
 * @i: integer value to subtract
 * @l: pointer to type local_t
 *
 * Atomically subtracts @i from @l and returns the new value;
 * implemented as an add of the negated operand.
 */
static inline long local_sub_return(long i, local_t *l)
{
	return local_add_return(-i, l);
}
/*
 * Implement local_xchg using CMPXCHG instruction without the LOCK prefix.
 * XCHG is expensive due to the implied LOCK prefix. The processor
 * cannot prefetch cachelines if XCHG is used.
 */
static __always_inline long
local_xchg(local_t *l, long n)
{
	long old = local_read(l);

	/*
	 * Retry until the compare-exchange installs @n; on failure
	 * local_try_cmpxchg refreshes @old with the current counter.
	 */
	while (!local_try_cmpxchg(l, &old, n))
		;

	return old;
}
/**
 * local_add_unless - add unless the number is already a given value
 * @l: pointer of type local_t
 * @a: the amount to add to l...
 * @u: ...unless l is equal to u.
 *
 * Atomically adds @a to @l, if @l was not already @u.
 * Returns true if the addition was done.
 */
static __always_inline bool
local_add_unless(local_t *l, long a, long u)
{
	long c = local_read(l);

	do {
		if (unlikely(c == u))
			return false;
	} while (!local_try_cmpxchg(l, &c, c + a));

	/* cmpxchg succeeded: the addition was performed. */
	return true;
}
/*
 * On x86_32, these are no better than the atomic variants.
 * On x86-64 these are better than the atomic variants on SMP kernels
 * because they don't use a lock prefix.
 */
#define __local_inc(l)		local_inc(l)
#define __local_dec(l)		local_dec(l)
#define __local_add(i, l)	local_add((i), (l))
#define __local_sub(i, l)	local_sub((i), (l))

#endif /* _ASM_X86_LOCAL_H */
Messung V0.5
¤ Dauer der Verarbeitung: 0.1 Sekunden
(vorverarbeitet)
¤
Die Informationen auf dieser Webseite wurden
nach bestem Wissen sorgfältig zusammengestellt. Es wird jedoch weder Vollständigkeit, noch Richtigkeit,
noch Qualität der bereit gestellten Informationen zugesichert.
Bemerkung:
Die farbliche Syntaxdarstellung und die Messung sind noch experimentell.