author		Michael Brown	2016-03-16 22:18:33 +0100
committer	Michael Brown	2016-03-16 23:33:40 +0100
commit		c867b5ab1ff00fae7f5d89bc0c5f273c40f37f90 (patch)
tree		cd90bd4d94a1576c9cb15aa9ced285373ad8adee /src/arch
parent		[pseudobit] Rename bitops.h to pseudobit.h (diff)
download	ipxe-c867b5ab1ff00fae7f5d89bc0c5f273c40f37f90.tar.gz
		ipxe-c867b5ab1ff00fae7f5d89bc0c5f273c40f37f90.tar.xz
		ipxe-c867b5ab1ff00fae7f5d89bc0c5f273c40f37f90.zip
[bitops] Add generic atomic bit test, set, and clear functions
Signed-off-by: Michael Brown <mcb30@ipxe.org>
Diffstat (limited to 'src/arch')
-rw-r--r--	src/arch/x86/include/bits/bitops.h	94
1 file changed, 94 insertions, 0 deletions
diff --git a/src/arch/x86/include/bits/bitops.h b/src/arch/x86/include/bits/bitops.h
new file mode 100644
index 00000000..17dcf102
--- /dev/null
+++ b/src/arch/x86/include/bits/bitops.h
@@ -0,0 +1,94 @@
+#ifndef _BITS_BITOPS_H
+#define _BITS_BITOPS_H
+
+/** @file
+ *
+ * x86 bit operations
+ *
+ * We perform atomic bit set and bit clear operations using "lock bts"
+ * and "lock btr". We use the output constraint to inform the
+ * compiler that any memory from the start of the bit field up to and
+ * including the byte containing the bit may be modified. (This is
+ * overkill but shouldn't matter in practice since we're unlikely to
+ * subsequently read other bits from the same bit field.)
+ */
+
+FILE_LICENCE ( GPL2_OR_LATER_OR_UBDL );
+
+#include <stdint.h>
+
+/**
+ * Set bit atomically
+ *
+ * @v bit Bit to set
+ * @v bits Bit field
+ */
+static inline __attribute__ (( always_inline )) void
+set_bit ( unsigned int bit, volatile void *bits ) {
+	volatile struct {
+		uint8_t byte[ ( bit / 8 ) + 1 ];
+	} *bytes = bits;
+
+	__asm__ __volatile__ ( "lock bts %1, %0"
+			       : "+m" ( *bytes ) : "Ir" ( bit ) );
+}
+
+/**
+ * Clear bit atomically
+ *
+ * @v bit Bit to clear
+ * @v bits Bit field
+ */
+static inline __attribute__ (( always_inline )) void
+clear_bit ( unsigned int bit, volatile void *bits ) {
+	volatile struct {
+		uint8_t byte[ ( bit / 8 ) + 1 ];
+	} *bytes = bits;
+
+	__asm__ __volatile__ ( "lock btr %1, %0"
+			       : "+m" ( *bytes ) : "Ir" ( bit ) );
+}
+
+/**
+ * Test and set bit atomically
+ *
+ * @v bit Bit to set
+ * @v bits Bit field
+ * @ret old Old value of bit (zero or non-zero)
+ */
+static inline __attribute__ (( always_inline )) int
+test_and_set_bit ( unsigned int bit, volatile void *bits ) {
+	volatile struct {
+		uint8_t byte[ ( bit / 8 ) + 1 ];
+	} *bytes = bits;
+	int old;
+
+	__asm__ __volatile__ ( "lock bts %2, %0\n\t"
+			       "sbb %1, %1\n\t"
+			       : "+m" ( *bytes ), "=r" ( old )
+			       : "Ir" ( bit ) );
+	return old;
+}
+
+/**
+ * Test and clear bit atomically
+ *
+ * @v bit Bit to clear
+ * @v bits Bit field
+ * @ret old Old value of bit (zero or non-zero)
+ */
+static inline __attribute__ (( always_inline )) int
+test_and_clear_bit ( unsigned int bit, volatile void *bits ) {
+	volatile struct {
+		uint8_t byte[ ( bit / 8 ) + 1 ];
+	} *bytes = bits;
+	int old;
+
+	__asm__ __volatile__ ( "lock btr %2, %0\n\t"
+			       "sbb %1, %1\n\t"
+			       : "+m" ( *bytes ), "=r" ( old )
+			       : "Ir" ( bit ) );
+	return old;
+}
+
+#endif /* _BITS_BITOPS_H */
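
Usage note (not part of the commit): a minimal sketch of how the new
functions might be called. The "flags" word, the FLAG_BUSY bit number
and the direct #include of <bits/bitops.h> are assumptions made purely
for illustration; none of them appear in this commit.

    #include <stdint.h>
    #include <bits/bitops.h>

    /* Hypothetical flag word and bit number, for illustration only */
    static volatile uint32_t flags;
    #define FLAG_BUSY 0

    static void example ( void ) {
            /* Atomically claim the BUSY bit; a non-zero return value
             * means it was already set by some other code path.
             */
            if ( ! test_and_set_bit ( FLAG_BUSY, &flags ) ) {
                    /* ... do work while the bit is held ... */
                    clear_bit ( FLAG_BUSY, &flags );
            }
    }

On the return value: after "lock bts" or "lock btr" the carry flag
holds the bit's previous value, and the following "sbb %1, %1"
subtracts the output register from itself with borrow, producing 0 if
the bit was clear and -1 if it was set. This is why the test-and-set
and test-and-clear functions are documented as returning "zero or
non-zero" rather than exactly 0 or 1.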