author     Stefan Hajnoczi <stefanha@redhat.com>   2014-12-02 12:23:15 +0100
committer  Paolo Bonzini <pbonzini@redhat.com>     2015-06-05 17:10:00 +0200
commit     36546e5b803f6e363906607307f27c489441fd15
tree       ab898dc907aa2a93f39ec00117649bde2dd27e0a /util
parent     bitmap: add atomic set functions
bitmap: add atomic test and clear
The new bitmap_test_and_clear_atomic() function atomically clears a range of
bits and returns whether or not any of them were set.

Signed-off-by: Stefan Hajnoczi <stefanha@redhat.com>
Message-Id: <1417519399-3166-3-git-send-email-stefanha@redhat.com>
[Test before xchg; then a full barrier is needed at the end, just like in
 the previous patch.  The barrier can be avoided if we did at least one
 xchg. - Paolo]
Reviewed-by: Fam Zheng <famz@redhat.com>
Signed-off-by: Paolo Bonzini <pbonzini@redhat.com>
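For context, a hypothetical caller-side sketch (not part of this patch; the
sync_dirty_range() name and the dirty-page scenario are assumptions) of how
the new primitive is meant to be consumed: clear a whole range of bits in one
call and learn whether anything in that range had been set.

/* Hypothetical caller sketch, not QEMU code. */
#include <stdbool.h>

/* Provided by util/bitmap.c after this patch. */
extern bool bitmap_test_and_clear_atomic(unsigned long *map, long start, long nr);

static void sync_dirty_range(unsigned long *dirty_bitmap, long start, long nr)
{
    /* Atomically clear [start, start + nr) and learn if any bit was set. */
    if (bitmap_test_and_clear_atomic(dirty_bitmap, start, nr)) {
        /* At least one bit in the range was set since the last sync;
         * a real caller would process the corresponding pages here. */
    }
}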
Diffstat (limited to 'util')
 util/bitmap.c | 45 +++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 45 insertions, 0 deletions
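The added function is built on QEMU's atomic_fetch_and()/atomic_xchg()
helpers and smp_mb(). The stand-alone sketch below approximates those
word-level operations with C11 <stdatomic.h>; it illustrates only the
semantics and is not QEMU's actual atomic.h implementation.

/* Stand-alone approximation of the primitives used below; not QEMU code. */
#include <stdatomic.h>
#include <stdio.h>

int main(void)
{
    _Atomic unsigned long word = 0xf0f0UL;

    /* Partial word: AND out a mask and return the previous value
     * (comparable to atomic_fetch_and(p, ~mask_to_clear)). */
    unsigned long old = atomic_fetch_and_explicit(&word, ~0xf0UL,
                                                  memory_order_seq_cst);
    printf("old=%#lx, bits were set: %d\n", old, (old & 0xf0UL) != 0);

    /* Full word: swap in zero and return the previous value
     * (comparable to atomic_xchg(p, 0)). */
    old = atomic_exchange_explicit(&word, 0UL, memory_order_seq_cst);
    printf("old=%#lx\n", old);

    /* Full barrier, comparable to smp_mb() on the path where no xchg
     * was performed and nothing was found dirty. */
    atomic_thread_fence(memory_order_seq_cst);
    return 0;
}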
diff --git a/util/bitmap.c b/util/bitmap.c
index 39994af675..300a68e38c 100644
--- a/util/bitmap.c
+++ b/util/bitmap.c
@@ -235,6 +235,51 @@ void bitmap_clear(unsigned long *map, long start, long nr)
}
}
+bool bitmap_test_and_clear_atomic(unsigned long *map, long start, long nr)
+{
+ unsigned long *p = map + BIT_WORD(start);
+ const long size = start + nr;
+ int bits_to_clear = BITS_PER_LONG - (start % BITS_PER_LONG);
+ unsigned long mask_to_clear = BITMAP_FIRST_WORD_MASK(start);
+ unsigned long dirty = 0;
+ unsigned long old_bits;
+
+ /* First word */
+ if (nr - bits_to_clear > 0) {
+ old_bits = atomic_fetch_and(p, ~mask_to_clear);
+ dirty |= old_bits & mask_to_clear;
+ nr -= bits_to_clear;
+ bits_to_clear = BITS_PER_LONG;
+ mask_to_clear = ~0UL;
+ p++;
+ }
+
+ /* Full words */
+ if (bits_to_clear == BITS_PER_LONG) {
+ while (nr >= BITS_PER_LONG) {
+ if (*p) {
+ old_bits = atomic_xchg(p, 0);
+ dirty |= old_bits;
+ }
+ nr -= BITS_PER_LONG;
+ p++;
+ }
+ }
+
+ /* Last word */
+ if (nr) {
+ mask_to_clear &= BITMAP_LAST_WORD_MASK(size);
+ old_bits = atomic_fetch_and(p, ~mask_to_clear);
+ dirty |= old_bits & mask_to_clear;
+ } else {
+ if (!dirty) {
+ smp_mb();
+ }
+ }
+
+ return dirty != 0;
+}
+
#define ALIGN_MASK(x,mask) (((x)+(mask))&~(mask))
/**