Pull request

-----BEGIN PGP SIGNATURE-----
 
 iQIcBAABAgAGBQJcPoIpAAoJEH3vgQaq/DkONy8P/28kkLogU55oNitKnt8oFxAE
 sfDBnEk+3rJV6L+fPN0TlqDnKDV0iEylpUTsCTEdL4OAtBaR1v0lhQzhGQIE0bHy
 6fk6nEiCgUVhDoQSGGFXqBq/Z6eYu6nsOfPF+SoS1Auv1ISB7KZdHR6K0dRcOUKk
 L3uIGDmop5+c1j5deG+90MYj38wvu7NwD2zt4kiMdYNUQHT4XtE591yvIUVMBo+s
 gE3vunsyu8++iS8NvxM7yJzsZStRoCRvks6/aBuGISfCWuJUpycdI6QpK+phitDs
 SdjN8jlAcY947o3xKwaKI8Brit+xoVDCw/0RaTwh14s4gBj7lI3B+XmlRGHiz/mw
 Ry8bqrzs/wo8Thr263rVr1gBnQQKcb4mIPEa7pqELvdnANdI3kOs78vwBw/wW2/+
 CTxAcrgyHItod5zKyxqiMDcoirtvHF55TIEpVuGhTfixOVQGNazBsP3IqfCCh03C
 Bx8SZEVP9PEV+LB/Y3v4n+xuJaCBrbAA0KQ5XMSuhE/nLatela9gcuwe9QNFnM4p
 G0nN4hQ/mksja1oj5VfRTmx4YrY67PAK+D+TBjUvKu4luUW+2cOTkqSDbRXwhplp
 kFF52alkv6m0SLWHUwOMXsgZBnNl7goIT4so4RfR2X3pH3/yj24ce8df7oKRGXMN
 ASBY5X3kqalOzglPNTp1
 =4Y4H
 -----END PGP SIGNATURE-----

Merge remote-tracking branch 'remotes/jnsnow/tags/bitmaps-pull-request' into staging

Pull request

# gpg: Signature made Wed 16 Jan 2019 01:00:25 GMT
# gpg:                using RSA key 7DEF8106AAFC390E
# gpg: Good signature from "John Snow (John Huston) <jsnow@redhat.com>"
# Primary key fingerprint: FAEB 9711 A12C F475 812F  18F2 88A9 064D 1835 61EB
#      Subkey fingerprint: F9B7 ABDB BCAC DF95 BE76  CBD0 7DEF 8106 AAFC 390E

* remotes/jnsnow/tags/bitmaps-pull-request:
  Revert "hbitmap: Add @advance param to hbitmap_iter_next()"
  Revert "test-hbitmap: Add non-advancing iter_next tests"
  Revert "block/dirty-bitmap: Add bdrv_dirty_iter_next_area"
  block/mirror: fix and improve do_sync_target_write
  tests: add tests for hbitmap_next_dirty_area
  dirty-bitmap: add bdrv_dirty_bitmap_next_dirty_area
  tests: add tests for hbitmap_next_zero with specified end parameter
  dirty-bitmap: improve bdrv_dirty_bitmap_next_zero

Signed-off-by: Peter Maydell <peter.maydell@linaro.org>
This commit is contained in:
Peter Maydell 2019-01-17 12:48:42 +00:00
commit 681d61362d
8 changed files with 262 additions and 121 deletions

View File

@ -385,7 +385,7 @@ static int coroutine_fn backup_run_incremental(BackupBlockJob *job)
HBitmapIter hbi;
hbitmap_iter_init(&hbi, job->copy_bitmap, 0);
while ((cluster = hbitmap_iter_next(&hbi, true)) != -1) {
while ((cluster = hbitmap_iter_next(&hbi)) != -1) {
do {
if (yield_and_check(job)) {
return 0;
@ -422,7 +422,8 @@ static void backup_incremental_init_copy_bitmap(BackupBlockJob *job)
break;
}
offset = bdrv_dirty_bitmap_next_zero(job->sync_bitmap, offset);
offset = bdrv_dirty_bitmap_next_zero(job->sync_bitmap, offset,
UINT64_MAX);
if (offset == -1) {
hbitmap_set(job->copy_bitmap, cluster, end - cluster);
break;

View File

@ -515,62 +515,7 @@ void bdrv_dirty_iter_free(BdrvDirtyBitmapIter *iter)
int64_t bdrv_dirty_iter_next(BdrvDirtyBitmapIter *iter)
{
return hbitmap_iter_next(&iter->hbi, true);
}
/**
* Return the next consecutively dirty area in the dirty bitmap
* belonging to the given iterator @iter.
*
* @max_offset: Maximum value that may be returned for
* *offset + *bytes
* @offset: Will contain the start offset of the next dirty area
* @bytes: Will contain the length of the next dirty area
*
* Returns: True if a dirty area could be found before max_offset
* (which means that *offset and *bytes then contain valid
* values), false otherwise.
*
* Note that @iter is never advanced if false is returned. If an area
* is found (which means that true is returned), it will be advanced
* past that area.
*/
bool bdrv_dirty_iter_next_area(BdrvDirtyBitmapIter *iter, uint64_t max_offset,
uint64_t *offset, int *bytes)
{
uint32_t granularity = bdrv_dirty_bitmap_granularity(iter->bitmap);
uint64_t gran_max_offset;
int64_t ret;
int size;
if (max_offset == iter->bitmap->size) {
/* If max_offset points to the image end, round it up by the
* bitmap granularity */
gran_max_offset = ROUND_UP(max_offset, granularity);
} else {
gran_max_offset = max_offset;
}
ret = hbitmap_iter_next(&iter->hbi, false);
if (ret < 0 || ret + granularity > gran_max_offset) {
return false;
}
*offset = ret;
size = 0;
assert(granularity <= INT_MAX);
do {
/* Advance iterator */
ret = hbitmap_iter_next(&iter->hbi, true);
size += granularity;
} while (ret + granularity <= gran_max_offset &&
hbitmap_iter_next(&iter->hbi, false) == ret + granularity &&
size <= INT_MAX - granularity);
*bytes = MIN(size, max_offset - *offset);
return true;
return hbitmap_iter_next(&iter->hbi);
}
/* Called within bdrv_dirty_bitmap_lock..unlock */
@ -781,9 +726,16 @@ char *bdrv_dirty_bitmap_sha256(const BdrvDirtyBitmap *bitmap, Error **errp)
return hbitmap_sha256(bitmap->bitmap, errp);
}
int64_t bdrv_dirty_bitmap_next_zero(BdrvDirtyBitmap *bitmap, uint64_t offset)
int64_t bdrv_dirty_bitmap_next_zero(BdrvDirtyBitmap *bitmap, uint64_t offset,
uint64_t bytes)
{
return hbitmap_next_zero(bitmap->bitmap, offset);
return hbitmap_next_zero(bitmap->bitmap, offset, bytes);
}
bool bdrv_dirty_bitmap_next_dirty_area(BdrvDirtyBitmap *bitmap,
uint64_t *offset, uint64_t *bytes)
{
return hbitmap_next_dirty_area(bitmap->bitmap, offset, bytes);
}
void bdrv_merge_dirty_bitmap(BdrvDirtyBitmap *dest, const BdrvDirtyBitmap *src,

View File

@ -1185,25 +1185,23 @@ do_sync_target_write(MirrorBlockJob *job, MirrorMethod method,
uint64_t offset, uint64_t bytes,
QEMUIOVector *qiov, int flags)
{
BdrvDirtyBitmapIter *iter;
QEMUIOVector target_qiov;
uint64_t dirty_offset;
int dirty_bytes;
uint64_t dirty_offset = offset;
uint64_t dirty_bytes;
if (qiov) {
qemu_iovec_init(&target_qiov, qiov->niov);
}
iter = bdrv_dirty_iter_new(job->dirty_bitmap);
bdrv_set_dirty_iter(iter, offset);
while (true) {
bool valid_area;
int ret;
bdrv_dirty_bitmap_lock(job->dirty_bitmap);
valid_area = bdrv_dirty_iter_next_area(iter, offset + bytes,
&dirty_offset, &dirty_bytes);
dirty_bytes = MIN(offset + bytes - dirty_offset, INT_MAX);
valid_area = bdrv_dirty_bitmap_next_dirty_area(job->dirty_bitmap,
&dirty_offset,
&dirty_bytes);
if (!valid_area) {
bdrv_dirty_bitmap_unlock(job->dirty_bitmap);
break;
@ -1259,9 +1257,10 @@ do_sync_target_write(MirrorBlockJob *job, MirrorMethod method,
break;
}
}
dirty_offset += dirty_bytes;
}
bdrv_dirty_iter_free(iter);
if (qiov) {
qemu_iovec_destroy(&target_qiov);
}

View File

@ -83,8 +83,6 @@ void bdrv_set_dirty_bitmap_locked(BdrvDirtyBitmap *bitmap,
void bdrv_reset_dirty_bitmap_locked(BdrvDirtyBitmap *bitmap,
int64_t offset, int64_t bytes);
int64_t bdrv_dirty_iter_next(BdrvDirtyBitmapIter *iter);
bool bdrv_dirty_iter_next_area(BdrvDirtyBitmapIter *iter, uint64_t max_offset,
uint64_t *offset, int *bytes);
void bdrv_set_dirty_iter(BdrvDirtyBitmapIter *hbi, int64_t offset);
int64_t bdrv_get_dirty_count(BdrvDirtyBitmap *bitmap);
int64_t bdrv_get_meta_dirty_count(BdrvDirtyBitmap *bitmap);
@ -99,7 +97,10 @@ bool bdrv_has_changed_persistent_bitmaps(BlockDriverState *bs);
BdrvDirtyBitmap *bdrv_dirty_bitmap_next(BlockDriverState *bs,
BdrvDirtyBitmap *bitmap);
char *bdrv_dirty_bitmap_sha256(const BdrvDirtyBitmap *bitmap, Error **errp);
int64_t bdrv_dirty_bitmap_next_zero(BdrvDirtyBitmap *bitmap, uint64_t start);
int64_t bdrv_dirty_bitmap_next_zero(BdrvDirtyBitmap *bitmap, uint64_t offset,
uint64_t bytes);
bool bdrv_dirty_bitmap_next_dirty_area(BdrvDirtyBitmap *bitmap,
uint64_t *offset, uint64_t *bytes);
BdrvDirtyBitmap *bdrv_reclaim_dirty_bitmap_locked(BlockDriverState *bs,
BdrvDirtyBitmap *bitmap,
Error **errp);

View File

@ -300,12 +300,32 @@ void hbitmap_iter_init(HBitmapIter *hbi, const HBitmap *hb, uint64_t first);
unsigned long hbitmap_iter_skip_words(HBitmapIter *hbi);
/* hbitmap_next_zero:
*
* Find next not dirty bit within selected range. If not found, return -1.
*
* @hb: The HBitmap to operate on
* @start: The bit to start from.
*
* Find next not dirty bit.
* @count: Number of bits to proceed. If @start+@count > bitmap size, the whole
* bitmap is looked through. You can use UINT64_MAX as @count to search up to
* the bitmap end.
*/
int64_t hbitmap_next_zero(const HBitmap *hb, uint64_t start);
int64_t hbitmap_next_zero(const HBitmap *hb, uint64_t start, uint64_t count);
/* hbitmap_next_dirty_area:
* @hb: The HBitmap to operate on
* @start: in-out parameter.
* in: the offset to start from
* out: (if area found) start of found area
* @count: in-out parameter.
* in: length of requested region
* out: length of found area
*
* If a dirty area is found within [@start, @start + @count), returns true and
* sets @start and @count appropriately. Otherwise returns false and leaves
* @start and @count unchanged.
*/
bool hbitmap_next_dirty_area(const HBitmap *hb, uint64_t *start,
uint64_t *count);
/* hbitmap_create_meta:
* Create a "meta" hbitmap to track dirtiness of the bits in this HBitmap.
@ -331,14 +351,11 @@ void hbitmap_free_meta(HBitmap *hb);
/**
* hbitmap_iter_next:
* @hbi: HBitmapIter to operate on.
* @advance: If true, advance the iterator. Otherwise, the next call
* of this function will return the same result (if that
* position is still dirty).
*
* Return the next bit that is set in @hbi's associated HBitmap,
* or -1 if all remaining bits are zero.
*/
int64_t hbitmap_iter_next(HBitmapIter *hbi, bool advance);
int64_t hbitmap_iter_next(HBitmapIter *hbi);
/**
* hbitmap_iter_next_word:

View File

@ -2014,7 +2014,7 @@ static unsigned int bitmap_to_extents(BdrvDirtyBitmap *bitmap, uint64_t offset,
bool next_dirty = !dirty;
if (dirty) {
end = bdrv_dirty_bitmap_next_zero(bitmap, begin);
end = bdrv_dirty_bitmap_next_zero(bitmap, begin, UINT64_MAX);
} else {
bdrv_set_dirty_iter(it, begin);
end = bdrv_dirty_iter_next(it);

View File

@ -30,18 +30,6 @@ typedef struct TestHBitmapData {
} TestHBitmapData;
/*
 * Check that the non-advancing (@advance == false) and advancing
 * (@advance == true) forms of hbitmap_iter_next() agree, and return
 * the (common) offset of the next dirty bit, or -1 if none remain.
 */
static int64_t check_hbitmap_iter_next(HBitmapIter *hbi)
{
    /* int64_t, not int: bit offsets may exceed INT_MAX, and the
     * function's return type is int64_t — plain int would truncate. */
    int64_t next0, next1;

    next0 = hbitmap_iter_next(hbi, false);
    next1 = hbitmap_iter_next(hbi, true);

    g_assert_cmpint(next0, ==, next1);

    return next0;
}
/* Check that the HBitmap and the shadow bitmap contain the same data,
* ignoring the same "first" bits.
*/
@ -58,7 +46,7 @@ static void hbitmap_test_check(TestHBitmapData *data,
i = first;
for (;;) {
next = check_hbitmap_iter_next(&hbi);
next = hbitmap_iter_next(&hbi);
if (next < 0) {
next = data->size;
}
@ -447,25 +435,25 @@ static void test_hbitmap_iter_granularity(TestHBitmapData *data,
/* Note that hbitmap_test_check has to be invoked manually in this test. */
hbitmap_test_init(data, 131072 << 7, 7);
hbitmap_iter_init(&hbi, data->hb, 0);
g_assert_cmpint(check_hbitmap_iter_next(&hbi), <, 0);
g_assert_cmpint(hbitmap_iter_next(&hbi), <, 0);
hbitmap_test_set(data, ((L2 + L1 + 1) << 7) + 8, 8);
hbitmap_iter_init(&hbi, data->hb, 0);
g_assert_cmpint(check_hbitmap_iter_next(&hbi), ==, (L2 + L1 + 1) << 7);
g_assert_cmpint(check_hbitmap_iter_next(&hbi), <, 0);
g_assert_cmpint(hbitmap_iter_next(&hbi), ==, (L2 + L1 + 1) << 7);
g_assert_cmpint(hbitmap_iter_next(&hbi), <, 0);
hbitmap_iter_init(&hbi, data->hb, (L2 + L1 + 2) << 7);
g_assert_cmpint(hbitmap_iter_next(&hbi, true), <, 0);
g_assert_cmpint(hbitmap_iter_next(&hbi), <, 0);
hbitmap_test_set(data, (131072 << 7) - 8, 8);
hbitmap_iter_init(&hbi, data->hb, 0);
g_assert_cmpint(check_hbitmap_iter_next(&hbi), ==, (L2 + L1 + 1) << 7);
g_assert_cmpint(check_hbitmap_iter_next(&hbi), ==, 131071 << 7);
g_assert_cmpint(check_hbitmap_iter_next(&hbi), <, 0);
g_assert_cmpint(hbitmap_iter_next(&hbi), ==, (L2 + L1 + 1) << 7);
g_assert_cmpint(hbitmap_iter_next(&hbi), ==, 131071 << 7);
g_assert_cmpint(hbitmap_iter_next(&hbi), <, 0);
hbitmap_iter_init(&hbi, data->hb, (L2 + L1 + 2) << 7);
g_assert_cmpint(check_hbitmap_iter_next(&hbi), ==, 131071 << 7);
g_assert_cmpint(check_hbitmap_iter_next(&hbi), <, 0);
g_assert_cmpint(hbitmap_iter_next(&hbi), ==, 131071 << 7);
g_assert_cmpint(hbitmap_iter_next(&hbi), <, 0);
}
static void hbitmap_test_set_boundary_bits(TestHBitmapData *data, ssize_t diff)
@ -905,7 +893,7 @@ static void test_hbitmap_serialize_zeroes(TestHBitmapData *data,
for (i = 0; i < num_positions; i++) {
hbitmap_deserialize_zeroes(data->hb, positions[i], min_l1, true);
hbitmap_iter_init(&iter, data->hb, 0);
next = check_hbitmap_iter_next(&iter);
next = hbitmap_iter_next(&iter);
if (i == num_positions - 1) {
g_assert_cmpint(next, ==, -1);
} else {
@ -931,37 +919,55 @@ static void test_hbitmap_iter_and_reset(TestHBitmapData *data,
hbitmap_iter_init(&hbi, data->hb, BITS_PER_LONG - 1);
check_hbitmap_iter_next(&hbi);
hbitmap_iter_next(&hbi);
hbitmap_reset_all(data->hb);
check_hbitmap_iter_next(&hbi);
hbitmap_iter_next(&hbi);
}
static void test_hbitmap_next_zero_check(TestHBitmapData *data, int64_t start)
static void test_hbitmap_next_zero_check_range(TestHBitmapData *data,
uint64_t start,
uint64_t count)
{
int64_t ret1 = hbitmap_next_zero(data->hb, start);
int64_t ret1 = hbitmap_next_zero(data->hb, start, count);
int64_t ret2 = start;
for ( ; ret2 < data->size && hbitmap_get(data->hb, ret2); ret2++) {
int64_t end = start >= data->size || data->size - start < count ?
data->size : start + count;
for ( ; ret2 < end && hbitmap_get(data->hb, ret2); ret2++) {
;
}
if (ret2 == data->size) {
if (ret2 == end) {
ret2 = -1;
}
g_assert_cmpint(ret1, ==, ret2);
}
/* Check hbitmap_next_zero() from @start with an unbounded count
 * (UINT64_MAX searches to the end of the bitmap) against the
 * brute-force reference in test_hbitmap_next_zero_check_range(). */
static void test_hbitmap_next_zero_check(TestHBitmapData *data, int64_t start)
{
test_hbitmap_next_zero_check_range(data, start, UINT64_MAX);
}
static void test_hbitmap_next_zero_do(TestHBitmapData *data, int granularity)
{
hbitmap_test_init(data, L3, granularity);
test_hbitmap_next_zero_check(data, 0);
test_hbitmap_next_zero_check(data, L3 - 1);
test_hbitmap_next_zero_check_range(data, 0, 1);
test_hbitmap_next_zero_check_range(data, L3 - 1, 1);
hbitmap_set(data->hb, L2, 1);
test_hbitmap_next_zero_check(data, 0);
test_hbitmap_next_zero_check(data, L2 - 1);
test_hbitmap_next_zero_check(data, L2);
test_hbitmap_next_zero_check(data, L2 + 1);
test_hbitmap_next_zero_check_range(data, 0, 1);
test_hbitmap_next_zero_check_range(data, 0, L2);
test_hbitmap_next_zero_check_range(data, L2 - 1, 1);
test_hbitmap_next_zero_check_range(data, L2 - 1, 2);
test_hbitmap_next_zero_check_range(data, L2, 1);
test_hbitmap_next_zero_check_range(data, L2 + 1, 1);
hbitmap_set(data->hb, L2 + 5, L1);
test_hbitmap_next_zero_check(data, 0);
@ -970,6 +976,10 @@ static void test_hbitmap_next_zero_do(TestHBitmapData *data, int granularity)
test_hbitmap_next_zero_check(data, L2 + 5);
test_hbitmap_next_zero_check(data, L2 + L1 - 1);
test_hbitmap_next_zero_check(data, L2 + L1);
test_hbitmap_next_zero_check_range(data, L2, 6);
test_hbitmap_next_zero_check_range(data, L2 + 1, 3);
test_hbitmap_next_zero_check_range(data, L2 + 4, L1);
test_hbitmap_next_zero_check_range(data, L2 + 5, L1);
hbitmap_set(data->hb, L2 * 2, L3 - L2 * 2);
test_hbitmap_next_zero_check(data, L2 * 2 - L1);
@ -977,6 +987,8 @@ static void test_hbitmap_next_zero_do(TestHBitmapData *data, int granularity)
test_hbitmap_next_zero_check(data, L2 * 2 - 1);
test_hbitmap_next_zero_check(data, L2 * 2);
test_hbitmap_next_zero_check(data, L3 - 1);
test_hbitmap_next_zero_check_range(data, L2 * 2 - L1, L1 + 1);
test_hbitmap_next_zero_check_range(data, L2 * 2, L2);
hbitmap_set(data->hb, 0, L3);
test_hbitmap_next_zero_check(data, 0);
@ -992,6 +1004,106 @@ static void test_hbitmap_next_zero_4(TestHBitmapData *data, const void *unused)
test_hbitmap_next_zero_do(data, 4);
}
/*
 * Compare hbitmap_next_dirty_area() against a brute-force reference scan
 * over the window [offset, offset + count), clipped to the bitmap size.
 * Both the found/not-found result and the reported area (start, length)
 * must match; when nothing is found, the in/out parameters must be
 * left unchanged.
 */
static void test_hbitmap_next_dirty_area_check(TestHBitmapData *data,
                                               uint64_t offset,
                                               uint64_t count)
{
    uint64_t got_off = offset;
    uint64_t got_len = count;
    bool got = hbitmap_next_dirty_area(data->hb, &got_off, &got_len);

    /* Clip the search window to the bitmap size (guarding overflow). */
    int64_t limit;
    if (offset > data->size || data->size - offset < count) {
        limit = data->size;
    } else {
        limit = offset + count;
    }

    /* Brute force: first dirty bit in the window... */
    uint64_t exp_off = offset;
    while (exp_off < limit && !hbitmap_get(data->hb, exp_off)) {
        exp_off++;
    }
    /* ...then the run of consecutive dirty bits following it. */
    uint64_t exp_len = 1;
    while (exp_off + exp_len < limit &&
           hbitmap_get(data->hb, exp_off + exp_len)) {
        exp_len++;
    }

    bool expect = exp_off < limit;
    if (!expect) {
        /* Not found: the function must leave its in/out args untouched. */
        exp_off = offset;
        exp_len = count;
    }

    g_assert_cmpint(got, ==, expect);
    g_assert_cmpint(got_off, ==, exp_off);
    g_assert_cmpint(got_len, ==, exp_len);
}
/* Exercise hbitmap_next_dirty_area() against the brute-force reference
 * (test_hbitmap_next_dirty_area_check) on a bitmap of size L3 at the
 * given granularity, progressively dirtying more of the bitmap. */
static void test_hbitmap_next_dirty_area_do(TestHBitmapData *data,
int granularity)
{
hbitmap_test_init(data, L3, granularity);
/* Fully clean bitmap: nothing should be found anywhere. */
test_hbitmap_next_dirty_area_check(data, 0, UINT64_MAX);
test_hbitmap_next_dirty_area_check(data, 0, 1);
test_hbitmap_next_dirty_area_check(data, L3 - 1, 1);
/* Single dirty bit at L2: probe windows around the boundary. */
hbitmap_set(data->hb, L2, 1);
test_hbitmap_next_dirty_area_check(data, 0, 1);
test_hbitmap_next_dirty_area_check(data, 0, L2);
test_hbitmap_next_dirty_area_check(data, 0, UINT64_MAX);
test_hbitmap_next_dirty_area_check(data, L2 - 1, UINT64_MAX);
test_hbitmap_next_dirty_area_check(data, L2 - 1, 1);
test_hbitmap_next_dirty_area_check(data, L2 - 1, 2);
test_hbitmap_next_dirty_area_check(data, L2 - 1, 3);
test_hbitmap_next_dirty_area_check(data, L2, UINT64_MAX);
test_hbitmap_next_dirty_area_check(data, L2, 1);
test_hbitmap_next_dirty_area_check(data, L2 + 1, 1);
/* Add a run of L1 dirty bits at L2 + 5: windows that start before,
 * inside, and after the run, including zero-length windows. */
hbitmap_set(data->hb, L2 + 5, L1);
test_hbitmap_next_dirty_area_check(data, 0, UINT64_MAX);
test_hbitmap_next_dirty_area_check(data, L2 - 2, 8);
test_hbitmap_next_dirty_area_check(data, L2 + 1, 5);
test_hbitmap_next_dirty_area_check(data, L2 + 1, 3);
test_hbitmap_next_dirty_area_check(data, L2 + 4, L1);
test_hbitmap_next_dirty_area_check(data, L2 + 5, L1);
test_hbitmap_next_dirty_area_check(data, L2 + 7, L1);
test_hbitmap_next_dirty_area_check(data, L2 + L1, L1);
test_hbitmap_next_dirty_area_check(data, L2, 0);
test_hbitmap_next_dirty_area_check(data, L2 + 1, 0);
/* Dirty the tail of the bitmap from L2 * 2 to the end. */
hbitmap_set(data->hb, L2 * 2, L3 - L2 * 2);
test_hbitmap_next_dirty_area_check(data, 0, UINT64_MAX);
test_hbitmap_next_dirty_area_check(data, L2, UINT64_MAX);
test_hbitmap_next_dirty_area_check(data, L2 + 1, UINT64_MAX);
test_hbitmap_next_dirty_area_check(data, L2 + 5 + L1 - 1, UINT64_MAX);
test_hbitmap_next_dirty_area_check(data, L2 + 5 + L1, 5);
test_hbitmap_next_dirty_area_check(data, L2 * 2 - L1, L1 + 1);
test_hbitmap_next_dirty_area_check(data, L2 * 2, L2);
/* Fully dirty bitmap. */
hbitmap_set(data->hb, 0, L3);
test_hbitmap_next_dirty_area_check(data, 0, UINT64_MAX);
}
/* Run the next_dirty_area tests at granularity 0 (1 << 0 = 1 unit/bit). */
static void test_hbitmap_next_dirty_area_0(TestHBitmapData *data,
const void *unused)
{
test_hbitmap_next_dirty_area_do(data, 0);
}
/* Run the next_dirty_area tests at granularity 1 (1 << 1 = 2 units/bit). */
static void test_hbitmap_next_dirty_area_1(TestHBitmapData *data,
const void *unused)
{
test_hbitmap_next_dirty_area_do(data, 1);
}
/* Run the next_dirty_area tests at granularity 4 (1 << 4 = 16 units/bit). */
static void test_hbitmap_next_dirty_area_4(TestHBitmapData *data,
const void *unused)
{
test_hbitmap_next_dirty_area_do(data, 4);
}
int main(int argc, char **argv)
{
g_test_init(&argc, &argv, NULL);
@ -1058,6 +1170,13 @@ int main(int argc, char **argv)
hbitmap_test_add("/hbitmap/next_zero/next_zero_4",
test_hbitmap_next_zero_4);
hbitmap_test_add("/hbitmap/next_dirty_area/next_dirty_area_0",
test_hbitmap_next_dirty_area_0);
hbitmap_test_add("/hbitmap/next_dirty_area/next_dirty_area_1",
test_hbitmap_next_dirty_area_1);
hbitmap_test_add("/hbitmap/next_dirty_area/next_dirty_area_4",
test_hbitmap_next_dirty_area_4);
g_test_run();
return 0;

View File

@ -53,6 +53,9 @@
*/
struct HBitmap {
/* Size of the bitmap, as requested in hbitmap_alloc. */
uint64_t orig_size;
/* Number of total bits in the bottom level. */
uint64_t size;
@ -141,7 +144,7 @@ unsigned long hbitmap_iter_skip_words(HBitmapIter *hbi)
return cur;
}
int64_t hbitmap_iter_next(HBitmapIter *hbi, bool advance)
int64_t hbitmap_iter_next(HBitmapIter *hbi)
{
unsigned long cur = hbi->cur[HBITMAP_LEVELS - 1] &
hbi->hb->levels[HBITMAP_LEVELS - 1][hbi->pos];
@ -154,12 +157,8 @@ int64_t hbitmap_iter_next(HBitmapIter *hbi, bool advance)
}
}
if (advance) {
/* The next call will resume work from the next bit. */
hbi->cur[HBITMAP_LEVELS - 1] = cur & (cur - 1);
} else {
hbi->cur[HBITMAP_LEVELS - 1] = cur;
}
/* The next call will resume work from the next bit. */
hbi->cur[HBITMAP_LEVELS - 1] = cur & (cur - 1);
item = ((uint64_t)hbi->pos << BITS_PER_LEVEL) + ctzl(cur);
return item << hbi->granularity;
@ -192,16 +191,28 @@ void hbitmap_iter_init(HBitmapIter *hbi, const HBitmap *hb, uint64_t first)
}
}
int64_t hbitmap_next_zero(const HBitmap *hb, uint64_t start)
int64_t hbitmap_next_zero(const HBitmap *hb, uint64_t start, uint64_t count)
{
size_t pos = (start >> hb->granularity) >> BITS_PER_LEVEL;
unsigned long *last_lev = hb->levels[HBITMAP_LEVELS - 1];
uint64_t sz = hb->sizes[HBITMAP_LEVELS - 1];
unsigned long cur = last_lev[pos];
unsigned start_bit_offset =
(start >> hb->granularity) & (BITS_PER_LONG - 1);
unsigned start_bit_offset;
uint64_t end_bit, sz;
int64_t res;
if (start >= hb->orig_size || count == 0) {
return -1;
}
end_bit = count > hb->orig_size - start ?
hb->size :
((start + count - 1) >> hb->granularity) + 1;
sz = (end_bit + BITS_PER_LONG - 1) >> BITS_PER_LEVEL;
/* There may be some zero bits in @cur before @start. We are not interested
* in them, let's set them.
*/
start_bit_offset = (start >> hb->granularity) & (BITS_PER_LONG - 1);
cur |= (1UL << start_bit_offset) - 1;
assert((start >> hb->granularity) < hb->size);
@ -218,7 +229,7 @@ int64_t hbitmap_next_zero(const HBitmap *hb, uint64_t start)
}
res = (pos << BITS_PER_LEVEL) + ctol(cur);
if (res >= hb->size) {
if (res >= end_bit) {
return -1;
}
@ -231,6 +242,45 @@ int64_t hbitmap_next_zero(const HBitmap *hb, uint64_t start)
return res;
}
/*
 * Find the first contiguous dirty area that begins inside the window
 * [*start, *start + *count), clipped to the bitmap's original size.
 *
 * @hb: the HBitmap to search
 * @start: in: offset to start searching from
 *         out: (only if found) offset of the area's first dirty bit,
 *              never before the requested offset
 * @count: in: length of the search window
 *         out: (only if found) length of the area, clipped to the
 *              window end
 *
 * Returns true and updates *start/*count if a dirty area was found;
 * returns false and leaves them unchanged otherwise.
 */
bool hbitmap_next_dirty_area(const HBitmap *hb, uint64_t *start,
                             uint64_t *count)
{
    HBitmapIter hbi;
    int64_t first_dirty_off, area_end;   /* was misspelled "firt_dirty_off" */
    uint32_t granularity = 1UL << hb->granularity;
    uint64_t end;

    if (*start >= hb->orig_size || *count == 0) {
        return false;
    }

    /* Clip the window end to the bitmap size, guarding against
     * *start + *count overflowing. */
    end = *count > hb->orig_size - *start ? hb->orig_size : *start + *count;

    hbitmap_iter_init(&hbi, hb, *start);
    first_dirty_off = hbitmap_iter_next(&hbi);

    if (first_dirty_off < 0 || first_dirty_off >= end) {
        return false;
    }

    if (first_dirty_off + granularity >= end) {
        /* The single granularity chunk already reaches the window end. */
        area_end = end;
    } else {
        /* Find the first clean bit after the area; if there is none
         * within the window, the area extends to the window end. */
        area_end = hbitmap_next_zero(hb, first_dirty_off + granularity,
                                     end - first_dirty_off - granularity);
        if (area_end < 0) {
            area_end = end;
        }
    }

    /* The iterator rounds down to the granularity, so it may report a
     * start below the requested offset; never return less than *start. */
    if (first_dirty_off > *start) {
        *start = first_dirty_off;
    }
    *count = area_end - *start;

    return true;
}
bool hbitmap_empty(const HBitmap *hb)
{
return hb->count == 0;
@ -652,6 +702,8 @@ HBitmap *hbitmap_alloc(uint64_t size, int granularity)
HBitmap *hb = g_new0(struct HBitmap, 1);
unsigned i;
hb->orig_size = size;
assert(granularity >= 0 && granularity < 64);
size = (size + (1ULL << granularity) - 1) >> granularity;
assert(size <= ((uint64_t)1 << HBITMAP_LOG_MAX_SIZE));