dirty-bitmap: Convert internal hbitmap size/granularity

Now that all callers are using byte-based interfaces, there's no
reason for our internal hbitmap to remain with sector-based
granularity.  It also simplifies our internal scaling, since we
already know that hbitmap widens requests out to granularity
boundaries.

Signed-off-by: Eric Blake <eblake@redhat.com>
Reviewed-by: John Snow <jsnow@redhat.com>
Reviewed-by: Kevin Wolf <kwolf@redhat.com>
Reviewed-by: Fam Zheng <famz@redhat.com>
Signed-off-by: Kevin Wolf <kwolf@redhat.com>
Authored by Eric Blake on 2017-09-25 09:55:26 -05:00; committed by Kevin Wolf
parent 0fdf1a4f68
commit ca75962244
1 changed file with 18 additions and 44 deletions
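
As a rough illustration of the scaling this conversion removes (a minimal standalone sketch, not part of the patch; the constants, the offset, and the ctz helper below are illustrative assumptions rather than QEMU API), both schemes map a byte offset of a write to the same bitmap bit; the byte-based scheme just needs a single shift by ctz32(granularity), and hbitmap itself widens the request to granularity boundaries:

#include <stdint.h>
#include <stdio.h>

#define SECTOR_BITS  9        /* 512-byte sectors, as BDRV_SECTOR_BITS */
#define GRANULARITY  65536    /* a typical dirty-bitmap granularity, in bytes */

/* count trailing zeros, like QEMU's ctz32() */
static unsigned ctz32_sketch(uint32_t v)
{
    return (unsigned)__builtin_ctz(v);
}

int main(void)
{
    int64_t offset = 200000;  /* byte offset of a guest write */

    /* Old scheme: hbitmap was indexed in sectors, so the granularity shift
     * had to be reduced by SECTOR_BITS and every offset scaled to sectors
     * first. */
    unsigned old_shift = ctz32_sketch(GRANULARITY) - SECTOR_BITS;
    int64_t old_bit = (offset >> SECTOR_BITS) >> old_shift;

    /* New scheme: hbitmap is indexed in bytes, so a single shift does all
     * the scaling. */
    unsigned new_shift = ctz32_sketch(GRANULARITY);
    int64_t new_bit = offset >> new_shift;

    printf("old bit %lld, new bit %lld\n",
           (long long)old_bit, (long long)new_bit);   /* both print 3 */
    return 0;
}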

block/dirty-bitmap.c

@@ -38,7 +38,7 @@
  */
 struct BdrvDirtyBitmap {
     QemuMutex *mutex;
-    HBitmap *bitmap;            /* Dirty sector bitmap implementation */
+    HBitmap *bitmap;            /* Dirty bitmap implementation */
     HBitmap *meta;              /* Meta dirty bitmap */
     BdrvDirtyBitmap *successor; /* Anonymous child; implies frozen status */
     char *name;                 /* Optional non-empty unique ID */
@@ -130,12 +130,7 @@ BdrvDirtyBitmap *bdrv_create_dirty_bitmap(BlockDriverState *bs,
     }
     bitmap = g_new0(BdrvDirtyBitmap, 1);
     bitmap->mutex = &bs->dirty_bitmap_mutex;
-    /*
-     * TODO - let hbitmap track full granularity. For now, it is tracking
-     * only sector granularity, as a shortcut for our iterators.
-     */
-    bitmap->bitmap = hbitmap_alloc(DIV_ROUND_UP(bitmap_size, BDRV_SECTOR_SIZE),
-                                   ctz32(granularity) - BDRV_SECTOR_BITS);
+    bitmap->bitmap = hbitmap_alloc(bitmap_size, ctz32(granularity));
     bitmap->size = bitmap_size;
     bitmap->name = g_strdup(name);
     bitmap->disabled = false;
@@ -312,7 +307,7 @@ void bdrv_dirty_bitmap_truncate(BlockDriverState *bs, int64_t bytes)
     QLIST_FOREACH(bitmap, &bs->dirty_bitmaps, list) {
         assert(!bdrv_dirty_bitmap_frozen(bitmap));
         assert(!bitmap->active_iterators);
-        hbitmap_truncate(bitmap->bitmap, DIV_ROUND_UP(bytes, BDRV_SECTOR_SIZE));
+        hbitmap_truncate(bitmap->bitmap, bytes);
         bitmap->size = bytes;
     }
     bdrv_dirty_bitmaps_unlock(bs);
@@ -442,7 +437,7 @@ bool bdrv_get_dirty_locked(BlockDriverState *bs, BdrvDirtyBitmap *bitmap,
                            int64_t offset)
 {
     if (bitmap) {
-        return hbitmap_get(bitmap->bitmap, offset >> BDRV_SECTOR_BITS);
+        return hbitmap_get(bitmap->bitmap, offset);
     } else {
         return false;
     }
@@ -470,7 +465,7 @@ uint32_t bdrv_get_default_bitmap_granularity(BlockDriverState *bs)
 
 uint32_t bdrv_dirty_bitmap_granularity(const BdrvDirtyBitmap *bitmap)
 {
-    return BDRV_SECTOR_SIZE << hbitmap_granularity(bitmap->bitmap);
+    return 1U << hbitmap_granularity(bitmap->bitmap);
 }
 
 BdrvDirtyBitmapIter *bdrv_dirty_iter_new(BdrvDirtyBitmap *bitmap)
@@ -503,20 +498,16 @@ void bdrv_dirty_iter_free(BdrvDirtyBitmapIter *iter)
 
 int64_t bdrv_dirty_iter_next(BdrvDirtyBitmapIter *iter)
 {
-    int64_t ret = hbitmap_iter_next(&iter->hbi);
-    return ret < 0 ? -1 : ret * BDRV_SECTOR_SIZE;
+    return hbitmap_iter_next(&iter->hbi);
 }
 
 /* Called within bdrv_dirty_bitmap_lock..unlock */
 void bdrv_set_dirty_bitmap_locked(BdrvDirtyBitmap *bitmap,
                                   int64_t offset, int64_t bytes)
 {
-    int64_t end_sector = DIV_ROUND_UP(offset + bytes, BDRV_SECTOR_SIZE);
-
     assert(bdrv_dirty_bitmap_enabled(bitmap));
     assert(!bdrv_dirty_bitmap_readonly(bitmap));
-    hbitmap_set(bitmap->bitmap, offset >> BDRV_SECTOR_BITS,
-                end_sector - (offset >> BDRV_SECTOR_BITS));
+    hbitmap_set(bitmap->bitmap, offset, bytes);
 }
 
 void bdrv_set_dirty_bitmap(BdrvDirtyBitmap *bitmap,
@@ -531,12 +522,9 @@ void bdrv_set_dirty_bitmap(BdrvDirtyBitmap *bitmap,
 void bdrv_reset_dirty_bitmap_locked(BdrvDirtyBitmap *bitmap,
                                     int64_t offset, int64_t bytes)
 {
-    int64_t end_sector = DIV_ROUND_UP(offset + bytes, BDRV_SECTOR_SIZE);
-
     assert(bdrv_dirty_bitmap_enabled(bitmap));
     assert(!bdrv_dirty_bitmap_readonly(bitmap));
-    hbitmap_reset(bitmap->bitmap, offset >> BDRV_SECTOR_BITS,
-                  end_sector - (offset >> BDRV_SECTOR_BITS));
+    hbitmap_reset(bitmap->bitmap, offset, bytes);
 }
 
 void bdrv_reset_dirty_bitmap(BdrvDirtyBitmap *bitmap,
@@ -556,8 +544,7 @@ void bdrv_clear_dirty_bitmap(BdrvDirtyBitmap *bitmap, HBitmap **out)
         hbitmap_reset_all(bitmap->bitmap);
     } else {
         HBitmap *backup = bitmap->bitmap;
-        bitmap->bitmap = hbitmap_alloc(DIV_ROUND_UP(bitmap->size,
-                                                    BDRV_SECTOR_SIZE),
+        bitmap->bitmap = hbitmap_alloc(bitmap->size,
                                        hbitmap_granularity(backup));
         *out = backup;
     }
@@ -576,51 +563,40 @@ void bdrv_undo_clear_dirty_bitmap(BdrvDirtyBitmap *bitmap, HBitmap *in)
 uint64_t bdrv_dirty_bitmap_serialization_size(const BdrvDirtyBitmap *bitmap,
                                               uint64_t offset, uint64_t bytes)
 {
-    assert(QEMU_IS_ALIGNED(offset | bytes, BDRV_SECTOR_SIZE));
-    return hbitmap_serialization_size(bitmap->bitmap,
-                                      offset >> BDRV_SECTOR_BITS,
-                                      bytes >> BDRV_SECTOR_BITS);
+    return hbitmap_serialization_size(bitmap->bitmap, offset, bytes);
 }
 
 uint64_t bdrv_dirty_bitmap_serialization_align(const BdrvDirtyBitmap *bitmap)
 {
-    return hbitmap_serialization_align(bitmap->bitmap) * BDRV_SECTOR_SIZE;
+    return hbitmap_serialization_align(bitmap->bitmap);
 }
 
 void bdrv_dirty_bitmap_serialize_part(const BdrvDirtyBitmap *bitmap,
                                       uint8_t *buf, uint64_t offset,
                                       uint64_t bytes)
 {
-    assert(QEMU_IS_ALIGNED(offset | bytes, BDRV_SECTOR_SIZE));
-    hbitmap_serialize_part(bitmap->bitmap, buf, offset >> BDRV_SECTOR_BITS,
-                           bytes >> BDRV_SECTOR_BITS);
+    hbitmap_serialize_part(bitmap->bitmap, buf, offset, bytes);
 }
 
 void bdrv_dirty_bitmap_deserialize_part(BdrvDirtyBitmap *bitmap,
                                         uint8_t *buf, uint64_t offset,
                                         uint64_t bytes, bool finish)
 {
-    assert(QEMU_IS_ALIGNED(offset | bytes, BDRV_SECTOR_SIZE));
-    hbitmap_deserialize_part(bitmap->bitmap, buf, offset >> BDRV_SECTOR_BITS,
-                             bytes >> BDRV_SECTOR_BITS, finish);
+    hbitmap_deserialize_part(bitmap->bitmap, buf, offset, bytes, finish);
 }
 
 void bdrv_dirty_bitmap_deserialize_zeroes(BdrvDirtyBitmap *bitmap,
                                           uint64_t offset, uint64_t bytes,
                                           bool finish)
 {
-    assert(QEMU_IS_ALIGNED(offset | bytes, BDRV_SECTOR_SIZE));
-    hbitmap_deserialize_zeroes(bitmap->bitmap, offset >> BDRV_SECTOR_BITS,
-                               bytes >> BDRV_SECTOR_BITS, finish);
+    hbitmap_deserialize_zeroes(bitmap->bitmap, offset, bytes, finish);
 }
 
 void bdrv_dirty_bitmap_deserialize_ones(BdrvDirtyBitmap *bitmap,
                                         uint64_t offset, uint64_t bytes,
                                         bool finish)
 {
-    assert(QEMU_IS_ALIGNED(offset | bytes, BDRV_SECTOR_SIZE));
-    hbitmap_deserialize_ones(bitmap->bitmap, offset >> BDRV_SECTOR_BITS,
-                             bytes >> BDRV_SECTOR_BITS, finish);
+    hbitmap_deserialize_ones(bitmap->bitmap, offset, bytes, finish);
 }
 
 void bdrv_dirty_bitmap_deserialize_finish(BdrvDirtyBitmap *bitmap)
@@ -631,7 +607,6 @@ void bdrv_dirty_bitmap_deserialize_finish(BdrvDirtyBitmap *bitmap)
 void bdrv_set_dirty(BlockDriverState *bs, int64_t offset, int64_t bytes)
 {
     BdrvDirtyBitmap *bitmap;
-    int64_t end_sector = DIV_ROUND_UP(offset + bytes, BDRV_SECTOR_SIZE);
 
     if (QLIST_EMPTY(&bs->dirty_bitmaps)) {
         return;
@@ -643,8 +618,7 @@ void bdrv_set_dirty(BlockDriverState *bs, int64_t offset, int64_t bytes)
             continue;
         }
         assert(!bdrv_dirty_bitmap_readonly(bitmap));
-        hbitmap_set(bitmap->bitmap, offset >> BDRV_SECTOR_BITS,
-                    end_sector - (offset >> BDRV_SECTOR_BITS));
+        hbitmap_set(bitmap->bitmap, offset, bytes);
     }
     bdrv_dirty_bitmaps_unlock(bs);
 }
@@ -654,12 +628,12 @@ void bdrv_set_dirty(BlockDriverState *bs, int64_t offset, int64_t bytes)
  */
 void bdrv_set_dirty_iter(BdrvDirtyBitmapIter *iter, int64_t offset)
 {
-    hbitmap_iter_init(&iter->hbi, iter->hbi.hb, offset >> BDRV_SECTOR_BITS);
+    hbitmap_iter_init(&iter->hbi, iter->hbi.hb, offset);
 }
 
 int64_t bdrv_get_dirty_count(BdrvDirtyBitmap *bitmap)
 {
-    return hbitmap_count(bitmap->bitmap) << BDRV_SECTOR_BITS;
+    return hbitmap_count(bitmap->bitmap);
 }
 
 int64_t bdrv_get_meta_dirty_count(BdrvDirtyBitmap *bitmap)