
Commit 0eea504

lyarwood authored and stephenfin committed
compute: Use source_bdms to reset attachment_ids during LM rollback
As we now provide the original source bdms during rollback, we can also use them to reset any volume attachment ids to the ids used by the source compute host.

Change-Id: Ibe9215c07a1ee00e0e121c69bcf7ee1b1b80fae0
1 parent 0c39090 commit 0eea504
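
The rollback relies on a lookup of the source BDMs keyed by volume ID (source_bdms_by_volid in the diff below). A minimal, self-contained sketch of that shape, using plain stand-in objects rather than Nova's BlockDeviceMapping:

    import collections

    # Illustrative stand-in for a source-host BDM; not a Nova object.
    SourceBDM = collections.namedtuple('SourceBDM', ['volume_id', 'attachment_id'])

    source_bdms = [
        SourceBDM('volume-uuid-1', 'source-attachment-uuid-1'),
        SourceBDM('volume-uuid-2', 'source-attachment-uuid-2'),
    ]

    # Presumed shape of the lookup consumed by _rollback_live_migration below.
    source_bdms_by_volid = {bdm.volume_id: bdm for bdm in source_bdms}

    assert source_bdms_by_volid['volume-uuid-1'].attachment_id == \
        'source-attachment-uuid-1'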

File tree

nova/compute/manager.py
nova/objects/migrate_data.py
nova/tests/unit/compute/test_compute.py

3 files changed: +34 -24 lines changed

nova/compute/manager.py

Lines changed: 9 additions & 16 deletions
@@ -6541,7 +6541,7 @@ def pre_live_migration(self, context, instance, block_migration, disk,
                     # save current attachment so we can detach it on success,
                     # or restore it on a rollback.
                     # NOTE(mdbooth): This data is no longer used by the source
-                    # host since change I0390c9ff. We can't remove it until we
+                    # host since change Ibe9215c0. We can't remove it until we
                     # are sure the source host has been upgraded.
                     migrate_data.old_vol_attachment_ids[bdm.volume_id] = \
                         bdm.attachment_id
@@ -7342,24 +7342,17 @@ def _rollback_live_migration(self, context, instance,
                 self.compute_rpcapi.remove_volume_connection(
                     context, instance, bdm.volume_id, dest)
 
-                if bdm.attachment_id:
-                    # 3.44 cinder api flow. Set the bdm's
-                    # attachment_id to the old attachment of the source
-                    # host. If old_attachments is not there, then
-                    # there was an error before the new attachment was made.
-                    # TODO(lyarwood): migrate_data.old_vol_attachment_ids can
-                    # be removed now as we can lookup the original
-                    # attachment_ids from the source_bdms list here.
-                    old_attachments = migrate_data.old_vol_attachment_ids \
-                        if 'old_vol_attachment_ids' in migrate_data else None
-                    if old_attachments and bdm.volume_id in old_attachments:
-                        self.volume_api.attachment_delete(context,
-                                                          bdm.attachment_id)
-                        bdm.attachment_id = old_attachments[bdm.volume_id]
+                source_bdm = source_bdms_by_volid[bdm.volume_id]
+                if bdm.attachment_id and source_bdm.attachment_id:
+                    # NOTE(lyarwood): 3.44 cinder api flow. Delete the
+                    # attachment used by the destination and reset the bdm
+                    # attachment_id to the old attachment of the source host.
+                    self.volume_api.attachment_delete(context,
+                                                      bdm.attachment_id)
+                    bdm.attachment_id = source_bdm.attachment_id
 
                 # NOTE(lyarwood): Rollback the connection_info stored within
                 # the BDM to that used by the source and not the destination.
-                source_bdm = source_bdms_by_volid[bdm.volume_id]
                 bdm.connection_info = source_bdm.connection_info
                 bdm.save()
 
nova/objects/migrate_data.py

Lines changed: 1 addition & 1 deletion
@@ -124,7 +124,7 @@ class LiveMigrateData(obj_base.NovaObject):
         # old_vol_attachment_ids is a dict used to store the old attachment_ids
         # for each volume so they can be restored on a migration rollback. The
         # key is the volume_id, and the value is the attachment_id.
-        # TODO(mdbooth): This field was made redundant by change I0390c9ff. We
+        # TODO(mdbooth): This field was made redundant by change Ibe9215c0. We
         # should eventually remove it.
         'old_vol_attachment_ids': fields.DictOfStringsField(),
         # wait_for_vif_plugged is set in pre_live_migration on the destination

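For reference, old_vol_attachment_ids is nothing more than a volume_id to attachment_id mapping, which is why the per-volume source BDMs now carry the same information; the values below are made-up placeholders:

    # Illustrative shape only: keys are volume UUIDs, values are the
    # attachment UUIDs that were current on the source host.
    old_vol_attachment_ids = {
        'volume-uuid-1': 'source-attachment-uuid-1',
        'volume-uuid-2': 'source-attachment-uuid-2',
    }
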
nova/tests/unit/compute/test_compute.py

Lines changed: 24 additions & 7 deletions
@@ -6211,6 +6211,7 @@ def stupid(*args, **kwargs):
         # cleanup
         db.instance_destroy(c, instance['uuid'])
 
+    @mock.patch.object(cinder.API, 'attachment_delete')
     @mock.patch.object(fake.FakeDriver, 'get_instance_disk_info')
     @mock.patch.object(compute_rpcapi.ComputeAPI, 'pre_live_migration')
     @mock.patch.object(objects.ComputeNode,
@@ -6223,7 +6224,8 @@ def stupid(*args, **kwargs):
     def test_live_migration_exception_rolls_back(self, mock_save,
                                                  mock_rollback, mock_remove,
                                                  mock_get_bdms,
-                                                 mock_get_node, mock_pre, mock_get_disk):
+                                                 mock_get_node, mock_pre, mock_get_disk,
+                                                 mock_attachment_delete):
         # Confirm exception when pre_live_migration fails.
         c = context.get_admin_context()
 
@@ -6241,27 +6243,34 @@ def test_live_migration_exception_rolls_back(self, mock_save,
         # All the fake BDMs we've generated, in order
         fake_bdms = []
 
+        # A list of the attachment_ids returned by gen_fake_bdms
+        fake_attachment_ids = []
+
         def gen_fake_bdms(obj, instance):
-            # generate a unique fake connection_info every time we're called,
-            # simulating connection_info being mutated elsewhere.
+            # generate a unique fake connection_info and attachment_id every
+            # time we're called, simulating attachment_id and connection_info
+            # being mutated elsewhere.
             bdms = objects.BlockDeviceMappingList(objects=[
                 objects.BlockDeviceMapping(
-                    **fake_block_device.FakeDbBlockDeviceDict(
+                    **fake_block_device.AnonFakeDbBlockDeviceDict(
                         {'volume_id': uuids.volume_id_1,
+                         'attachment_id': uuidutils.generate_uuid(),
                          'source_type': 'volume',
                          'connection_info':
                              jsonutils.dumps(uuidutils.generate_uuid()),
                          'destination_type': 'volume'})),
                 objects.BlockDeviceMapping(
-                    **fake_block_device.FakeDbBlockDeviceDict(
+                    **fake_block_device.AnonFakeDbBlockDeviceDict(
                         {'volume_id': uuids.volume_id_2,
+                         'attachment_id': uuidutils.generate_uuid(),
                          'source_type': 'volume',
                          'connection_info':
                              jsonutils.dumps(uuidutils.generate_uuid()),
                          'destination_type': 'volume'}))
             ])
             for bdm in bdms:
                 bdm.save = mock.Mock()
+                fake_attachment_ids.append(bdm.attachment_id)
             fake_bdms.append(bdms)
             return bdms
 
@@ -6312,10 +6321,13 @@ def do_it(mock_client, mock_setup):
         # BDMs with unique connection_info every time it's called. These are
         # stored in fake_bdms in the order they were generated. We assert here
         # that the last BDMs generated (in _rollback_live_migration) now have
-        # the same connection_info as the first BDMs generated (before calling
-        # pre_live_migration), and that we saved them.
+        # the same connection_info and attachment_id as the first BDMs
+        # generated (before calling pre_live_migration), and that we saved
+        # them.
         self.assertGreater(len(fake_bdms), 1)
         for source_bdm, final_bdm in zip(fake_bdms[0], fake_bdms[-1]):
+            self.assertEqual(source_bdm.attachment_id,
+                             final_bdm.attachment_id)
             self.assertEqual(source_bdm.connection_info,
                              final_bdm.connection_info)
             final_bdm.save.assert_called()
@@ -6328,6 +6340,11 @@ def do_it(mock_client, mock_setup):
             destroy_disks=True,
             migrate_data=test.MatchType(
                 migrate_data_obj.XenapiLiveMigrateData))
+        # Assert that the final attachment_ids returned by
+        # BlockDeviceMappingList.get_by_instance_uuid are then deleted.
+        mock_attachment_delete.assert_has_calls([
+            mock.call(c, fake_attachment_ids.pop()),
+            mock.call(c, fake_attachment_ids.pop())], any_order=True)
 
     @mock.patch.object(compute_rpcapi.ComputeAPI, 'pre_live_migration')
     @mock.patch.object(compute_rpcapi.ComputeAPI,
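
The final assertion leans on unittest.mock's assert_has_calls with any_order=True, so the two attachment_delete calls are matched regardless of the order in which the BDMs were processed. A minimal stand-alone sketch of that pattern, with illustrative names rather than the test's fixtures:

    from unittest import mock

    attachment_delete = mock.Mock()
    attachment_delete('ctxt', 'attachment-b')
    attachment_delete('ctxt', 'attachment-a')

    # Passes even though the calls happened in the opposite order.
    attachment_delete.assert_has_calls(
        [mock.call('ctxt', 'attachment-a'),
         mock.call('ctxt', 'attachment-b')],
        any_order=True)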
