Merge "Update conductor and filters allowing migration with SR-IOV devices"

This commit is contained in:
Zuul
2025-03-10 14:36:36 +00:00
committed by Gerrit Code Review
8 changed files with 317 additions and 38 deletions
+28 -8
View File
@@ -229,20 +229,34 @@ class LiveMigrationTask(base.TaskBase):
At the moment support only if:
1. Instance contains VIF related PCI requests.
2. Neutron supports multiple port binding extension.
3. Src and Dest host support VIF related PCI allocations.
a) Instance contains flavor based PCI requests configured with
live_migratable tag specified.
b) Instance contains neutron port related PCI request and:
- Neutron supports multiple port binding extension.
- Src and Dest host support VIF related PCI allocations.
"""
if self.instance.pci_requests is None or not len(
self.instance.pci_requests.requests):
return
for pci_request in self.instance.pci_requests.requests:
if pci_request.source != objects.InstancePCIRequest.NEUTRON_PORT:
# allow only VIF related PCI requests in live migration.
raise exception.MigrationPreCheckError(
reason= "non-VIF related PCI requests for instance "
"are not allowed for live migration.")
if pci_request.source == objects.InstancePCIRequest.FLAVOR_ALIAS:
# A pre-Epoxy instance using a device that would
# technically be live-migratable will be accepted only if
# all InstancePCIRequests have the 'live_migratable' flag
# set to "true".
if not pci_request.is_live_migratable():
# Ensure the request explicitly requests migratable devices
raise exception.MigrationPreCheckError(
reason="This request does not explicitly request "
"live-migratable devices."
)
if not self.instance.pci_requests.neutron_requests():
return
# All PCI requests are VIF related, now check neutron,
# source and destination compute nodes.
if not self.network_api.has_port_binding_extension(self.context):
@@ -483,6 +497,12 @@ class LiveMigrationTask(base.TaskBase):
request_spec.requested_resources = port_res_req
request_spec.request_level_params = req_lvl_params
# NOTE(gibi): as PCI devices is tracked in placement we
# need to generate request groups from InstancePCIRequests.
# This will append new RequestGroup objects to the
# request_spec.requested_resources list if needed
request_spec.generate_request_groups_from_pci_requests()
scheduler_utils.setup_instance_group(self.context, request_spec)
# We currently only support live migrating to hosts in the same
+18
View File
@@ -67,6 +67,15 @@ class InstancePCIRequest(base.NovaObject):
if target_version < (1, 1) and 'request_id' in primitive:
del primitive['request_id']
def is_live_migratable(self):
    """Report whether this PCI request only targets live-migratable devices.

    A request qualifies when every entry in ``spec`` carries
    ``live_migratable == "true"``.  Returns False when the ``spec``
    field is unset on the object or is None.

    NOTE(review): an empty ``spec`` list vacuously evaluates to True —
    presumably specs are never empty for a real request; confirm with
    callers.
    """
    if "spec" not in self or self.spec is None:
        return False
    return all(
        entry.get("live_migratable") == "true" for entry in self.spec
    )
@base.NovaObjectRegistry.register
class InstancePCIRequests(base.NovaObject):
@@ -151,3 +160,12 @@ class InstancePCIRequests(base.NovaObject):
'request_id': x.request_id,
'requester_id': x.requester_id} for x in self.requests]
return jsonutils.dumps(blob)
def neutron_requests(self):
    """Return True if at least one PCI request comes from a Neutron port.

    Callers use this to decide whether Neutron-specific live-migration
    checks (multiple port binding, VIF-related PCI allocation support)
    are needed at all.

    NOTE(review): the previous implementation used ``all()`` over a
    list filtered to Neutron-port requests.  ``all([])`` is True, and
    ``all()`` of a non-empty list of objects is also True (objects are
    truthy), so the method returned True unconditionally — callers
    gating Neutron checks on it could never skip them for instances
    with only flavor-based PCI requests.  ``any()`` expresses the
    intended "has at least one Neutron-port request" semantics.
    """
    return any(
        req.source == InstancePCIRequest.NEUTRON_PORT
        for req in self.requests
    )
+13
View File
@@ -53,6 +53,7 @@ from nova.network import model as network_model
from nova import objects
from nova.objects import fields as obj_fields
from nova.pci import utils
from oslo_utils import strutils
Alias = ty.Dict[str, ty.Tuple[str, ty.List[ty.Dict[str, str]]]]
@@ -112,6 +113,9 @@ _ALIAS_SCHEMA = {
"traits": {
"type": "string",
},
"live_migratable": {
"type": "string",
},
},
"required": ["name"],
}
@@ -144,6 +148,15 @@ def _get_alias_from_config() -> Alias:
if dev_type:
spec['dev_type'] = dev_type
live_migratable = spec.pop('live_migratable', None)
if live_migratable is not None:
live_migratable = (
"true"
if strutils.bool_from_string(live_migratable, strict=True)
else "false"
)
spec['live_migratable'] = live_migratable
if name not in aliases:
aliases[name] = (numa_policy, [spec])
continue
+42
View File
@@ -608,6 +608,38 @@ class PciDeviceStats(object):
return matching_pools
def _filter_pools_for_live_migratable_devices(
self, pools: ty.List[Pool], request: 'objects.InstancePCIRequest',
) -> ty.List[Pool]:
"""Filter out pools with non live_migratable devices.
:param pools: A list of PCI device pool dicts
:param request: An InstancePCIRequest object describing the type,
quantity and required NUMA affinity of device(s) we want.
:returns: A list of pools that can be used to support the request if
this is possible.
"""
# The following code handles the case where 'live_migratable' is
# set (either "true" or "false") by filtering devices to select the
# appropriate ones.
# If it is not set, we skip the next code block and no filtering
# is applied to the pools.
if all(spec.get("live_migratable") == 'true' for spec in request.spec):
#  if all specs require live migratable devices, then we need to
#  reduce the pools by the ones that support them.
pools = [pool for pool in pools if pool.get("live_migratable") and
pool['live_migratable'] == 'true']
elif all(
spec.get("live_migratable") == "false" for spec in request.spec
):
#  If the request asks to NOT support live-migratable devices, then
#  we don't provide the ones that support them.
#  We want to exclude the devices that don't have this value yet.
pools = [pool for pool in pools if pool.get("live_migratable") and
pool['live_migratable'] == 'false']
return pools
def _filter_pools(
self,
pools: ty.List[Pool],
@@ -728,6 +760,16 @@ class PciDeviceStats(object):
before_count - after_count
)
before_count = after_count
pools = self._filter_pools_for_live_migratable_devices(
pools, request)
after_count = sum([pool['count'] for pool in pools])
if after_count < before_count:
LOG.debug(
'Dropped %d device(s) that are not live migratable',
before_count - after_count
)
if after_count < request.count:
LOG.debug('Not enough PCI devices left to satisfy request')
return None
+1 -1
View File
@@ -5272,7 +5272,7 @@ class ComputeManagerUnitTestCase(test.NoDBTestCase,
{
"vendor_id": "1377",
"product_id": "0047",
"live_migratable": "yes",
"live_migratable": "true",
}
],
),
@@ -433,8 +433,10 @@ class LiveMigrationTaskTestCase(test.NoDBTestCase):
return_value=[[fake_selection1]])
@mock.patch.object(objects.RequestSpec, 'reset_forced_destinations')
@mock.patch.object(scheduler_utils, 'setup_instance_group')
def test_find_destination_works(self, mock_setup, mock_reset, mock_select,
mock_check, mock_call):
@mock.patch.object(objects.RequestSpec,
'generate_request_groups_from_pci_requests')
def test_find_destination_works(self, mock_gengrp, mock_setup, mock_reset,
mock_select, mock_check, mock_call):
self.assertEqual(("host1", "node1", fake_limits1),
self.task._find_destination())
@@ -444,6 +446,7 @@ class LiveMigrationTaskTestCase(test.NoDBTestCase):
# Make sure the spec was updated to include the project_id.
self.assertEqual(self.fake_spec.project_id, self.instance.project_id)
mock_gengrp.assert_called_once()
mock_setup.assert_called_once_with(self.context, self.fake_spec)
mock_reset.assert_called_once_with()
self.ensure_network_information_mock.assert_called_once_with(
@@ -462,13 +465,17 @@ class LiveMigrationTaskTestCase(test.NoDBTestCase):
@mock.patch.object(query.SchedulerQueryClient, 'select_destinations',
return_value=[[fake_selection1]])
@mock.patch.object(scheduler_utils, 'setup_instance_group')
def test_find_destination_no_image_works(self, mock_setup, mock_select,
mock_check, mock_call):
@mock.patch.object(objects.RequestSpec,
'generate_request_groups_from_pci_requests')
def test_find_destination_no_image_works(
self, mock_gengrp, mock_setup, mock_select, mock_check, mock_call
):
self.instance['image_ref'] = ''
self.assertEqual(("host1", "node1", fake_limits1),
self.task._find_destination())
mock_gengrp.assert_called_once()
mock_setup.assert_called_once_with(self.context, self.fake_spec)
mock_select.assert_called_once_with(
self.context, self.fake_spec, [self.instance.uuid],
@@ -520,8 +527,11 @@ class LiveMigrationTaskTestCase(test.NoDBTestCase):
@mock.patch.object(query.SchedulerQueryClient, 'select_destinations',
side_effect=[[[fake_selection1]], [[fake_selection2]]])
@mock.patch.object(scheduler_utils, 'setup_instance_group')
@mock.patch.object(objects.RequestSpec,
'generate_request_groups_from_pci_requests')
def test_find_destination_retry_with_invalid_livem_checks(
self, mock_setup, mock_select, mock_check, mock_call, mock_remove):
self, mock_gengrp, mock_setup, mock_select, mock_check,
mock_call, mock_remove):
self.flags(migrate_max_retries=1)
mock_call.side_effect = [exception.Invalid(), None]
@@ -529,6 +539,7 @@ class LiveMigrationTaskTestCase(test.NoDBTestCase):
self.task._find_destination())
# Should have removed allocations for the first host.
mock_remove.assert_called_once_with(fake_selection1.compute_node_uuid)
mock_gengrp.assert_called_once()
mock_setup.assert_called_once_with(self.context, self.fake_spec)
mock_select.assert_has_calls([
mock.call(self.context, self.fake_spec, [self.instance.uuid],
@@ -548,8 +559,11 @@ class LiveMigrationTaskTestCase(test.NoDBTestCase):
@mock.patch.object(query.SchedulerQueryClient, 'select_destinations',
side_effect=[[[fake_selection1]], [[fake_selection2]]])
@mock.patch.object(scheduler_utils, 'setup_instance_group')
@mock.patch.object(objects.RequestSpec,
'generate_request_groups_from_pci_requests')
def test_find_destination_retry_with_failed_migration_pre_checks(
self, mock_setup, mock_select, mock_check, mock_call, mock_remove):
self, mock_gengrp, mock_setup, mock_select, mock_check,
mock_call, mock_remove):
self.flags(migrate_max_retries=1)
mock_call.side_effect = [exception.MigrationPreCheckError('reason'),
None]
@@ -558,6 +572,7 @@ class LiveMigrationTaskTestCase(test.NoDBTestCase):
self.task._find_destination())
# Should have removed allocations for the first host.
mock_remove.assert_called_once_with(fake_selection1.compute_node_uuid)
mock_gengrp.assert_called_once()
mock_setup.assert_called_once_with(self.context, self.fake_spec)
mock_select.assert_has_calls([
mock.call(self.context, self.fake_spec, [self.instance.uuid],
@@ -577,8 +592,11 @@ class LiveMigrationTaskTestCase(test.NoDBTestCase):
@mock.patch.object(query.SchedulerQueryClient, 'select_destinations',
return_value=[[fake_selection1]])
@mock.patch.object(scheduler_utils, 'setup_instance_group')
@mock.patch.object(objects.RequestSpec,
'generate_request_groups_from_pci_requests')
def test_find_destination_retry_exceeds_max(
self, mock_setup, mock_select, mock_check, mock_remove, mock_save):
self, mock_gengrp, mock_setup, mock_select, mock_check,
mock_remove, mock_save):
self.flags(migrate_max_retries=0)
self.assertRaises(exception.MaxRetriesExceeded,
@@ -587,6 +605,7 @@ class LiveMigrationTaskTestCase(test.NoDBTestCase):
mock_save.assert_called_once_with()
# Should have removed allocations for the first host.
mock_remove.assert_called_once_with(fake_selection1.compute_node_uuid)
mock_gengrp.assert_called_once()
mock_setup.assert_called_once_with(self.context, self.fake_spec)
mock_select.assert_called_once_with(
self.context, self.fake_spec, [self.instance.uuid],
@@ -596,9 +615,13 @@ class LiveMigrationTaskTestCase(test.NoDBTestCase):
@mock.patch.object(query.SchedulerQueryClient, 'select_destinations',
side_effect=exception.NoValidHost(reason=""))
@mock.patch.object(scheduler_utils, 'setup_instance_group')
def test_find_destination_when_runs_out_of_hosts(self, mock_setup,
mock_select):
@mock.patch.object(objects.RequestSpec,
'generate_request_groups_from_pci_requests')
def test_find_destination_when_runs_out_of_hosts(
self, mock_gengrp, mock_setup, mock_select
):
self.assertRaises(exception.NoValidHost, self.task._find_destination)
mock_gengrp.assert_called_once()
mock_setup.assert_called_once_with(self.context, self.fake_spec)
mock_select.assert_called_once_with(
self.context, self.fake_spec, [self.instance.uuid],
@@ -837,10 +860,18 @@ class LiveMigrationTaskTestCase(test.NoDBTestCase):
remove_provider.assert_called_once_with(
self.task.context, self.task.instance.uuid, uuids.cn)
def test_check_can_migrate_pci(self):
"""Tests that _check_can_migrate_pci() allows live-migration if
instance does not contain non-network related PCI requests and
raises MigrationPreCheckError otherwise
@mock.patch.object(objects.Instance, 'get_pci_devices')
def test_check_can_migrate_pci(self, mock_get_pci):
"""Tests that _check_can_migrate_pci() allows live migration
if the instance contains:
- Network PCI requests.
- Non-network PCI requests that are live migratable.
- A combination of the above.
Raises MigrationPreCheckError if:
- Non-network PCI requests are not live migratable.
- Network PCI requests involve devices that lack binding extensions
or do not support VIF-related PCI allocations.
"""
@mock.patch.object(self.task.network_api,
@@ -864,6 +895,32 @@ class LiveMigrationTaskTestCase(test.NoDBTestCase):
self.context)
self.assertTrue(mock_supp_vif_related_pci_alloc.called)
def _assert_precheck_error(msg):
exc = self.assertRaises(exception.MigrationPreCheckError,
_test, pci_requests, False, False)
self.assertIn(msg, str(exc))
exc = self.assertRaises(exception.MigrationPreCheckError,
_test, pci_requests, True, False)
self.assertIn(msg, str(exc))
exc = self.assertRaises(exception.MigrationPreCheckError,
_test, pci_requests, False, True)
self.assertIn(msg, str(exc))
exc = self.assertRaises(exception.MigrationPreCheckError,
_test, pci_requests, True, True)
self.assertIn(msg, str(exc))
fake_devs = objects.PciDeviceList(
objects=[
objects.PciDevice(
compute_node_id=1,
address="0000:04:00.3",
vendor_id="1377",
product_id="0047",
extra_info={"live_migratable": "true"},
request_id=uuids.pcidev1,
)
]
)
# instance has no PCI requests
_test(None, False, False) # No support in Neutron and Computes
_test(None, True, False) # No support in Computes
@@ -879,17 +936,39 @@ class LiveMigrationTaskTestCase(test.NoDBTestCase):
self.assertRaises(exception.MigrationPreCheckError,
_test, pci_requests, False, True)
_test(pci_requests, True, True)
# instance contains Non network related PCI requests (alias_name!=None)
# instance contains Non network (flavor based) related PCI
# requests (alias_name!=None)
# Warning, we are appending, so we have 2 requests
# 1 x neutron_port + 1 x flavor_based
pci_requests.requests.append(
objects.InstancePCIRequest(alias_name="non-network-related-pci"))
objects.InstancePCIRequest(
alias_name="non-network-related-pci",
spec=[{"live_migratable": "true"}],
)
)
mock_get_pci.return_value = fake_devs
self.assertRaises(exception.MigrationPreCheckError,
_test, pci_requests, False, False)
self.assertRaises(exception.MigrationPreCheckError,
_test, pci_requests, True, False)
self.assertRaises(exception.MigrationPreCheckError,
_test, pci_requests, False, True)
self.assertRaises(exception.MigrationPreCheckError,
_test, pci_requests, True, True)
_test(pci_requests, True, True)
# Simulate we have a flavor based request not requesting explicitly
# live migratable devices.
# device.
pci_requests = objects.InstancePCIRequests(
requests=[
objects.InstancePCIRequest(
alias_name="non-network-related-pci",
),
]
)
_assert_precheck_error(
"This request does not explicitly request "
"live-migratable devices."
)
def test_check_can_migrate_specific_resources(self):
"""Test _check_can_migrate_specific_resources allows live migration
+20 -8
View File
@@ -37,6 +37,7 @@ _fake_alias1 = jsonutils.dumps({
"vendor_id": "8086",
"device_type": "type-PCI",
"numa_policy": "legacy",
"live_migratable": "true",
})
_fake_alias2 = jsonutils.dumps({
@@ -45,6 +46,7 @@ _fake_alias2 = jsonutils.dumps({
"product_id": "1111",
"vendor_id": "8086",
"device_type": "type-PF",
"live_migratable": "false",
})
@@ -87,6 +89,7 @@ class PciRequestTestCase(test.NoDBTestCase):
"product_id": "4443",
"vendor_id": "8086",
"dev_type": "type-PCI",
"live_migratable": "true",
}])
self.assertEqual(expected_result, result['QuickAssist'])
@@ -107,7 +110,8 @@ class PciRequestTestCase(test.NoDBTestCase):
"capability_type": "pci",
"product_id": "4443",
"vendor_id": "8086",
"dev_type": "type-PCI"
"dev_type": "type-PCI",
"live_migratable": "true",
}, {
"capability_type": "pci",
"product_id": "4444",
@@ -129,6 +133,13 @@ class PciRequestTestCase(test.NoDBTestCase):
})
self._test_get_alias_from_config_invalid(fake_alias)
def test_get_alias_from_config_invalid_live_migratable(self):
    """An alias whose live_migratable value is not a recognized boolean
    string must be rejected by the alias config parser.
    """
    bad_alias = jsonutils.dumps(
        {"name": "xxx", "live_migratable": "invalid"}
    )
    self._test_get_alias_from_config_invalid(bad_alias)
def test_get_alias_from_config_device_type_vdpa(self):
fake_alias = jsonutils.dumps({
"name": "xxx",
@@ -259,14 +270,14 @@ class PciRequestTestCase(test.NoDBTestCase):
'requester_id': None,
'spec': [{'vendor_id': '8086', 'product_id': '4443',
'dev_type': 'type-PCI',
'capability_type': 'pci'}],
'capability_type': 'pci', 'live_migratable': 'true'}],
'alias_name': 'QuickAssist'},
{'count': 1,
'requester_id': None,
'spec': [{'vendor_id': '8086', 'product_id': '1111',
'dev_type': "type-PF",
'capability_type': 'pci'}],
'capability_type': 'pci', 'live_migratable': 'false'}],
'alias_name': 'IntelNIC'}, ]
requests = request._translate_alias_to_requests(
@@ -292,7 +303,7 @@ class PciRequestTestCase(test.NoDBTestCase):
'requester_id': None,
'spec': [{'vendor_id': '8086', 'product_id': '4443',
'dev_type': 'type-PCI',
'capability_type': 'pci'}],
'capability_type': 'pci', 'live_migratable': 'true'}],
'alias_name': 'QuickAssist',
'numa_policy': policy
},
@@ -301,7 +312,7 @@ class PciRequestTestCase(test.NoDBTestCase):
'requester_id': None,
'spec': [{'vendor_id': '8086', 'product_id': '1111',
'dev_type': "type-PF",
'capability_type': 'pci'}],
'capability_type': 'pci', 'live_migratable': 'false'}],
'alias_name': 'IntelNIC',
'numa_policy': policy
}, ]
@@ -417,6 +428,7 @@ class PciRequestTestCase(test.NoDBTestCase):
'product_id': '4443',
'dev_type': "type-PCI",
'capability_type': 'pci',
'live_migratable': 'true',
}
],
'alias_name': 'QuickAssist'
@@ -435,13 +447,13 @@ class PciRequestTestCase(test.NoDBTestCase):
{'count': 3,
'spec': [{'vendor_id': '8086', 'product_id': '4443',
'dev_type': "type-PCI",
'capability_type': 'pci'}],
'capability_type': 'pci', 'live_migratable': 'true'}],
'alias_name': 'QuickAssist'},
{'count': 1,
'spec': [{'vendor_id': '8086', 'product_id': '1111',
'dev_type': "type-PF",
'capability_type': 'pci'}],
'capability_type': 'pci', 'live_migratable': 'false'}],
'alias_name': 'IntelNIC'}, ]
flavor = {'extra_specs': {"pci_passthrough:alias":
@@ -471,7 +483,7 @@ class PciRequestTestCase(test.NoDBTestCase):
{'count': 3,
'spec': [{'vendor_id': '8086', 'product_id': '1111',
'dev_type': "type-PF",
'capability_type': 'pci'}],
'capability_type': 'pci', 'live_migratable': 'false'}],
'alias_name': 'IntelNIC'}, ]
flavor = {'extra_specs': {"pci_passthrough:alias":
+99 -4
View File
@@ -511,6 +511,20 @@ class PciDeviceStatsWithTagsTestCase(test.NoDBTestCase):
"remote_managed": "false",
}
),
jsonutils.dumps(
{
"vendor_id": "15b4",
"product_id": "102e",
"live_migratable": "true",
}
),
jsonutils.dumps(
{
"vendor_id": "15b5",
"product_id": "102f",
"live_migratable": "false",
}
),
]
self.flags(device_spec=device_spec, group="pci")
dev_filter = whitelist.Whitelist(device_spec)
@@ -593,6 +607,40 @@ class PciDeviceStatsWithTagsTestCase(test.NoDBTestCase):
'parent_addr': '0000:0e:00.0',
'numa_node': 0}))
# live_migratable set to true
# Warning: 'extra_info' field is set by the init or create method
self.live_migratable = []
self.live_migratable.append(
objects.PciDevice.create(
None, {
'compute_node_id': 1,
'address': '0000:0f:00.1',
'vendor_id': '15b4',
'product_id': '102e',
'status': 'available',
'request_id': None,
'dev_type': fields.PciDeviceType.SRIOV_VF,
'parent_addr': '0000:0f:00.0',
'numa_node': 0,
}))
# live_migratable set to false
# Warning: 'extra_info' field is set by the init or create method
self.non_live_migratable = []
self.non_live_migratable.append(
objects.PciDevice.create(
None, {
'compute_node_id': 1,
'address': '0000:10:00.1',
'vendor_id': '15b5',
'product_id': '102f',
'status': 'available',
'request_id': None,
'dev_type': fields.PciDeviceType.SRIOV_VF,
'parent_addr': '0000:10:00.0',
'numa_node': 0,
}))
for dev in self.pci_tagged_devices:
self.pci_stats.add_device(dev)
@@ -605,6 +653,12 @@ class PciDeviceStatsWithTagsTestCase(test.NoDBTestCase):
for dev in self.locally_managed_netdevs:
self.pci_stats.add_device(dev)
for dev in self.live_migratable:
self.pci_stats.add_device(dev)
for dev in self.non_live_migratable:
self.pci_stats.add_device(dev)
def _assertPoolContent(self, pool, vendor_id, product_id, count, **tags):
self.assertEqual(vendor_id, pool['vendor_id'])
self.assertEqual(product_id, pool['product_id'])
@@ -618,8 +672,10 @@ class PciDeviceStatsWithTagsTestCase(test.NoDBTestCase):
nr_untagged = len(self.pci_untagged_devices)
nr_remote = len(self.remote_managed_netdevs)
nr_local = len(self.locally_managed_netdevs)
nr_lm = len(self.live_migratable)
nr_nlm = len(self.non_live_migratable)
self.assertEqual(
nr_tagged + nr_untagged + nr_remote + nr_local,
nr_tagged + nr_untagged + nr_remote + nr_local + nr_lm + nr_nlm,
len(self.pci_stats.pools),
)
# Pools are ordered based on the number of keys. 'product_id',
@@ -677,6 +733,32 @@ class PciDeviceStatsWithTagsTestCase(test.NoDBTestCase):
self.assertEqual(self.locally_managed_netdevs, devs)
j += nr_local
# one live_migratable
devs = []
for i in range(j, j + nr_lm):
self._assertPoolContent(
self.pci_stats.pools[i],
"15b4",
"102e",
1,
)
devs += self.pci_stats.pools[i]['devices']
self.assertEqual(self.live_migratable, devs)
j += nr_lm
# one non_live_migratable
devs = []
for i in range(j, j + nr_nlm):
self._assertPoolContent(
self.pci_stats.pools[i],
"15b5",
"102f",
1,
)
devs += self.pci_stats.pools[i]['devices']
self.assertEqual(self.non_live_migratable, devs)
j += nr_nlm
def test_add_devices(self):
    """Adding all fixture devices produces the expected stats pools.

    Device creation/registration is delegated to _create_pci_devices()
    and pool verification to _assertPools().
    """
    self._create_pci_devices()
    self._assertPools()
@@ -699,10 +781,19 @@ class PciDeviceStatsWithTagsTestCase(test.NoDBTestCase):
objects.InstancePCIRequest(count=1,
spec=[{'vendor_id': '15b3',
'product_id': '101c',
PCI_REMOTE_MANAGED_TAG: 'False'}])]
PCI_REMOTE_MANAGED_TAG: 'False'}]),
objects.InstancePCIRequest(count=1,
spec=[{'vendor_id': '15b4',
'product_id': '102e',
'live_migratable': 'true'}]),
objects.InstancePCIRequest(count=1,
spec=[{'vendor_id': '15b5',
'product_id': '102f',
'live_migratable': 'false'}]),
]
devs = self.pci_stats.consume_requests(pci_requests)
self.assertEqual(5, len(devs))
self.assertEqual(set(['0071', '0072', '101e', '101c']),
self.assertEqual(7, len(devs))
self.assertEqual(set(['0071', '0072', '101e', '101c', '102e', '102f']),
set([dev.product_id for dev in devs]))
self._assertPoolContent(self.pci_stats.pools[0], '1137', '0072', 0)
self._assertPoolContent(self.pci_stats.pools[1], '1137', '0072', 1)
@@ -723,6 +814,10 @@ class PciDeviceStatsWithTagsTestCase(test.NoDBTestCase):
remote_managed='false')
self._assertPoolContent(self.pci_stats.pools[9], '15b3', '101c', 0,
remote_managed='false')
self._assertPoolContent(self.pci_stats.pools[10], '15b4', '102e', 0,
live_migratable='true')
self._assertPoolContent(self.pci_stats.pools[11], '15b5', '102f', 0,
live_migratable='false')
def test_add_device_no_devspec(self):
self._create_pci_devices()